Merge pull request #9872 from Budibase/labday/cli-ts

TypeScript conversion for CLI
Michael Drury 2023-03-03 13:06:43 +00:00 committed by GitHub
commit 271124b47d
49 changed files with 3562 additions and 497 deletions

View File

@ -22,7 +22,7 @@
"test:watch": "jest --watchAll" "test:watch": "jest --watchAll"
}, },
"dependencies": { "dependencies": {
"@budibase/nano": "10.1.1", "@budibase/nano": "10.1.2",
"@budibase/pouchdb-replication-stream": "1.2.10", "@budibase/pouchdb-replication-stream": "1.2.10",
"@budibase/types": "2.3.18-alpha.29", "@budibase/types": "2.3.18-alpha.29",
"@shopify/jest-koa-mocks": "5.0.1", "@shopify/jest-koa-mocks": "5.0.1",

View File

@ -28,6 +28,7 @@ import * as events from "../events"
import * as configs from "../configs" import * as configs from "../configs"
import { clearCookie, getCookie } from "../utils" import { clearCookie, getCookie } from "../utils"
import { ssoSaveUserNoOp } from "../middleware/passport/sso/sso" import { ssoSaveUserNoOp } from "../middleware/passport/sso/sso"
import env from "../environment"
const refresh = require("passport-oauth2-refresh") const refresh = require("passport-oauth2-refresh")
export { export {
@ -52,7 +53,7 @@ export const jwt = require("jsonwebtoken")
_passport.use(new LocalStrategy(local.options, local.authenticate)) _passport.use(new LocalStrategy(local.options, local.authenticate))
if (jwtPassport.options.secretOrKey) { if (jwtPassport.options.secretOrKey) {
_passport.use(new JwtStrategy(jwtPassport.options, jwtPassport.authenticate)) _passport.use(new JwtStrategy(jwtPassport.options, jwtPassport.authenticate))
} else { } else if (!env.DISABLE_JWT_WARNING) {
logAlert("No JWT Secret supplied, cannot configure JWT strategy") logAlert("No JWT Secret supplied, cannot configure JWT strategy")
} }

View File

@ -42,7 +42,9 @@ export async function getConfig<T extends Config>(
} }
} }
export async function save(config: Config) { export async function save(
config: Config
): Promise<{ id: string; rev: string }> {
const db = context.getGlobalDB() const db = context.getGlobalDB()
return db.put(config) return db.put(config)
} }

View File

@ -94,6 +94,7 @@ const environment = {
SMTP_HOST: process.env.SMTP_HOST, SMTP_HOST: process.env.SMTP_HOST,
SMTP_PORT: parseInt(process.env.SMTP_PORT || ""), SMTP_PORT: parseInt(process.env.SMTP_PORT || ""),
SMTP_FROM_ADDRESS: process.env.SMTP_FROM_ADDRESS, SMTP_FROM_ADDRESS: process.env.SMTP_FROM_ADDRESS,
DISABLE_JWT_WARNING: process.env.DISABLE_JWT_WARNING,
/** /**
* Enable to allow an admin user to login using a password. * Enable to allow an admin user to login using a password.
* This can be useful to prevent lockout when configuring SSO. * This can be useful to prevent lockout when configuring SSO.

View File

@ -475,10 +475,10 @@
resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39" resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39"
integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw== integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==
"@budibase/nano@10.1.1": "@budibase/nano@10.1.2":
version "10.1.1" version "10.1.2"
resolved "https://registry.yarnpkg.com/@budibase/nano/-/nano-10.1.1.tgz#36ccda4d9bb64b5ee14dd2b27a295b40739b1038" resolved "https://registry.yarnpkg.com/@budibase/nano/-/nano-10.1.2.tgz#10fae5a1ab39be6a81261f40e7b7ec6d21cbdd4a"
integrity sha512-kbMIzMkjVtl+xI0UPwVU0/pn8/ccxTyfzwBz6Z+ZiN2oUSb0fJCe0qwA6o8dxwSa8nZu4MbGAeMJl3CJndmWtA== integrity sha512-1w+YN2n/M5aZ9hBKCP4NEjdQbT8BfCLRizkdvm0Je665eEHw3aE1hvo8mon9Ro9QuDdxj1DfDMMFnym6/QUwpQ==
dependencies: dependencies:
"@types/tough-cookie" "^4.0.2" "@types/tough-cookie" "^4.0.2"
axios "^1.1.3" axios "^1.1.3"

View File

@ -6,3 +6,4 @@ docker-error.log
envoy.yaml envoy.yaml
*.tar.gz *.tar.gz
prebuilds/ prebuilds/
dist/

View File

@ -2,15 +2,18 @@
"name": "@budibase/cli", "name": "@budibase/cli",
"version": "2.3.18-alpha.29", "version": "2.3.18-alpha.29",
"description": "Budibase CLI, for developers, self hosting and migrations.", "description": "Budibase CLI, for developers, self hosting and migrations.",
"main": "src/index.js", "main": "dist/index.js",
"bin": { "bin": {
"budi": "src/index.js" "budi": "dist/index.js"
}, },
"author": "Budibase", "author": "Budibase",
"license": "GPL-3.0", "license": "GPL-3.0",
"scripts": { "scripts": {
"prebuild": "rm -rf prebuilds 2> /dev/null && cp -r node_modules/leveldown/prebuilds prebuilds", "prebuild": "rm -rf prebuilds 2> /dev/null && cp -r node_modules/leveldown/prebuilds prebuilds",
"build": "yarn prebuild && renamer --find .node --replace .fake 'prebuilds/**' && pkg . --out-path build && yarn postbuild", "rename": "renamer --find .node --replace .fake 'prebuilds/**'",
"tsc": "tsc -p tsconfig.build.json",
"pkg": "pkg . --out-path build --no-bytecode --public --public-packages \"*\" -C GZip",
"build": "yarn prebuild && yarn rename && yarn tsc && yarn pkg && yarn postbuild",
"postbuild": "rm -rf prebuilds 2> /dev/null" "postbuild": "rm -rf prebuilds 2> /dev/null"
}, },
"pkg": { "pkg": {
@ -29,7 +32,6 @@
"@budibase/backend-core": "2.3.18-alpha.29", "@budibase/backend-core": "2.3.18-alpha.29",
"@budibase/string-templates": "2.3.18-alpha.29", "@budibase/string-templates": "2.3.18-alpha.29",
"@budibase/types": "2.3.18-alpha.29", "@budibase/types": "2.3.18-alpha.29",
"axios": "0.21.2",
"chalk": "4.1.0", "chalk": "4.1.0",
"cli-progress": "3.11.2", "cli-progress": "3.11.2",
"commander": "7.1.0", "commander": "7.1.0",
@ -40,7 +42,7 @@
"inquirer": "8.0.0", "inquirer": "8.0.0",
"joi": "17.6.0", "joi": "17.6.0",
"lookpath": "1.1.0", "lookpath": "1.1.0",
"node-fetch": "2", "node-fetch": "2.6.7",
"pkg": "5.8.0", "pkg": "5.8.0",
"posthog-node": "1.0.7", "posthog-node": "1.0.7",
"pouchdb": "7.3.0", "pouchdb": "7.3.0",
@ -50,8 +52,15 @@
"yaml": "^2.1.1" "yaml": "^2.1.1"
}, },
"devDependencies": { "devDependencies": {
"@swc/core": "^1.3.25",
"@swc/jest": "^0.2.24",
"@types/jest": "^29.4.0",
"@types/pouchdb": "^6.4.0",
"@types/node-fetch": "2.6.1",
"copyfiles": "^2.4.1", "copyfiles": "^2.4.1",
"eslint": "^7.20.0", "eslint": "^7.20.0",
"renamer": "^4.0.0" "renamer": "^4.0.0",
"ts-node": "^10.9.1",
"typescript": "4.7.3"
} }
} }

View File

@ -1,32 +0,0 @@
const PostHog = require("posthog-node")
const { POSTHOG_TOKEN, AnalyticsEvents } = require("../constants")
const ConfigManager = require("../structures/ConfigManager")
class AnalyticsClient {
constructor() {
this.client = new PostHog(POSTHOG_TOKEN)
this.configManager = new ConfigManager()
}
capture(event) {
if (this.configManager.config.analyticsDisabled) return
this.client.capture(event)
}
enable() {
this.configManager.removeKey("analyticsDisabled")
this.client.capture({ event: AnalyticsEvents.OptIn, distinctId: "cli" })
}
disable() {
this.client.capture({ event: AnalyticsEvents.OptOut, distinctId: "cli" })
this.configManager.setValue("analyticsDisabled", true)
}
status() {
return this.configManager.config.analyticsDisabled ? "disabled" : "enabled"
}
}
module.exports = AnalyticsClient

View File

@ -0,0 +1,33 @@
import PostHog from "posthog-node"
import { POSTHOG_TOKEN, AnalyticsEvent } from "../constants"
import { ConfigManager } from "../structures/ConfigManager"
export class AnalyticsClient {
client: PostHog
configManager: ConfigManager
constructor() {
this.client = new PostHog(POSTHOG_TOKEN, {})
this.configManager = new ConfigManager()
}
capture(event: { distinctId: string; event: string; properties?: any }) {
if (this.configManager.config.analyticsDisabled) return
this.client.capture(event)
}
enable() {
this.configManager.removeKey("analyticsDisabled")
this.client.capture({ event: AnalyticsEvent.OptIn, distinctId: "cli" })
}
disable() {
this.client.capture({ event: AnalyticsEvent.OptOut, distinctId: "cli" })
this.configManager.setValue("analyticsDisabled", true)
}
status() {
return this.configManager.config.analyticsDisabled ? "disabled" : "enabled"
}
}

View File

@ -1,7 +1,7 @@
const Command = require("../structures/Command") import { Command } from "../structures/Command"
const { CommandWords } = require("../constants") import { CommandWord } from "../constants"
const { success, error } = require("../utils") import { success, error } from "../utils"
const AnalyticsClient = require("./Client") import { AnalyticsClient } from "./Client"
const client = new AnalyticsClient() const client = new AnalyticsClient()
@ -14,11 +14,10 @@ async function optOut() {
"Successfully opted out of Budibase analytics. You can opt in at any time by running 'budi analytics opt-in'" "Successfully opted out of Budibase analytics. You can opt in at any time by running 'budi analytics opt-in'"
) )
) )
} catch (err) { } catch (err: any) {
console.log( console.log(
error( error(
"Error opting out of Budibase analytics. Please try again later.", `Error opting out of Budibase analytics. Please try again later - ${err}`
err
) )
) )
} }
@ -50,7 +49,7 @@ async function status() {
} }
} }
const command = new Command(`${CommandWords.ANALYTICS}`) export default new Command(`${CommandWord.ANALYTICS}`)
.addHelp("Control the analytics you send to Budibase.") .addHelp("Control the analytics you send to Budibase.")
.addSubOption("--optin", "Opt in to sending analytics to Budibase", optIn) .addSubOption("--optin", "Opt in to sending analytics to Budibase", optIn)
.addSubOption("--optout", "Opt out of sending analytics to Budibase.", optOut) .addSubOption("--optout", "Opt out of sending analytics to Budibase.", optOut)
@ -59,5 +58,3 @@ const command = new Command(`${CommandWords.ANALYTICS}`)
"Check whether you are currently opted in to Budibase analytics.", "Check whether you are currently opted in to Budibase analytics.",
status status
) )
exports.command = command

View File

@ -1,28 +1,30 @@
const Command = require("../structures/Command") import { Command } from "../structures/Command"
const { CommandWords } = require("../constants") import { CommandWord } from "../constants"
const fs = require("fs") import fs from "fs"
const { join } = require("path") import { join } from "path"
const { getAllDbs } = require("../core/db") import { getAllDbs } from "../core/db"
const tar = require("tar") import { progressBar, httpCall } from "../utils"
const { progressBar, httpCall } = require("../utils") import {
const {
TEMP_DIR, TEMP_DIR,
COUCH_DIR, COUCH_DIR,
MINIO_DIR, MINIO_DIR,
getConfig, getConfig,
replication, replication,
getPouches, getPouches,
} = require("./utils") } from "./utils"
const { exportObjects, importObjects } = require("./objectStore") import { exportObjects, importObjects } from "./objectStore"
const tar = require("tar")
async function exportBackup(opts) { type BackupOpts = { env?: string; import?: string; export?: string }
async function exportBackup(opts: BackupOpts) {
const envFile = opts.env || undefined const envFile = opts.env || undefined
let filename = opts["export"] || opts let filename = opts["export"] || (opts as string)
if (typeof filename !== "string") { if (typeof filename !== "string") {
filename = `backup-${new Date().toISOString()}.tar.gz` filename = `backup-${new Date().toISOString()}.tar.gz`
} }
const config = await getConfig(envFile) const config = await getConfig(envFile)
const dbList = await getAllDbs(config["COUCH_DB_URL"]) const dbList = (await getAllDbs(config["COUCH_DB_URL"])) as string[]
const { Remote, Local } = getPouches(config) const { Remote, Local } = getPouches(config)
if (fs.existsSync(TEMP_DIR)) { if (fs.existsSync(TEMP_DIR)) {
fs.rmSync(TEMP_DIR, { recursive: true }) fs.rmSync(TEMP_DIR, { recursive: true })
@ -55,9 +57,9 @@ async function exportBackup(opts) {
console.log(`Generated export file - ${filename}`) console.log(`Generated export file - ${filename}`)
} }
async function importBackup(opts) { async function importBackup(opts: BackupOpts) {
const envFile = opts.env || undefined const envFile = opts.env || undefined
const filename = opts["import"] || opts const filename = opts["import"] || (opts as string)
const config = await getConfig(envFile) const config = await getConfig(envFile)
if (!filename || !fs.existsSync(filename)) { if (!filename || !fs.existsSync(filename)) {
console.error("Cannot import without specifying a valid file to import") console.error("Cannot import without specifying a valid file to import")
@ -99,7 +101,7 @@ async function importBackup(opts) {
fs.rmSync(TEMP_DIR, { recursive: true }) fs.rmSync(TEMP_DIR, { recursive: true })
} }
async function pickOne(opts) { async function pickOne(opts: BackupOpts) {
if (opts["import"]) { if (opts["import"]) {
return importBackup(opts) return importBackup(opts)
} else if (opts["export"]) { } else if (opts["export"]) {
@ -107,7 +109,7 @@ async function pickOne(opts) {
} }
} }
const command = new Command(`${CommandWords.BACKUPS}`) export default new Command(`${CommandWord.BACKUPS}`)
.addHelp( .addHelp(
"Allows building backups of Budibase, as well as importing a backup to a new instance." "Allows building backups of Budibase, as well as importing a backup to a new instance."
) )
@ -126,5 +128,3 @@ const command = new Command(`${CommandWords.BACKUPS}`)
"Provide an environment variable file to configure the CLI.", "Provide an environment variable file to configure the CLI.",
pickOne pickOne
) )
exports.command = command

View File

@ -1,8 +1,8 @@
const { objectStore } = require("@budibase/backend-core") import { objectStore } from "@budibase/backend-core"
const fs = require("fs") import fs from "fs"
const { join } = require("path") import { join } from "path"
const { TEMP_DIR, MINIO_DIR } = require("./utils") import { TEMP_DIR, MINIO_DIR } from "./utils"
const { progressBar } = require("../utils") import { progressBar } from "../utils"
const { const {
ObjectStoreBuckets, ObjectStoreBuckets,
ObjectStore, ObjectStore,
@ -13,10 +13,10 @@ const {
const bucketList = Object.values(ObjectStoreBuckets) const bucketList = Object.values(ObjectStoreBuckets)
exports.exportObjects = async () => { export async function exportObjects() {
const path = join(TEMP_DIR, MINIO_DIR) const path = join(TEMP_DIR, MINIO_DIR)
fs.mkdirSync(path) fs.mkdirSync(path)
let fullList = [] let fullList: any[] = []
let errorCount = 0 let errorCount = 0
for (let bucket of bucketList) { for (let bucket of bucketList) {
const client = ObjectStore(bucket) const client = ObjectStore(bucket)
@ -26,7 +26,7 @@ exports.exportObjects = async () => {
errorCount++ errorCount++
continue continue
} }
const list = await client.listObjectsV2().promise() const list = (await client.listObjectsV2().promise()) as { Contents: any[] }
fullList = fullList.concat(list.Contents.map(el => ({ ...el, bucket }))) fullList = fullList.concat(list.Contents.map(el => ({ ...el, bucket })))
} }
if (errorCount === bucketList.length) { if (errorCount === bucketList.length) {
@ -48,7 +48,7 @@ exports.exportObjects = async () => {
bar.stop() bar.stop()
} }
exports.importObjects = async () => { export async function importObjects() {
const path = join(TEMP_DIR, MINIO_DIR) const path = join(TEMP_DIR, MINIO_DIR)
const buckets = fs.readdirSync(path) const buckets = fs.readdirSync(path)
let total = 0 let total = 0

View File

@ -1,12 +1,13 @@
const dotenv = require("dotenv") import dotenv from "dotenv"
const fs = require("fs") import fs from "fs"
const { string } = require("../questions") import { string } from "../questions"
const { getPouch } = require("../core/db") import { getPouch } from "../core/db"
const { env: environment } = require("@budibase/backend-core") import { env as environment } from "@budibase/backend-core"
import PouchDB from "pouchdb"
exports.TEMP_DIR = ".temp" export const TEMP_DIR = ".temp"
exports.COUCH_DIR = "couchdb" export const COUCH_DIR = "couchdb"
exports.MINIO_DIR = "minio" export const MINIO_DIR = "minio"
const REQUIRED = [ const REQUIRED = [
{ value: "MAIN_PORT", default: "10000" }, { value: "MAIN_PORT", default: "10000" },
@ -19,7 +20,7 @@ const REQUIRED = [
{ value: "MINIO_SECRET_KEY" }, { value: "MINIO_SECRET_KEY" },
] ]
exports.checkURLs = config => { export function checkURLs(config: Record<string, string>) {
const mainPort = config["MAIN_PORT"], const mainPort = config["MAIN_PORT"],
username = config["COUCH_DB_USER"], username = config["COUCH_DB_USER"],
password = config["COUCH_DB_PASSWORD"] password = config["COUCH_DB_PASSWORD"]
@ -34,23 +35,23 @@ exports.checkURLs = config => {
return config return config
} }
exports.askQuestions = async () => { export async function askQuestions() {
console.log( console.log(
"*** NOTE: use a .env file to load these parameters repeatedly ***" "*** NOTE: use a .env file to load these parameters repeatedly ***"
) )
let config = {} let config: Record<string, string> = {}
for (let property of REQUIRED) { for (let property of REQUIRED) {
config[property.value] = await string(property.value, property.default) config[property.value] = await string(property.value, property.default)
} }
return config return config
} }
exports.loadEnvironment = path => { export function loadEnvironment(path: string) {
if (!fs.existsSync(path)) { if (!fs.existsSync(path)) {
throw "Unable to file specified .env file" throw "Unable to file specified .env file"
} }
const env = fs.readFileSync(path, "utf8") const env = fs.readFileSync(path, "utf8")
const config = exports.checkURLs(dotenv.parse(env)) const config = checkURLs(dotenv.parse(env))
for (let required of REQUIRED) { for (let required of REQUIRED) {
if (!config[required.value]) { if (!config[required.value]) {
throw `Cannot find "${required.value}" property in .env file` throw `Cannot find "${required.value}" property in .env file`
@ -60,12 +61,12 @@ exports.loadEnvironment = path => {
} }
// true is the default value passed by commander // true is the default value passed by commander
exports.getConfig = async (envFile = true) => { export async function getConfig(envFile: boolean | string = true) {
let config let config
if (envFile !== true) { if (envFile !== true) {
config = exports.loadEnvironment(envFile) config = loadEnvironment(envFile as string)
} else { } else {
config = await exports.askQuestions() config = await askQuestions()
} }
// fill out environment // fill out environment
for (let key of Object.keys(config)) { for (let key of Object.keys(config)) {
@ -74,12 +75,16 @@ exports.getConfig = async (envFile = true) => {
return config return config
} }
exports.replication = async (from, to) => { export async function replication(
from: PouchDB.Database,
to: PouchDB.Database
) {
const pouch = getPouch() const pouch = getPouch()
try { try {
await pouch.replicate(from, to, { await pouch.replicate(from, to, {
batch_size: 1000, batch_size: 1000,
batch_limit: 5, batches_limit: 5,
// @ts-ignore
style: "main_only", style: "main_only",
}) })
} catch (err) { } catch (err) {
@ -87,7 +92,7 @@ exports.replication = async (from, to) => {
} }
} }
exports.getPouches = config => { export function getPouches(config: Record<string, string>) {
const Remote = getPouch(config["COUCH_DB_URL"]) const Remote = getPouch(config["COUCH_DB_URL"])
const Local = getPouch() const Local = getPouch()
return { Remote, Local } return { Remote, Local }

View File

@ -1,25 +0,0 @@
const { Event } = require("@budibase/types")
exports.CommandWords = {
BACKUPS: "backups",
HOSTING: "hosting",
ANALYTICS: "analytics",
HELP: "help",
PLUGIN: "plugins",
}
exports.InitTypes = {
QUICK: "quick",
DIGITAL_OCEAN: "do",
}
exports.AnalyticsEvents = {
OptOut: "analytics:opt:out",
OptIn: "analytics:opt:in",
SelfHostInit: "hosting:init",
PluginInit: Event.PLUGIN_INIT,
}
exports.POSTHOG_TOKEN = "phc_yGOn4i7jWKaCTapdGR6lfA4AvmuEQ2ijn5zAVSFYPlS"
exports.GENERATED_USER_EMAIL = "admin@admin.com"

View File

@ -0,0 +1,4 @@
export { CommandWord, InitType, AnalyticsEvent } from "@budibase/types"
export const POSTHOG_TOKEN = "phc_yGOn4i7jWKaCTapdGR6lfA4AvmuEQ2ijn5zAVSFYPlS"
export const GENERATED_USER_EMAIL = "admin@admin.com"

View File

@ -1,12 +1,12 @@
const PouchDB = require("pouchdb") import PouchDB from "pouchdb"
const { checkSlashesInUrl } = require("../utils") import { checkSlashesInUrl } from "../utils"
const fetch = require("node-fetch") import fetch from "node-fetch"
/** /**
* Fully qualified URL including username and password, or nothing for local * Fully qualified URL including username and password, or nothing for local
*/ */
exports.getPouch = (url = undefined) => { export function getPouch(url?: string) {
let POUCH_DB_DEFAULTS = {} let POUCH_DB_DEFAULTS
if (!url) { if (!url) {
POUCH_DB_DEFAULTS = { POUCH_DB_DEFAULTS = {
prefix: undefined, prefix: undefined,
@ -19,11 +19,12 @@ exports.getPouch = (url = undefined) => {
} }
const replicationStream = require("pouchdb-replication-stream") const replicationStream = require("pouchdb-replication-stream")
PouchDB.plugin(replicationStream.plugin) PouchDB.plugin(replicationStream.plugin)
// @ts-ignore
PouchDB.adapter("writableStream", replicationStream.adapters.writableStream) PouchDB.adapter("writableStream", replicationStream.adapters.writableStream)
return PouchDB.defaults(POUCH_DB_DEFAULTS) return PouchDB.defaults(POUCH_DB_DEFAULTS) as PouchDB.Static
} }
exports.getAllDbs = async url => { export async function getAllDbs(url: string) {
const response = await fetch( const response = await fetch(
checkSlashesInUrl(encodeURI(`${url}/_all_dbs`)), checkSlashesInUrl(encodeURI(`${url}/_all_dbs`)),
{ {

View File

@ -1,2 +1,3 @@
process.env.NO_JS = "1" process.env.NO_JS = "1"
process.env.JS_BCRYPT = "1" process.env.JS_BCRYPT = "1"
process.env.DISABLE_JWT_WARNING = "1"

View File

@ -1,11 +0,0 @@
const AnalyticsClient = require("./analytics/Client")
const client = new AnalyticsClient()
exports.captureEvent = (event, properties) => {
client.capture({
distinctId: "cli",
event,
properties,
})
}

View File

@ -0,0 +1,11 @@
import { AnalyticsClient } from "./analytics/Client"
const client = new AnalyticsClient()
export function captureEvent(event: string, properties: any) {
client.capture({
distinctId: "cli",
event,
properties,
})
}

View File

@ -1,21 +1,21 @@
const util = require("util") import util from "util"
const exec = util.promisify(require("child_process").exec) const runCommand = util.promisify(require("child_process").exec)
exports.exec = async (command, dir = "./") => { export async function exec(command: string, dir = "./") {
const { stdout } = await exec(command, { cwd: dir }) const { stdout } = await runCommand(command, { cwd: dir })
return stdout return stdout
} }
exports.utilityInstalled = async utilName => { export async function utilityInstalled(utilName: string) {
try { try {
await exports.exec(`${utilName} --version`) await exec(`${utilName} --version`)
return true return true
} catch (err) { } catch (err) {
return false return false
} }
} }
exports.runPkgCommand = async (command, dir = "./") => { export async function runPkgCommand(command: string, dir = "./") {
const yarn = await exports.utilityInstalled("yarn") const yarn = await exports.utilityInstalled("yarn")
const npm = await exports.utilityInstalled("npm") const npm = await exports.utilityInstalled("npm")
if (!yarn && !npm) { if (!yarn && !npm) {

View File

@ -2,15 +2,16 @@ const { success } = require("../utils")
const { updateDockerComposeService } = require("./utils") const { updateDockerComposeService } = require("./utils")
const randomString = require("randomstring") const randomString = require("randomstring")
const { GENERATED_USER_EMAIL } = require("../constants") const { GENERATED_USER_EMAIL } = require("../constants")
import { DockerCompose } from "./types"
exports.generateUser = async (password, silent) => { export async function generateUser(password: string | null, silent: boolean) {
const email = GENERATED_USER_EMAIL const email = GENERATED_USER_EMAIL
if (!password) { if (!password) {
password = randomString.generate({ length: 6 }) password = randomString.generate({ length: 6 })
} }
updateDockerComposeService(service => { updateDockerComposeService((service: DockerCompose) => {
service.environment["BB_ADMIN_USER_EMAIL"] = email service.environment["BB_ADMIN_USER_EMAIL"] = email
service.environment["BB_ADMIN_USER_PASSWORD"] = password service.environment["BB_ADMIN_USER_PASSWORD"] = password as string
}) })
if (!silent) { if (!silent) {
console.log( console.log(

View File

@ -1,14 +1,14 @@
const Command = require("../structures/Command") import { Command } from "../structures/Command"
const { CommandWords } = require("../constants") import { CommandWord } from "../constants"
const { init } = require("./init") import { init } from "./init"
const { start } = require("./start") import { start } from "./start"
const { stop } = require("./stop") import { stop } from "./stop"
const { status } = require("./status") import { status } from "./status"
const { update } = require("./update") import { update } from "./update"
const { generateUser } = require("./genUser") import { generateUser } from "./genUser"
const { watchPlugins } = require("./watch") import { watchPlugins } from "./watch"
const command = new Command(`${CommandWords.HOSTING}`) export default new Command(`${CommandWord.HOSTING}`)
.addHelp("Controls self hosting on the Budibase platform.") .addHelp("Controls self hosting on the Budibase platform.")
.addSubOption( .addSubOption(
"--init [type]", "--init [type]",
@ -46,5 +46,3 @@ const command = new Command(`${CommandWords.HOSTING}`)
generateUser generateUser
) )
.addSubOption("--single", "Specify this with init to use the single image.") .addSubOption("--single", "Specify this with init to use the single image.")
exports.command = command

View File

@ -1,24 +1,25 @@
const { InitTypes, AnalyticsEvents } = require("../constants") import { InitType, AnalyticsEvent } from "../constants"
const { confirmation } = require("../questions") import { confirmation } from "../questions"
const { captureEvent } = require("../events") import { captureEvent } from "../events"
const makeFiles = require("./makeFiles") import * as makeFiles from "./makeFiles"
const axios = require("axios") import { parseEnv } from "../utils"
const { parseEnv } = require("../utils") import { checkDockerConfigured, downloadDockerCompose } from "./utils"
const { checkDockerConfigured, downloadFiles } = require("./utils") import { watchPlugins } from "./watch"
const { watchPlugins } = require("./watch") import { generateUser } from "./genUser"
const { generateUser } = require("./genUser") import fetch from "node-fetch"
const DO_USER_DATA_URL = "http://169.254.169.254/metadata/v1/user-data" const DO_USER_DATA_URL = "http://169.254.169.254/metadata/v1/user-data"
async function getInitConfig(type, isQuick, port) { async function getInitConfig(type: string, isQuick: boolean, port: number) {
const config = isQuick ? makeFiles.QUICK_CONFIG : {} const config: any = isQuick ? makeFiles.QUICK_CONFIG : {}
if (type === InitTypes.DIGITAL_OCEAN) { if (type === InitType.DIGITAL_OCEAN) {
try { try {
const output = await axios.get(DO_USER_DATA_URL) const output = await fetch(DO_USER_DATA_URL)
const response = parseEnv(output.data) const data = await output.text()
const response = parseEnv(data)
for (let [key, value] of Object.entries(makeFiles.ConfigMap)) { for (let [key, value] of Object.entries(makeFiles.ConfigMap)) {
if (response[key]) { if (response[key]) {
config[value] = response[key] config[value as string] = response[key]
} }
} }
} catch (err) { } catch (err) {
@ -32,7 +33,7 @@ async function getInitConfig(type, isQuick, port) {
return config return config
} }
exports.init = async opts => { export async function init(opts: any) {
let type, isSingle, watchDir, genUser, port, silent let type, isSingle, watchDir, genUser, port, silent
if (typeof opts === "string") { if (typeof opts === "string") {
type = opts type = opts
@ -44,7 +45,7 @@ exports.init = async opts => {
port = opts["port"] port = opts["port"]
silent = opts["silent"] silent = opts["silent"]
} }
const isQuick = type === InitTypes.QUICK || type === InitTypes.DIGITAL_OCEAN const isQuick = type === InitType.QUICK || type === InitType.DIGITAL_OCEAN
await checkDockerConfigured() await checkDockerConfigured()
if (!isQuick) { if (!isQuick) {
const shouldContinue = await confirmation( const shouldContinue = await confirmation(
@ -55,12 +56,12 @@ exports.init = async opts => {
return return
} }
} }
captureEvent(AnalyticsEvents.SelfHostInit, { captureEvent(AnalyticsEvent.SelfHostInit, {
type, type,
}) })
const config = await getInitConfig(type, isQuick, port) const config = await getInitConfig(type, isQuick, port)
if (!isSingle) { if (!isSingle) {
await downloadFiles() await downloadDockerCompose()
await makeFiles.makeEnv(config, silent) await makeFiles.makeEnv(config, silent)
} else { } else {
await makeFiles.makeSingleCompose(config, silent) await makeFiles.makeSingleCompose(config, silent)

View File

@ -1,15 +1,15 @@
const { number } = require("../questions") import { number } from "../questions"
const { success, stringifyToDotEnv } = require("../utils") import { success, stringifyToDotEnv } from "../utils"
const fs = require("fs") import fs from "fs"
const path = require("path") import path from "path"
import yaml from "yaml"
import { getAppService } from "./utils"
const randomString = require("randomstring") const randomString = require("randomstring")
const yaml = require("yaml")
const { getAppService } = require("./utils")
const SINGLE_IMAGE = "budibase/budibase:latest" const SINGLE_IMAGE = "budibase/budibase:latest"
const VOL_NAME = "budibase_data" const VOL_NAME = "budibase_data"
const COMPOSE_PATH = path.resolve("./docker-compose.yaml") export const COMPOSE_PATH = path.resolve("./docker-compose.yaml")
const ENV_PATH = path.resolve("./.env") export const ENV_PATH = path.resolve("./.env")
function getSecrets(opts = { single: false }) { function getSecrets(opts = { single: false }) {
const secrets = [ const secrets = [
@ -19,7 +19,7 @@ function getSecrets(opts = { single: false }) {
"REDIS_PASSWORD", "REDIS_PASSWORD",
"INTERNAL_API_KEY", "INTERNAL_API_KEY",
] ]
const obj = {} const obj: Record<string, string> = {}
secrets.forEach(secret => (obj[secret] = randomString.generate())) secrets.forEach(secret => (obj[secret] = randomString.generate()))
// setup couch creds separately // setup couch creds separately
if (opts && opts.single) { if (opts && opts.single) {
@ -32,7 +32,7 @@ function getSecrets(opts = { single: false }) {
return obj return obj
} }
function getSingleCompose(port) { function getSingleCompose(port: number) {
const singleComposeObj = { const singleComposeObj = {
version: "3", version: "3",
services: { services: {
@ -53,7 +53,7 @@ function getSingleCompose(port) {
return yaml.stringify(singleComposeObj) return yaml.stringify(singleComposeObj)
} }
function getEnv(port) { function getEnv(port: number) {
const partOne = stringifyToDotEnv({ const partOne = stringifyToDotEnv({
MAIN_PORT: port, MAIN_PORT: port,
}) })
@ -77,19 +77,21 @@ function getEnv(port) {
].join("\n") ].join("\n")
} }
exports.ENV_PATH = ENV_PATH export const ConfigMap = {
exports.COMPOSE_PATH = COMPOSE_PATH
module.exports.ConfigMap = {
MAIN_PORT: "port", MAIN_PORT: "port",
} }
module.exports.QUICK_CONFIG = { export const QUICK_CONFIG = {
key: "budibase", key: "budibase",
port: 10000, port: 10000,
} }
async function make(path, contentsFn, inputs = {}, silent) { async function make(
path: string,
contentsFn: Function,
inputs: any = {},
silent: boolean
) {
const port = const port =
inputs.port || inputs.port ||
(await number( (await number(
@ -107,15 +109,15 @@ async function make(path, contentsFn, inputs = {}, silent) {
} }
} }
module.exports.makeEnv = async (inputs = {}, silent) => { export async function makeEnv(inputs: any = {}, silent: boolean) {
return make(ENV_PATH, getEnv, inputs, silent) return make(ENV_PATH, getEnv, inputs, silent)
} }
module.exports.makeSingleCompose = async (inputs = {}, silent) => { export async function makeSingleCompose(inputs: any = {}, silent: boolean) {
return make(COMPOSE_PATH, getSingleCompose, inputs, silent) return make(COMPOSE_PATH, getSingleCompose, inputs, silent)
} }
module.exports.getEnvProperty = property => { export function getEnvProperty(property: string) {
const props = fs.readFileSync(ENV_PATH, "utf8").split(property) const props = fs.readFileSync(ENV_PATH, "utf8").split(property)
if (props[0].charAt(0) === "=") { if (props[0].charAt(0) === "=") {
property = props[0] property = props[0]
@ -125,7 +127,7 @@ module.exports.getEnvProperty = property => {
return property.split("=")[1].split("\n")[0] return property.split("=")[1].split("\n")[0]
} }
module.exports.getComposeProperty = property => { export function getComposeProperty(property: string) {
const { service } = getAppService(COMPOSE_PATH) const { service } = getAppService(COMPOSE_PATH)
if (property === "port" && Array.isArray(service.ports)) { if (property === "port" && Array.isArray(service.ports)) {
const port = service.ports[0] const port = service.ports[0]

View File

@ -1,14 +1,10 @@
const { import { checkDockerConfigured, checkInitComplete, handleError } from "./utils"
checkDockerConfigured, import { info, success } from "../utils"
checkInitComplete, import * as makeFiles from "./makeFiles"
handleError, import compose from "docker-compose"
} = require("./utils") import fs from "fs"
const { info, success } = require("../utils")
const makeFiles = require("./makeFiles")
const compose = require("docker-compose")
const fs = require("fs")
exports.start = async () => { export async function start() {
await checkDockerConfigured() await checkDockerConfigured()
checkInitComplete() checkInitComplete()
console.log( console.log(

View File

@ -1,12 +1,8 @@
const { import { checkDockerConfigured, checkInitComplete, handleError } from "./utils"
checkDockerConfigured, import { info } from "../utils"
checkInitComplete, import compose from "docker-compose"
handleError,
} = require("./utils")
const { info } = require("../utils")
const compose = require("docker-compose")
exports.status = async () => { export async function status() {
await checkDockerConfigured() await checkDockerConfigured()
checkInitComplete() checkInitComplete()
console.log(info("Budibase status")) console.log(info("Budibase status"))

View File

@ -1,12 +1,8 @@
const { import { checkDockerConfigured, checkInitComplete, handleError } from "./utils"
checkDockerConfigured, import { info, success } from "../utils"
checkInitComplete, import compose from "docker-compose"
handleError,
} = require("./utils")
const { info, success } = require("../utils")
const compose = require("docker-compose")
exports.stop = async () => { export async function stop() {
await checkDockerConfigured() await checkDockerConfigured()
checkInitComplete() checkInitComplete()
console.log(info("Stopping services, this may take a moment.")) console.log(info("Stopping services, this may take a moment."))

View File

@ -0,0 +1,4 @@
export interface DockerCompose {
environment: Record<string, string>
volumes: string[]
}
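
For orientation: this small interface types the service object that updateDockerComposeService hands to its callback (see hosting/genUser.ts and hosting/watch.ts elsewhere in this diff). A minimal object satisfying it, with purely illustrative values rather than anything taken from the PR, would be:

    import { DockerCompose } from "./types"

    // illustrative values only - mirrors the fields watch.ts mutates
    const service: DockerCompose = {
      environment: { PLUGINS_DIR: "/plugins" },
      volumes: ["./plugins:/plugins"],
    }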

View File

@ -1,20 +1,20 @@
const { import {
checkDockerConfigured, checkDockerConfigured,
checkInitComplete, checkInitComplete,
downloadFiles, downloadDockerCompose,
handleError, handleError,
getServices, getServices,
} = require("./utils") } from "./utils"
const { confirmation } = require("../questions") import { confirmation } from "../questions"
const compose = require("docker-compose") import compose from "docker-compose"
const { COMPOSE_PATH } = require("./makeFiles") import { COMPOSE_PATH } from "./makeFiles"
const { info, success } = require("../utils") import { info, success } from "../utils"
const { start } = require("./start") import { start } from "./start"
const BB_COMPOSE_SERVICES = ["app-service", "worker-service", "proxy-service"] const BB_COMPOSE_SERVICES = ["app-service", "worker-service", "proxy-service"]
const BB_SINGLE_SERVICE = ["budibase"] const BB_SINGLE_SERVICE = ["budibase"]
exports.update = async () => { export async function update() {
const { services } = getServices(COMPOSE_PATH) const { services } = getServices(COMPOSE_PATH)
const isSingle = Object.keys(services).length === 1 const isSingle = Object.keys(services).length === 1
await checkDockerConfigured() await checkDockerConfigured()
@ -23,7 +23,7 @@ exports.update = async () => {
!isSingle && !isSingle &&
(await confirmation("Do you wish to update you docker-compose.yaml?")) (await confirmation("Do you wish to update you docker-compose.yaml?"))
) { ) {
await downloadFiles() await downloadDockerCompose()
} }
await handleError(async () => { await handleError(async () => {
const status = await compose.ps() const status = await compose.ps()

View File

@ -1,24 +1,24 @@
const { lookpath } = require("lookpath") import { lookpath } from "lookpath"
const fs = require("fs") import fs from "fs"
const makeFiles = require("./makeFiles") import * as makeFiles from "./makeFiles"
const { logErrorToFile, downloadFile, error } = require("../utils") import { logErrorToFile, downloadFile, error } from "../utils"
const yaml = require("yaml") import yaml from "yaml"
import { DockerCompose } from "./types"
const ERROR_FILE = "docker-error.log" const ERROR_FILE = "docker-error.log"
const FILE_URLS = [ const COMPOSE_URL =
"https://raw.githubusercontent.com/Budibase/budibase/master/hosting/docker-compose.yaml", "https://raw.githubusercontent.com/Budibase/budibase/master/hosting/docker-compose.yaml"
]
exports.downloadFiles = async () => { export async function downloadDockerCompose() {
const promises = [] const fileName = COMPOSE_URL.split("/").slice(-1)[0]
for (let url of FILE_URLS) { try {
const fileName = url.split("/").slice(-1)[0] await downloadFile(COMPOSE_URL, `./${fileName}`)
promises.push(downloadFile(url, `./${fileName}`)) } catch (err) {
console.error(error(`Failed to retrieve compose file - ${err}`))
} }
await Promise.all(promises)
} }
exports.checkDockerConfigured = async () => { export async function checkDockerConfigured() {
const error = const error =
"docker/docker-compose has not been installed, please follow instructions at: https://docs.budibase.com/docs/docker-compose" "docker/docker-compose has not been installed, please follow instructions at: https://docs.budibase.com/docs/docker-compose"
const docker = await lookpath("docker") const docker = await lookpath("docker")
@ -28,7 +28,7 @@ exports.checkDockerConfigured = async () => {
} }
} }
exports.checkInitComplete = () => { export function checkInitComplete() {
if ( if (
!fs.existsSync(makeFiles.ENV_PATH) && !fs.existsSync(makeFiles.ENV_PATH) &&
!fs.existsSync(makeFiles.COMPOSE_PATH) !fs.existsSync(makeFiles.COMPOSE_PATH)
@ -37,10 +37,10 @@ exports.checkInitComplete = () => {
} }
} }
exports.handleError = async func => { export async function handleError(func: Function) {
try { try {
await func() await func()
} catch (err) { } catch (err: any) {
if (err && err.err) { if (err && err.err) {
logErrorToFile(ERROR_FILE, err.err) logErrorToFile(ERROR_FILE, err.err)
} }
@ -48,14 +48,14 @@ exports.handleError = async func => {
} }
} }
exports.getServices = path => { export function getServices(path: string) {
const dockerYaml = fs.readFileSync(path, "utf8") const dockerYaml = fs.readFileSync(path, "utf8")
const parsedYaml = yaml.parse(dockerYaml) const parsedYaml = yaml.parse(dockerYaml)
return { yaml: parsedYaml, services: parsedYaml.services } return { yaml: parsedYaml, services: parsedYaml.services }
} }
exports.getAppService = path => { export function getAppService(path: string) {
const { yaml, services } = exports.getServices(path), const { yaml, services } = getServices(path),
serviceList = Object.keys(services) serviceList = Object.keys(services)
let service let service
if (services["app-service"]) { if (services["app-service"]) {
@ -66,14 +66,17 @@ exports.getAppService = path => {
return { yaml, service } return { yaml, service }
} }
exports.updateDockerComposeService = updateFn => { export function updateDockerComposeService(
// eslint-disable-next-line no-unused-vars
updateFn: (service: DockerCompose) => void
) {
const opts = ["docker-compose.yaml", "docker-compose.yml"] const opts = ["docker-compose.yaml", "docker-compose.yml"]
const dockerFilePath = opts.find(name => fs.existsSync(name)) const dockerFilePath = opts.find(name => fs.existsSync(name))
if (!dockerFilePath) { if (!dockerFilePath) {
console.log(error("Unable to locate docker-compose YAML.")) console.log(error("Unable to locate docker-compose YAML."))
return return
} }
const { yaml: parsedYaml, service } = exports.getAppService(dockerFilePath) const { yaml: parsedYaml, service } = getAppService(dockerFilePath)
if (!service) { if (!service) {
console.log( console.log(
error( error(

View File

@ -1,9 +1,10 @@
const { resolve } = require("path") import { resolve } from "path"
const fs = require("fs") import fs from "fs"
const { error, success } = require("../utils") import { error, success } from "../utils"
const { updateDockerComposeService } = require("./utils") import { updateDockerComposeService } from "./utils"
import { DockerCompose } from "./types"
exports.watchPlugins = async (pluginPath, silent) => { export async function watchPlugins(pluginPath: string, silent: boolean) {
const PLUGIN_PATH = "/plugins" const PLUGIN_PATH = "/plugins"
// get absolute path // get absolute path
pluginPath = resolve(pluginPath) pluginPath = resolve(pluginPath)
@ -15,7 +16,7 @@ exports.watchPlugins = async (pluginPath, silent) => {
) )
return return
} }
updateDockerComposeService(service => { updateDockerComposeService((service: DockerCompose) => {
// set environment variable // set environment variable
service.environment["PLUGINS_DIR"] = PLUGIN_PATH service.environment["PLUGINS_DIR"] = PLUGIN_PATH
// add volumes to parsed yaml // add volumes to parsed yaml

View File

@ -1,10 +1,10 @@
#!/usr/bin/env node #!/usr/bin/env node
require("./prebuilds") import "./prebuilds"
require("./environment") import "./environment"
import { getCommands } from "./options"
import { Command } from "commander"
import { getHelpDescription } from "./utils"
const json = require("../package.json") const json = require("../package.json")
const { getCommands } = require("./options")
const { Command } = require("commander")
const { getHelpDescription } = require("./utils")
// add hosting config // add hosting config
async function init() { async function init() {

View File

@ -1,8 +0,0 @@
const analytics = require("./analytics")
const hosting = require("./hosting")
const backups = require("./backups")
const plugins = require("./plugins")
exports.getCommands = () => {
return [hosting.command, analytics.command, backups.command, plugins.command]
}

View File

@ -0,0 +1,8 @@
import analytics from "./analytics"
import hosting from "./hosting"
import backups from "./backups"
import plugins from "./plugins"
export function getCommands() {
return [hosting, analytics, backups, plugins]
}

View File

@ -1,18 +1,22 @@
const Command = require("../structures/Command") import { Command } from "../structures/Command"
const { CommandWords, AnalyticsEvents, InitTypes } = require("../constants") import { CommandWord, AnalyticsEvent, InitType } from "../constants"
const { getSkeleton, fleshOutSkeleton } = require("./skeleton") import { getSkeleton, fleshOutSkeleton } from "./skeleton"
const questions = require("../questions") import * as questions from "../questions"
const fs = require("fs") import fs from "fs"
const { PLUGIN_TYPE_ARR } = require("@budibase/types") import { PluginType, PLUGIN_TYPE_ARR } from "@budibase/types"
const { plugins } = require("@budibase/backend-core") import { plugins } from "@budibase/backend-core"
const { runPkgCommand } = require("../exec") import { runPkgCommand } from "../exec"
const { join } = require("path") import { join } from "path"
const { success, error, info, moveDirectory } = require("../utils") import { success, error, info, moveDirectory } from "../utils"
const { captureEvent } = require("../events") import { captureEvent } from "../events"
import { GENERATED_USER_EMAIL } from "../constants"
import { init as hostingInit } from "../hosting/init"
import { start as hostingStart } from "../hosting/start"
const fp = require("find-free-port") const fp = require("find-free-port")
const { GENERATED_USER_EMAIL } = require("../constants")
const { init: hostingInit } = require("../hosting/init") type PluginOpts = {
const { start: hostingStart } = require("../hosting/start") init?: PluginType
}
function checkInPlugin() { function checkInPlugin() {
if (!fs.existsSync("package.json")) { if (!fs.existsSync("package.json")) {
@ -27,7 +31,7 @@ function checkInPlugin() {
} }
} }
async function askAboutTopLevel(name) { async function askAboutTopLevel(name: string) {
const files = fs.readdirSync(process.cwd()) const files = fs.readdirSync(process.cwd())
// we are in an empty git repo, don't ask // we are in an empty git repo, don't ask
if (files.find(file => file === ".git")) { if (files.find(file => file === ".git")) {
@ -45,8 +49,8 @@ async function askAboutTopLevel(name) {
} }
} }
async function init(opts) { async function init(opts: PluginOpts) {
const type = opts["init"] || opts const type = opts["init"] || (opts as PluginType)
if (!type || !PLUGIN_TYPE_ARR.includes(type)) { if (!type || !PLUGIN_TYPE_ARR.includes(type)) {
console.log( console.log(
error( error(
@ -82,7 +86,7 @@ async function init(opts) {
} else { } else {
console.log(info(`Plugin created in directory "${name}"`)) console.log(info(`Plugin created in directory "${name}"`))
} }
captureEvent(AnalyticsEvents.PluginInit, { captureEvent(AnalyticsEvent.PluginInit, {
type, type,
name, name,
description, description,
@ -109,7 +113,7 @@ async function verify() {
version = pkgJson.version version = pkgJson.version
plugins.validate(schemaJson) plugins.validate(schemaJson)
return { name, version } return { name, version }
} catch (err) { } catch (err: any) {
if (err && err.message && err.message.includes("not valid JSON")) { if (err && err.message && err.message.includes("not valid JSON")) {
console.log(error(`schema.json is not valid JSON: ${err.message}`)) console.log(error(`schema.json is not valid JSON: ${err.message}`))
} else { } else {
@ -120,7 +124,7 @@ async function verify() {
async function build() { async function build() {
const verified = await verify() const verified = await verify()
if (!verified.name) { if (!verified?.name) {
return return
} }
console.log(success("Verified!")) console.log(success("Verified!"))
@ -132,7 +136,7 @@ async function build() {
async function watch() { async function watch() {
const verified = await verify() const verified = await verify()
if (!verified.name) { if (!verified?.name) {
return return
} }
const output = join("dist", `${verified.name}-${verified.version}.tar.gz`) const output = join("dist", `${verified.name}-${verified.version}.tar.gz`)
@ -150,7 +154,7 @@ async function dev() {
const [port] = await fp(10000) const [port] = await fp(10000)
const password = "admin" const password = "admin"
await hostingInit({ await hostingInit({
init: InitTypes.QUICK, init: InitType.QUICK,
single: true, single: true,
watchPluginDir: pluginDir, watchPluginDir: pluginDir,
genUser: password, genUser: password,
@ -168,7 +172,7 @@ async function dev() {
console.log(success("Password: ") + info(password)) console.log(success("Password: ") + info(password))
} }
const command = new Command(`${CommandWords.PLUGIN}`) export default new Command(`${CommandWord.PLUGIN}`)
.addHelp( .addHelp(
"Custom plugins for Budibase, init, build and verify your components and datasources with this tool." "Custom plugins for Budibase, init, build and verify your components and datasources with this tool."
) )
@ -192,5 +196,3 @@ const command = new Command(`${CommandWords.PLUGIN}`)
"Run a development environment which automatically watches the current directory.", "Run a development environment which automatically watches the current directory.",
dev dev
) )
exports.command = command

View File

@ -1,21 +1,21 @@
const fetch = require("node-fetch") import fetch from "node-fetch"
import fs from "fs"
import os from "os"
import { join } from "path"
import { processStringSync } from "@budibase/string-templates"
const download = require("download") const download = require("download")
const fs = require("fs")
const os = require("os")
const { join } = require("path")
const tar = require("tar") const tar = require("tar")
const { processStringSync } = require("@budibase/string-templates")
const HBS_FILES = ["package.json.hbs", "schema.json.hbs", "README.md.hbs"] const HBS_FILES = ["package.json.hbs", "schema.json.hbs", "README.md.hbs"]
async function getSkeletonUrl(type) { async function getSkeletonUrl(type: string) {
const resp = await fetch( const resp = await fetch(
"https://api.github.com/repos/budibase/budibase-skeleton/releases/latest" "https://api.github.com/repos/budibase/budibase-skeleton/releases/latest"
) )
if (resp.status >= 300) { if (resp.status >= 300) {
throw new Error("Failed to retrieve skeleton metadata") throw new Error("Failed to retrieve skeleton metadata")
} }
const json = await resp.json() const json = (await resp.json()) as { assets: any[] }
for (let asset of json["assets"]) { for (let asset of json["assets"]) {
if (asset.name && asset.name.includes(type)) { if (asset.name && asset.name.includes(type)) {
return asset["browser_download_url"] return asset["browser_download_url"]
@ -24,7 +24,7 @@ async function getSkeletonUrl(type) {
throw new Error("No skeleton found in latest release.") throw new Error("No skeleton found in latest release.")
} }
exports.getSkeleton = async (type, name) => { export async function getSkeleton(type: string, name: string) {
const url = await getSkeletonUrl(type) const url = await getSkeletonUrl(type)
const tarballFile = join(os.tmpdir(), "skeleton.tar.gz") const tarballFile = join(os.tmpdir(), "skeleton.tar.gz")
@ -40,7 +40,12 @@ exports.getSkeleton = async (type, name) => {
fs.rmSync(tarballFile) fs.rmSync(tarballFile)
} }
exports.fleshOutSkeleton = async (type, name, description, version) => { export async function fleshOutSkeleton(
type: string,
name: string,
description: string,
version: string
) {
for (let file of HBS_FILES) { for (let file of HBS_FILES) {
const oldFile = join(name, file), const oldFile = join(name, file),
newFile = join(name, file.substring(0, file.length - 4)) newFile = join(name, file.substring(0, file.length - 4))

View File

@ -1,7 +1,8 @@
const os = require("os") import os from "os"
const { join } = require("path") import { join } from "path"
const fs = require("fs") import fs from "fs"
const { error } = require("./utils") import { error } from "./utils"
const PREBUILDS = "prebuilds" const PREBUILDS = "prebuilds"
const ARCH = `${os.platform()}-${os.arch()}` const ARCH = `${os.platform()}-${os.arch()}`
const PREBUILD_DIR = join(process.execPath, "..", PREBUILDS, ARCH) const PREBUILD_DIR = join(process.execPath, "..", PREBUILDS, ARCH)
@ -26,8 +27,8 @@ function checkForBinaries() {
} }
} }
function cleanup(evt) { function cleanup(evt?: number) {
if (!isNaN(evt)) { if (evt && !isNaN(evt)) {
return return
} }
if (evt) { if (evt) {

View File

@ -1,6 +1,6 @@
const inquirer = require("inquirer") const inquirer = require("inquirer")
exports.confirmation = async question => { export async function confirmation(question: string) {
const config = { const config = {
type: "confirm", type: "confirm",
message: question, message: question,
@ -10,8 +10,8 @@ exports.confirmation = async question => {
return (await inquirer.prompt(config)).confirmation return (await inquirer.prompt(config)).confirmation
} }
exports.string = async (question, defaultString = null) => { export async function string(question: string, defaultString?: string) {
const config = { const config: any = {
type: "input", type: "input",
name: "string", name: "string",
message: question, message: question,
@ -22,12 +22,12 @@ exports.string = async (question, defaultString = null) => {
return (await inquirer.prompt(config)).string return (await inquirer.prompt(config)).string
} }
exports.number = async (question, defaultNumber) => { export async function number(question: string, defaultNumber?: number) {
const config = { const config: any = {
type: "input", type: "input",
name: "number", name: "number",
message: question, message: question,
validate: value => { validate: (value: string) => {
let valid = !isNaN(parseFloat(value)) let valid = !isNaN(parseFloat(value))
return valid || "Please enter a number" return valid || "Please enter a number"
}, },

View File

@ -1,19 +1,31 @@
const { import {
getSubHelpDescription, getSubHelpDescription,
getHelpDescription, getHelpDescription,
error, error,
capitaliseFirstLetter, capitaliseFirstLetter,
} = require("../utils") } from "../utils"
class Command { type CommandOpt = {
constructor(command, func = null) { command: string
help: string
func?: Function
extras: any[]
}
export class Command {
command: string
opts: CommandOpt[]
func?: Function
help?: string
constructor(command: string, func?: Function) {
// if there are options, need to just get the command name // if there are options, need to just get the command name
this.command = command this.command = command
this.opts = [] this.opts = []
this.func = func this.func = func
} }
convertToCommander(lookup) { convertToCommander(lookup: string) {
const parts = lookup.toLowerCase().split("-") const parts = lookup.toLowerCase().split("-")
// camel case, separate out first // camel case, separate out first
const first = parts.shift() const first = parts.shift()
@ -22,21 +34,26 @@ class Command {
.join("") .join("")
} }
addHelp(help) { addHelp(help: string) {
this.help = help this.help = help
return this return this
} }
addSubOption(command, help, func, extras = []) { addSubOption(
command: string,
help: string,
func?: Function,
extras: any[] = []
) {
this.opts.push({ command, help, func, extras }) this.opts.push({ command, help, func, extras })
return this return this
} }
configure(program) { configure(program: any) {
const thisCmd = this const thisCmd = this
let command = program.command(thisCmd.command) let command = program.command(thisCmd.command)
if (this.help) { if (this.help) {
command = command.description(getHelpDescription(thisCmd.help)) command = command.description(getHelpDescription(thisCmd.help!))
} }
for (let opt of thisCmd.opts) { for (let opt of thisCmd.opts) {
command = command.option(opt.command, getSubHelpDescription(opt.help)) command = command.option(opt.command, getSubHelpDescription(opt.help))
@ -45,7 +62,7 @@ class Command {
"--help", "--help",
getSubHelpDescription(`Get help with ${this.command} options`) getSubHelpDescription(`Get help with ${this.command} options`)
) )
command.action(async options => { command.action(async (options: Record<string, string>) => {
try { try {
let executed = false, let executed = false,
found = false found = false
@ -53,7 +70,7 @@ class Command {
let lookup = opt.command.split(" ")[0].replace("--", "") let lookup = opt.command.split(" ")[0].replace("--", "")
// need to handle how commander converts watch-plugin-dir to watchPluginDir // need to handle how commander converts watch-plugin-dir to watchPluginDir
lookup = this.convertToCommander(lookup) lookup = this.convertToCommander(lookup)
found = !executed && options[lookup] found = !executed && !!options[lookup]
if (found && opt.func) { if (found && opt.func) {
const input = const input =
Object.keys(options).length > 1 ? options : options[lookup] Object.keys(options).length > 1 ? options : options[lookup]
@ -69,11 +86,9 @@ class Command {
console.log(error(`Unknown ${this.command} option.`)) console.log(error(`Unknown ${this.command} option.`))
command.help() command.help()
} }
} catch (err) { } catch (err: any) {
console.log(error(err)) console.log(error(err))
} }
}) })
} }
} }
module.exports = Command
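
To show how the now-typed builder is consumed, here is a hedged sketch of a hypothetical command written in the same fluent style as the converted analytics, backups, hosting and plugins modules (the option and messages below are invented for illustration and are not part of this PR):

    import { Command } from "../structures/Command"
    import { CommandWord } from "../constants"

    // hypothetical example command - default-exported so options.ts can collect it
    export default new Command(`${CommandWord.HELP}`)
      .addHelp("Describe what this command group does.")
      .addSubOption("--status", "Print an example status message.", async () => {
        console.log("ok")
      })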

View File

@ -3,7 +3,9 @@ const path = require("path")
const os = require("os") const os = require("os")
const { error } = require("../utils") const { error } = require("../utils")
class ConfigManager { export class ConfigManager {
path: string
constructor() { constructor() {
this.path = path.join(os.homedir(), ".budibase.json") this.path = path.join(os.homedir(), ".budibase.json")
if (!fs.existsSync(this.path)) { if (!fs.existsSync(this.path)) {
@ -24,26 +26,24 @@ class ConfigManager {
} }
} }
set config(json) { set config(json: any) {
fs.writeFileSync(this.path, JSON.stringify(json)) fs.writeFileSync(this.path, JSON.stringify(json))
} }
getValue(key) { getValue(key: string) {
return this.config[key] return this.config[key]
} }
setValue(key, value) { setValue(key: string, value: any) {
this.config = { this.config = {
...this.config, ...this.config,
[key]: value, [key]: value,
} }
} }
removeKey(key) { removeKey(key: string) {
const updated = { ...this.config } const updated = { ...this.config }
delete updated[key] delete updated[key]
this.config = updated this.config = updated
} }
} }
module.exports = ConfigManager

View File

@ -1,106 +0,0 @@
const chalk = require("chalk")
const fs = require("fs")
const axios = require("axios")
const path = require("path")
const progress = require("cli-progress")
const { join } = require("path")
exports.downloadFile = async (url, filePath) => {
filePath = path.resolve(filePath)
const writer = fs.createWriteStream(filePath)
const response = await axios({
url,
method: "GET",
responseType: "stream",
})
response.data.pipe(writer)
return new Promise((resolve, reject) => {
writer.on("finish", resolve)
writer.on("error", reject)
})
}
exports.httpCall = async (url, method) => {
const response = await axios({
url,
method,
})
return response.data
}
exports.getHelpDescription = string => {
return chalk.cyan(string)
}
exports.getSubHelpDescription = string => {
return chalk.green(string)
}
exports.error = error => {
return chalk.red(`Error - ${error}`)
}
exports.success = success => {
return chalk.green(success)
}
exports.info = info => {
return chalk.cyan(info)
}
exports.logErrorToFile = (file, error) => {
fs.writeFileSync(path.resolve(`./${file}`), `Budibase Error\n${error}`)
}
exports.parseEnv = env => {
const lines = env.toString().split("\n")
let result = {}
for (const line of lines) {
const match = line.match(/^([^=:#]+?)[=:](.*)/)
if (match) {
result[match[1].trim()] = match[2].trim()
}
}
return result
}
exports.progressBar = total => {
const bar = new progress.SingleBar({}, progress.Presets.shades_classic)
bar.start(total, 0)
return bar
}
exports.checkSlashesInUrl = url => {
return url.replace(/(https?:\/\/)|(\/)+/g, "$1$2")
}
exports.moveDirectory = (oldPath, newPath) => {
const files = fs.readdirSync(oldPath)
// check any file exists already
for (let file of files) {
if (fs.existsSync(join(newPath, file))) {
throw new Error(
"Unable to remove top level directory - some skeleton files already exist."
)
}
}
for (let file of files) {
fs.renameSync(join(oldPath, file), join(newPath, file))
}
fs.rmdirSync(oldPath)
}
exports.capitaliseFirstLetter = str => {
return str.charAt(0).toUpperCase() + str.slice(1)
}
exports.stringifyToDotEnv = json => {
let str = ""
for (let [key, value] of Object.entries(json)) {
str += `${key}=${value}\n`
}
return str
}

packages/cli/src/utils.ts (new file, 112 lines)
View File

@ -0,0 +1,112 @@
import chalk from "chalk"
import fs from "fs"
import path from "path"
import { join } from "path"
import fetch from "node-fetch"
const progress = require("cli-progress")
export function downloadFile(url: string, filePath: string) {
return new Promise((resolve, reject) => {
filePath = path.resolve(filePath)
fetch(url, {
method: "GET",
})
.then(response => {
const writer = fs.createWriteStream(filePath)
if (response.body) {
response.body.pipe(writer)
response.body.on("end", resolve)
response.body.on("error", reject)
} else {
throw new Error(
`Unable to retrieve docker-compose file - ${response.status}`
)
}
})
.catch(err => {
throw err
})
})
}
export async function httpCall(url: string, method: string) {
const response = await fetch(url, {
method,
})
return response.body
}
export function getHelpDescription(str: string) {
return chalk.cyan(str)
}
export function getSubHelpDescription(str: string) {
return chalk.green(str)
}
export function error(err: string | number) {
process.exitCode = -1
return chalk.red(`Error - ${err}`)
}
export function success(str: string) {
return chalk.green(str)
}
export function info(str: string) {
return chalk.cyan(str)
}
export function logErrorToFile(file: string, error: string) {
fs.writeFileSync(path.resolve(`./${file}`), `Budibase Error\n${error}`)
}
export function parseEnv(env: string) {
const lines = env.toString().split("\n")
let result: Record<string, string> = {}
for (const line of lines) {
const match = line.match(/^([^=:#]+?)[=:](.*)/)
if (match) {
result[match[1].trim()] = match[2].trim()
}
}
return result
}
export function progressBar(total: number) {
const bar = new progress.SingleBar({}, progress.Presets.shades_classic)
bar.start(total, 0)
return bar
}
export function checkSlashesInUrl(url: string) {
return url.replace(/(https?:\/\/)|(\/)+/g, "$1$2")
}
export function moveDirectory(oldPath: string, newPath: string) {
const files = fs.readdirSync(oldPath)
// check any file exists already
for (let file of files) {
if (fs.existsSync(join(newPath, file))) {
throw new Error(
"Unable to remove top level directory - some skeleton files already exist."
)
}
}
for (let file of files) {
fs.renameSync(join(oldPath, file), join(newPath, file))
}
fs.rmdirSync(oldPath)
}
export function capitaliseFirstLetter(str: string) {
return str.charAt(0).toUpperCase() + str.slice(1)
}
export function stringifyToDotEnv(json: Record<string, string | number>) {
let str = ""
for (let [key, value] of Object.entries(json)) {
str += `${key}=${value}\n`
}
return str
}
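
As a quick aid for reading parseEnv above: the regex accepts either "=" or ":" as the key/value separator, and lines beginning with "#" simply fail the match and are skipped. An assumed example (not from the PR) of input and output:

    // both separator styles parse to the same record; the commented line is ignored
    parseEnv("MAIN_PORT=10000\nCOUCH_DB_USER: admin\n# local only")
    // -> { MAIN_PORT: "10000", COUCH_DB_USER: "admin" }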

packages/cli/start.sh (new executable file, 3 lines)
View File

@ -0,0 +1,3 @@
#!/bin/bash
dir="$(dirname -- "$(readlink -f "${BASH_SOURCE}")")"
${dir}/node_modules/ts-node/dist/bin.js ${dir}/src/index.ts $@

View File

@ -0,0 +1,24 @@
{
"compilerOptions": {
"target": "es6",
"module": "commonjs",
"lib": ["es2020"],
"strict": true,
"noImplicitAny": true,
"esModuleInterop": true,
"resolveJsonModule": true,
"incremental": true,
"types": [ "node", "jest" ],
"outDir": "dist",
"skipLibCheck": true
},
"include": [
"src/**/*"
],
"exclude": [
"node_modules",
"dist",
"**/*.spec.ts",
"**/*.spec.js"
]
}

View File

@ -0,0 +1,32 @@
{
"extends": "./tsconfig.build.json",
"compilerOptions": {
"composite": true,
"declaration": true,
"sourceMap": true,
"baseUrl": ".",
"paths": {
"@budibase/types": ["../types/src"],
"@budibase/backend-core": ["../backend-core/src"],
"@budibase/backend-core/*": ["../backend-core/*"],
"@budibase/pro": ["../../../budibase-pro/packages/pro/src"]
}
},
"ts-node": {
"require": ["tsconfig-paths/register"],
"swc": true
},
"references": [
{ "path": "../types" },
{ "path": "../backend-core" },
{ "path": "../../../budibase-pro/packages/pro" }
],
"include": [
"src/**/*",
"package.json"
],
"exclude": [
"node_modules",
"dist"
]
}

File diff suppressed because it is too large

View File

@ -0,0 +1,21 @@
import { Event } from "../events"
export enum CommandWord {
BACKUPS = "backups",
HOSTING = "hosting",
ANALYTICS = "analytics",
HELP = "help",
PLUGIN = "plugins",
}
export enum InitType {
QUICK = "quick",
DIGITAL_OCEAN = "do",
}
export const AnalyticsEvent = {
OptOut: "analytics:opt:out",
OptIn: "analytics:opt:in",
SelfHostInit: "hosting:init",
PluginInit: Event.PLUGIN_INIT,
}

View File

@ -0,0 +1 @@
export * from "./constants"

View File

@ -15,3 +15,4 @@ export * from "./environmentVariables"
export * from "./auditLogs" export * from "./auditLogs"
export * from "./sso" export * from "./sso"
export * from "./user" export * from "./user"
export * from "./cli"