Merge pull request #6584 from Budibase/labday/backups

CLI backups functionality
Michael Drury 2022-07-19 11:13:59 +01:00 committed by GitHub
commit 4e38662dab
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
21 changed files with 1514 additions and 3440 deletions

View File

@@ -11,10 +11,11 @@ services:
       - minio_data:/data
     ports:
       - "${MINIO_PORT}:9000"
+      - "9001:9001"
     environment:
       MINIO_ACCESS_KEY: ${MINIO_ACCESS_KEY}
       MINIO_SECRET_KEY: ${MINIO_SECRET_KEY}
-    command: server /data
+    command: server /data --console-address ":9001"
     healthcheck:
       test: ["CMD", "curl", "-f", "http://localhost:9000/minio/health/live"]
       interval: 30s

View File

@@ -63,7 +63,7 @@ services:
       MINIO_ACCESS_KEY: ${MINIO_ACCESS_KEY}
       MINIO_SECRET_KEY: ${MINIO_SECRET_KEY}
       MINIO_BROWSER: "off"
-    command: server /data
+    command: server /data --console-address ":9001"
     healthcheck:
       test: ["CMD", "curl", "-f", "http://localhost:9000/minio/health/live"]
       interval: 30s

View File

@@ -102,6 +102,13 @@ exports.getPouch = (opts = {}) => {
     }
   }
+  if (opts.onDisk) {
+    POUCH_DB_DEFAULTS = {
+      prefix: undefined,
+      adapter: "leveldb",
+    }
+  }

   if (opts.replication) {
     const replicationStream = require("pouchdb-replication-stream")
     PouchDB.plugin(replicationStream.plugin)

View File

@@ -75,9 +75,11 @@ export const ObjectStore = (bucket: any) => {
     s3ForcePathStyle: true,
     signatureVersion: "v4",
     apiVersion: "2006-03-01",
-    params: {
-      Bucket: sanitizeBucket(bucket),
-    },
   }
+  if (bucket) {
+    config.params = {
+      Bucket: sanitizeBucket(bucket),
+    }
+  }
   if (env.MINIO_URL) {
     config.endpoint = env.MINIO_URL
@@ -292,6 +294,7 @@ export const uploadDirectory = async (
     }
   }
   await Promise.all(uploads)
+  return files
 }

 exports.downloadTarballDirect = async (url: string, path: string) => {

File diff suppressed because it is too large

View File

@@ -4,3 +4,4 @@ nginx.conf
 build/
 docker-error.log
 envoy.yaml
+*.tar.gz

View File

@@ -9,28 +9,43 @@
   "author": "Budibase",
   "license": "GPL-3.0",
   "scripts": {
-    "build": "pkg . --out-path build"
+    "prebuild": "rm -rf prebuilds 2> /dev/null && cp -r node_modules/leveldown/prebuilds prebuilds",
+    "build": "yarn prebuild && renamer --find .node --replace .fake 'prebuilds/**' && pkg . --out-path build && yarn postbuild",
+    "postbuild": "rm -rf prebuilds 2> /dev/null"
   },
   "pkg": {
     "targets": [
-      "node14-linux",
-      "node14-win",
-      "node14-macos"
+      "node16-linux",
+      "node16-win",
+      "node16-macos"
+    ],
+    "assets": [
+      "node_modules/@budibase/backend-core/dist/**/*",
+      "prebuilds/**/*"
     ],
     "outputPath": "build"
   },
   "dependencies": {
-    "axios": "^0.21.1",
-    "chalk": "^4.1.0",
-    "commander": "^7.1.0",
-    "docker-compose": "^0.23.6",
-    "inquirer": "^8.0.0",
-    "lookpath": "^1.1.0",
-    "pkg": "^5.3.0",
+    "@budibase/backend-core": "^1.1.15-alpha.1",
+    "axios": "0.21.1",
+    "chalk": "4.1.0",
+    "cli-progress": "3.11.2",
+    "commander": "7.1.0",
+    "docker-compose": "0.23.6",
+    "dotenv": "16.0.1",
+    "inquirer": "8.0.0",
+    "lookpath": "1.1.0",
+    "node-fetch": "2",
+    "pkg": "5.7.0",
     "posthog-node": "1.0.7",
-    "randomstring": "^1.1.5"
+    "pouchdb": "7.3.0",
+    "pouchdb-replication-stream": "1.2.9",
+    "randomstring": "1.1.5",
+    "tar": "6.1.11"
   },
   "devDependencies": {
-    "eslint": "^7.20.0"
+    "copyfiles": "^2.4.1",
+    "eslint": "^7.20.0",
+    "renamer": "^4.0.0"
   }
 }

View File

@@ -0,0 +1,121 @@
const Command = require("../structures/Command")
const { CommandWords } = require("../constants")
const fs = require("fs")
const { join } = require("path")
const { getAllDbs } = require("../core/db")
const tar = require("tar")
const { progressBar } = require("../utils")
const {
TEMP_DIR,
COUCH_DIR,
MINIO_DIR,
getConfig,
replication,
getPouches,
} = require("./utils")
const { exportObjects, importObjects } = require("./objectStore")

async function exportBackup(opts) {
const envFile = opts.env || undefined
let filename = opts["export"] || opts
if (typeof filename !== "string") {
filename = `backup-${new Date().toISOString()}.tar.gz`
}
const config = await getConfig(envFile)
const dbList = await getAllDbs(config["COUCH_DB_URL"])
const { Remote, Local } = getPouches(config)
if (fs.existsSync(TEMP_DIR)) {
fs.rmSync(TEMP_DIR, { recursive: true })
}
const couchDir = join(TEMP_DIR, COUCH_DIR)
fs.mkdirSync(TEMP_DIR)
fs.mkdirSync(couchDir)
console.log("CouchDB Export")
const bar = progressBar(dbList.length)
let count = 0
for (let db of dbList) {
bar.update(++count)
const remote = new Remote(db)
const local = new Local(join(TEMP_DIR, COUCH_DIR, db))
await replication(remote, local)
}
bar.stop()
console.log("S3 Export")
await exportObjects()
tar.create(
{
sync: true,
gzip: true,
file: filename,
cwd: join(TEMP_DIR),
},
[COUCH_DIR, MINIO_DIR]
)
fs.rmSync(TEMP_DIR, { recursive: true })
console.log(`Generated export file - ${filename}`)
}

async function importBackup(opts) {
const envFile = opts.env || undefined
const filename = opts["import"] || opts
const config = await getConfig(envFile)
if (!filename || !fs.existsSync(filename)) {
console.error("Cannot import without specifying a valid file to import")
process.exit(-1)
}
if (fs.existsSync(TEMP_DIR)) {
fs.rmSync(TEMP_DIR, { recursive: true })
}
fs.mkdirSync(TEMP_DIR)
tar.extract({
sync: true,
cwd: join(TEMP_DIR),
file: filename,
})
const { Remote, Local } = getPouches(config)
const dbList = fs.readdirSync(join(TEMP_DIR, COUCH_DIR))
console.log("CouchDB Import")
const bar = progressBar(dbList.length)
let count = 0
for (let db of dbList) {
bar.update(++count)
const remote = new Remote(db)
const local = new Local(join(TEMP_DIR, COUCH_DIR, db))
await replication(local, remote)
}
bar.stop()
console.log("MinIO Import")
await importObjects()
console.log("Import complete")
fs.rmSync(TEMP_DIR, { recursive: true })
}

async function pickOne(opts) {
if (opts["import"]) {
return importBackup(opts)
} else if (opts["export"]) {
return exportBackup(opts)
}
}

const command = new Command(`${CommandWords.BACKUPS}`)
.addHelp(
"Allows building backups of Budibase, as well as importing a backup to a new instance."
)
.addSubOption(
"--export [filename]",
"Export a backup from an existing Budibase installation.",
exportBackup
)
.addSubOption(
"--import [filename]",
"Import a backup to a new Budibase installation.",
importBackup
)
.addSubOption(
"--env [envFile]",
"Provide an environment variable file to configure the CLI.",
pickOne
)
exports.command = command
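
For reference, a hedged usage sketch of the new subcommand (assuming the packaged CLI binary is invoked as `budi`; the flags are the sub-options defined above, the filenames are illustrative):

budi backups --export backup-20220719.tar.gz --env .env
budi backups --import backup-20220719.tar.gz --env .env

Combining `--env` with `--export` or `--import` works because the Command change further down passes the full options object through, letting exportBackup/importBackup read `opts.env` alongside the filename.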

View File

@@ -0,0 +1,63 @@
const {
ObjectStoreBuckets,
ObjectStore,
retrieve,
uploadDirectory,
makeSureBucketExists,
} = require("@budibase/backend-core/objectStore")
const fs = require("fs")
const { join } = require("path")
const { TEMP_DIR, MINIO_DIR } = require("./utils")
const { progressBar } = require("../utils")
const bucketList = Object.values(ObjectStoreBuckets)

exports.exportObjects = async () => {
const path = join(TEMP_DIR, MINIO_DIR)
fs.mkdirSync(path)
let fullList = []
for (let bucket of bucketList) {
const client = ObjectStore(bucket)
try {
await client.headBucket().promise()
} catch (err) {
continue
}
const list = await client.listObjectsV2().promise()
fullList = fullList.concat(list.Contents.map(el => ({ ...el, bucket })))
}
const bar = progressBar(fullList.length)
let count = 0
for (let object of fullList) {
const filename = object.Key
const data = await retrieve(object.bucket, filename)
const possiblePath = filename.split("/")
if (possiblePath.length > 1) {
const dirs = possiblePath.slice(0, possiblePath.length - 1)
fs.mkdirSync(join(path, object.bucket, ...dirs), { recursive: true })
}
fs.writeFileSync(join(path, object.bucket, ...possiblePath), data)
bar.update(++count)
}
bar.stop()
}

exports.importObjects = async () => {
const path = join(TEMP_DIR, MINIO_DIR)
const buckets = fs.readdirSync(path)
let total = 0
buckets.forEach(bucket => {
const files = fs.readdirSync(join(path, bucket))
total += files.length
})
const bar = progressBar(total)
let count = 0
for (let bucket of buckets) {
const client = ObjectStore(bucket)
await makeSureBucketExists(client, bucket)
const files = await uploadDirectory(bucket, join(path, bucket), "/")
count += files.length
bar.update(count)
}
bar.stop()
}
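
For orientation, the on-disk layout the two helpers share under the temp directory (derived from the `join` calls above; bucket and key names are illustrative):

.temp/
  minio/
    <bucket>/
      <object-key, nested in sub-directories when the key contains "/">

exportObjects writes each object under its bucket directory, creating intermediate directories for nested keys; importObjects mirrors the tree back, relying on the `return files` change to uploadDirectory in backend-core above so the progress bar can advance by `files.length`.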

View File

@@ -0,0 +1,88 @@
const dotenv = require("dotenv")
const fs = require("fs")
const { string } = require("../questions")
const { getPouch } = require("../core/db")

exports.DEFAULT_COUCH = "http://budibase:budibase@localhost:10000/db/"
exports.DEFAULT_MINIO = "http://localhost:10000/"
exports.TEMP_DIR = ".temp"
exports.COUCH_DIR = "couchdb"
exports.MINIO_DIR = "minio"

const REQUIRED = [
{ value: "MAIN_PORT", default: "10000" },
{ value: "COUCH_DB_URL", default: exports.DEFAULT_COUCH },
{ value: "MINIO_URL", default: exports.DEFAULT_MINIO },
{ value: "MINIO_ACCESS_KEY" },
{ value: "MINIO_SECRET_KEY" },
]

exports.checkURLs = config => {
const mainPort = config["MAIN_PORT"],
username = config["COUCH_DB_USER"],
password = config["COUCH_DB_PASSWORD"]
if (!config["COUCH_DB_URL"] && mainPort && username && password) {
config[
"COUCH_DB_URL"
] = `http://${username}:${password}@localhost:${mainPort}/db/`
}
if (!config["MINIO_URL"]) {
config["MINIO_URL"] = exports.DEFAULT_MINIO
}
return config
}

exports.askQuestions = async () => {
console.log(
"*** NOTE: use a .env file to load these parameters repeatedly ***"
)
let config = {}
for (let property of REQUIRED) {
config[property.value] = await string(property.value, property.default)
}
return config
}

exports.loadEnvironment = path => {
  if (!fs.existsSync(path)) {
    throw "Unable to find specified .env file"
  }
const env = fs.readFileSync(path, "utf8")
const config = exports.checkURLs(dotenv.parse(env))
for (let required of REQUIRED) {
if (!config[required.value]) {
throw `Cannot find "${required.value}" property in .env file`
}
}
return config
}

// true is the default value passed by commander
exports.getConfig = async (envFile = true) => {
let config
if (envFile !== true) {
config = exports.loadEnvironment(envFile)
} else {
config = await exports.askQuestions()
}
return config
}

exports.replication = (from, to) => {
return new Promise((resolve, reject) => {
from.replicate
.to(to)
.on("complete", () => {
resolve()
})
.on("error", err => {
reject(err)
})
})
}

exports.getPouches = config => {
const Remote = getPouch(config["COUCH_DB_URL"])
const Local = getPouch()
return { Remote, Local }
}
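
For convenience, a hedged example of the .env file these helpers consume (the keys come from REQUIRED above; the URL values shown are just the defaults, and the MinIO credentials are placeholders):

MAIN_PORT=10000
COUCH_DB_URL=http://budibase:budibase@localhost:10000/db/
MINIO_URL=http://localhost:10000/
MINIO_ACCESS_KEY=<your-access-key>
MINIO_SECRET_KEY=<your-secret-key>

Note that checkURLs can derive COUCH_DB_URL from MAIN_PORT plus COUCH_DB_USER/COUCH_DB_PASSWORD, so those three variables may be supplied in its place.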

View File

@@ -1,4 +1,5 @@
 exports.CommandWords = {
+  BACKUPS: "backups",
   HOSTING: "hosting",
   ANALYTICS: "analytics",
   HELP: "help",

View File

@@ -0,0 +1,38 @@
const PouchDB = require("pouchdb")
const { checkSlashesInUrl } = require("../utils")
const fetch = require("node-fetch")

/**
* Fully qualified URL including username and password, or nothing for local
*/
exports.getPouch = (url = undefined) => {
let POUCH_DB_DEFAULTS = {}
if (!url) {
POUCH_DB_DEFAULTS = {
prefix: undefined,
adapter: "leveldb",
}
} else {
POUCH_DB_DEFAULTS = {
prefix: url,
}
}
const replicationStream = require("pouchdb-replication-stream")
PouchDB.plugin(replicationStream.plugin)
PouchDB.adapter("writableStream", replicationStream.adapters.writableStream)
return PouchDB.defaults(POUCH_DB_DEFAULTS)
}

exports.getAllDbs = async url => {
const response = await fetch(
checkSlashesInUrl(encodeURI(`${url}/_all_dbs`)),
{
method: "GET",
}
)
if (response.status === 200) {
return await response.json()
} else {
throw "Cannot connect to CouchDB instance"
}
}
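
As a worked example, a minimal sketch of how the backup commands drive these two helpers (the URL and local path are illustrative; the real call sites are in the backups command above):

const { getPouch, getAllDbs } = require("./core/db")

async function cloneFirstDb() {
  const url = "http://budibase:budibase@localhost:10000/db/"
  const dbs = await getAllDbs(url) // throws if CouchDB is unreachable
  const Remote = getPouch(url) // HTTP-backed, prefix set to the URL
  const Local = getPouch() // on-disk leveldb adapter
  const remote = new Remote(dbs[0])
  const local = new Local(`.temp/couchdb/${dbs[0]}`)
  await remote.replicate.to(local) // PouchDB replications are also promises
}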

View File

@ -1,4 +1,5 @@
#!/usr/bin/env node #!/usr/bin/env node
require("./prebuilds")
const { getCommands } = require("./options") const { getCommands } = require("./options")
const { Command } = require("commander") const { Command } = require("commander")
const { getHelpDescription } = require("./utils") const { getHelpDescription } = require("./utils")

View File

@@ -1,6 +1,7 @@
 const analytics = require("./analytics")
 const hosting = require("./hosting")
+const backups = require("./backups")

 exports.getCommands = () => {
-  return [hosting.command, analytics.command]
+  return [hosting.command, analytics.command, backups.command]
 }

View File

@@ -0,0 +1,34 @@
const os = require("os")
const { join } = require("path")
const fs = require("fs")

const PREBUILDS = "prebuilds"
const ARCH = `${os.platform()}-${os.arch()}`
// pkg cannot load native .node addons out of its virtual snapshot filesystem,
// so the build renames the leveldown prebuilds to .fake (see the renamer step
// in the CLI package.json) and this shim copies them back out beside the
// packaged executable with their .node extension restored
const PREBUILD_DIR = join(process.execPath, "..", PREBUILDS, ARCH)

checkForBinaries()
function checkForBinaries() {
const readDir = join(__filename, "..", "..", PREBUILDS, ARCH)
if (fs.existsSync(PREBUILD_DIR) || !fs.existsSync(readDir)) {
return
}
const natives = fs.readdirSync(readDir)
if (fs.existsSync(readDir)) {
fs.mkdirSync(PREBUILD_DIR, { recursive: true })
for (let native of natives) {
const filename = `${native.split(".fake")[0]}.node`
fs.cpSync(join(readDir, native), join(PREBUILD_DIR, filename))
}
}
}

function cleanup() {
if (fs.existsSync(PREBUILD_DIR)) {
fs.rmSync(PREBUILD_DIR, { recursive: true })
}
}

const events = ["exit", "SIGINT", "SIGUSR1", "SIGUSR2", "uncaughtException"]
events.forEach(event => {
process.on(event, cleanup)
})

View File

@@ -39,8 +39,10 @@ class Command {
     let executed = false
     for (let opt of thisCmd.opts) {
       const lookup = opt.command.split(" ")[0].replace("--", "")
-      if (options[lookup]) {
-        await opt.func(options[lookup])
+      if (!executed && options[lookup]) {
+        const input =
+          Object.keys(options).length > 1 ? options : options[lookup]
+        await opt.func(input)
         executed = true
       }
     }
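
The net effect (illustrative invocation): `budi backups --export file.tar.gz --env .env` now calls the `--export` handler exactly once, with the whole options object `{ export: "file.tar.gz", env: ".env" }`, where previously every matching sub-option handler ran and each received only its own bare value.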

View File

@@ -2,6 +2,7 @@ const chalk = require("chalk")
 const fs = require("fs")
 const axios = require("axios")
 const path = require("path")
+const progress = require("cli-progress")

 exports.downloadFile = async (url, filePath) => {
   filePath = path.resolve(filePath)
@@ -56,3 +57,13 @@ exports.parseEnv = env => {
   }
   return result
 }
+
+exports.progressBar = total => {
+  const bar = new progress.SingleBar({}, progress.Presets.shades_classic)
+  bar.start(total, 0)
+  return bar
+}
+
+exports.checkSlashesInUrl = url => {
+  return url.replace(/(https?:\/\/)|(\/)+/g, "$1$2")
+}

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -291,12 +291,12 @@
   resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39"
   integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==

-"@budibase/backend-core@1.1.14":
-  version "1.1.14"
-  resolved "https://registry.yarnpkg.com/@budibase/backend-core/-/backend-core-1.1.14.tgz#c4cb61dc8b841b6ae37aeb1e34c418fffb7aa597"
-  integrity sha512-fElYKOj53VtlzySGpXTjP2kyyllz7VZESVbcS5fJsoREBzP3JFY4a4F89U8GGqjPO5KS3AsfLJqWIjU6aE+yow==
+"@budibase/backend-core@1.1.15-alpha.1":
+  version "1.1.15-alpha.1"
+  resolved "https://registry.yarnpkg.com/@budibase/backend-core/-/backend-core-1.1.15-alpha.1.tgz#fb2b726a9afe301aaedbf09a5bcfa82ef14fa7b9"
+  integrity sha512-tVujXhAA7E8h9DbmAeRmje/CcJKwWvPIk8og6o46kmkdLx+7lwm4AG4ImrsR9PoRtvhkdUClAUwuGtFGcsafwg==
   dependencies:
-    "@budibase/types" "^1.1.14"
+    "@budibase/types" "^1.1.15-alpha.1"
     "@techpass/passport-openidconnect" "0.3.2"
     aws-sdk "2.1030.0"
     bcrypt "5.0.1"
@@ -324,19 +324,24 @@
     uuid "8.3.2"
     zlib "1.0.5"

-"@budibase/pro@1.1.14":
-  version "1.1.14"
-  resolved "https://registry.yarnpkg.com/@budibase/pro/-/pro-1.1.14.tgz#c99755c239e3f7c957c02c8c709649077409c73b"
-  integrity sha512-S88/Fc2hkc2SRNXQrHdBNX+yiywv7ufsOeLO7blA8Ygec0FmCB81yKl6NWIZJ2RbrBIfGD2XqwXmBT6AvxKosQ==
+"@budibase/pro@1.1.15-alpha.1":
+  version "1.1.15-alpha.1"
+  resolved "https://registry.yarnpkg.com/@budibase/pro/-/pro-1.1.15-alpha.1.tgz#8013b5bdb6adea291bf29a32f9c572e5cc1f9fc8"
+  integrity sha512-8DwIs12un59YnLNlqUFQgGqclf4Dmpp76Yo4cVDeRkaKDvbRJoUUK7jkYsDpstU6FVXD8m6/0l8Pwr3gWN5iyQ==
   dependencies:
-    "@budibase/backend-core" "1.1.14"
-    "@budibase/types" "1.1.14"
+    "@budibase/backend-core" "1.1.15-alpha.1"
+    "@budibase/types" "1.1.15-alpha.1"
     node-fetch "^2.6.1"

-"@budibase/types@1.1.14", "@budibase/types@^1.1.14":
-  version "1.1.14"
-  resolved "https://registry.yarnpkg.com/@budibase/types/-/types-1.1.14.tgz#519a0b16d7356324082d51efc7b313d4f31793f0"
-  integrity sha512-h0bma0AN4HiML2v6ZEnc0dcHs4CWi2i49x1jI5r6q4w61wdFt4eQ2pDS/Qv1R+BRBGGsbAwkxJRWoBxnqdBVuw==
+"@budibase/types@1.1.15-alpha.1":
+  version "1.1.15-alpha.1"
+  resolved "https://registry.yarnpkg.com/@budibase/types/-/types-1.1.15-alpha.1.tgz#4abb0830e3c1dca4a49bc974371edda922f8253b"
+  integrity sha512-x00f0/JY2CayjGEBR9R2cH/87nFV1dg2bZHXdMIWN6djcQjBsMjkaq+Qx2xJtWPMcld9yufPbBWdfgVQsiPc0A==
+
+"@budibase/types@^1.1.15-alpha.1":
+  version "1.1.16"
+  resolved "https://registry.yarnpkg.com/@budibase/types/-/types-1.1.16.tgz#4dd1f0b1e630abd46749414d74a1fdd07820df54"
+  integrity sha512-jaOdsCOx0CJ2tyKodTI6PMo9CNHTo1nsMMrRi/XFIFQtGOypkiNoskb5u0Ee3GtpN6LNXgwPdrYnh+vcIL9lRw==

 "@cspotcode/source-map-consumer@0.8.0":
   version "0.8.0"