Adding prebuild management for pouchDB leveldown.
parent a9d927b713
commit 4354ae76cb
@@ -9,13 +9,19 @@
   "author": "Budibase",
   "license": "GPL-3.0",
   "scripts": {
-    "build": "pkg . --out-path build"
+    "prebuild": "rm -rf prebuilds 2> /dev/null && cp -r node_modules/leveldown/prebuilds prebuilds",
+    "build": "yarn prebuild && renamer --find .node --replace .fake 'prebuilds/**' && pkg . --out-path build && yarn postbuild",
+    "postbuild": "rm -rf prebuilds 2> /dev/null"
   },
   "pkg": {
     "targets": [
-      "node14-linux",
-      "node14-win",
-      "node14-macos"
+      "node16-linux",
+      "node16-win",
+      "node16-macos"
     ],
+    "assets": [
+      "node_modules/@budibase/backend-core/dist/**/*",
+      "prebuilds/**/*"
+    ],
     "outputPath": "build"
   },
@@ -29,12 +35,17 @@
     "dotenv": "^16.0.1",
     "inquirer": "^8.0.0",
     "lookpath": "^1.1.0",
-    "pkg": "^5.3.0",
+    "node-fetch": "2",
+    "pkg": "^5.7.0",
     "posthog-node": "1.0.7",
+    "pouchdb": "^7.3.0",
+    "pouchdb-replication-stream": "^1.2.9",
     "randomstring": "^1.1.5",
     "tar": "^6.1.11"
   },
   "devDependencies": {
-    "eslint": "^7.20.0"
+    "copyfiles": "^2.4.1",
+    "eslint": "^7.20.0",
+    "renamer": "^4.0.0"
   }
 }
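
Taken together, the new scripts copy leveldown's prebuilt binaries into ./prebuilds, rename them from .node to .fake, let pkg bundle them through the "prebuilds/**/*" asset glob, and clean up afterwards. The rename appears to exist so pkg stores the binaries as plain assets inside its snapshot rather than treating them as native addons; the new prebuilds.js further down restores the .node extension at runtime. A minimal sketch of what the renamer step does, assuming it only has to rewrite extensions under ./prebuilds:

    // sketch only: approximates `renamer --find .node --replace .fake 'prebuilds/**'`
    const fs = require("fs")
    const path = require("path")

    function renameNodeToFake(dir) {
      for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {
        const full = path.join(dir, entry.name)
        if (entry.isDirectory()) {
          renameNodeToFake(full)
        } else if (entry.name.endsWith(".node")) {
          fs.renameSync(full, full.replace(/\.node$/, ".fake"))
        }
      }
    }

    renameNodeToFake("prebuilds")

The real pipeline uses the renamer CLI added to devDependencies above rather than custom code.
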
@@ -2,7 +2,7 @@ const Command = require("../structures/Command")
 const { CommandWords } = require("../constants")
 const fs = require("fs")
 const { join } = require("path")
-const { getAllDbs } = require("@budibase/backend-core/db")
+const { getAllDbs } = require("../core/db")
 const tar = require("tar")
 const { progressBar } = require("../utils")
 const {
@@ -22,9 +22,9 @@ async function exportBackup(opts) {
   if (typeof filename !== "string") {
     filename = `backup-${new Date().toISOString()}.tar.gz`
   }
-  await getConfig(envFile)
-  const dbList = await getAllDbs()
-  const { Remote, Local } = getPouches()
+  const config = await getConfig(envFile)
+  const dbList = await getAllDbs(config["COUCH_DB_URL"])
+  const { Remote, Local } = getPouches(config)
   if (fs.existsSync(TEMP_DIR)) {
     fs.rmSync(TEMP_DIR, { recursive: true })
   }
@@ -59,7 +59,7 @@ async function exportBackup(opts) {
 async function importBackup(opts) {
   const envFile = opts.env || undefined
   const filename = opts["import"] || opts
-  await getConfig(envFile)
+  const config = await getConfig(envFile)
   if (!filename || !fs.existsSync(filename)) {
     console.error("Cannot import without specifying a valid file to import")
     process.exit(-1)
@@ -73,7 +73,7 @@ async function importBackup(opts) {
     cwd: join(TEMP_DIR),
     file: filename,
   })
-  const { Remote, Local } = getPouches()
+  const { Remote, Local } = getPouches(config)
   const dbList = fs.readdirSync(join(TEMP_DIR, COUCH_DIR))
   console.log("CouchDB Import")
   const bar = progressBar(dbList.length)
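
The export path now resolves a config up front, asks CouchDB for its database list over HTTP, and builds the Remote/Local PouchDB constructors from that config instead of going through backend-core's environment handling. The per-database copy itself sits outside the visible hunks; a rough sketch of its shape, assuming the replication(from, to) helper shown in the utils diff below, with the require paths hypothetical:

    // sketch only - mirrors how exportBackup appears to wire these helpers together
    const { getAllDbs } = require("../core/db")
    const { getConfig, getPouches, replication } = require("./utils") // hypothetical path

    async function exportAllDbs(envFile) {
      const config = await getConfig(envFile)
      const dbList = await getAllDbs(config["COUCH_DB_URL"])
      const { Remote, Local } = getPouches(config)
      for (const dbName of dbList) {
        // copy each remote CouchDB database into a local on-disk PouchDB
        await replication(new Remote(dbName), new Local(dbName))
      }
    }
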
@@ -1,8 +1,7 @@
 const dotenv = require("dotenv")
 const fs = require("fs")
 const { string } = require("../questions")
-const { env } = require("@budibase/backend-core")
-const { getPouch } = require("@budibase/backend-core/db")
+const { getPouch } = require("../core/db")
 
 exports.DEFAULT_COUCH = "http://budibase:budibase@localhost:10000/db/"
 exports.DEFAULT_MINIO = "http://localhost:10000/"
@@ -66,9 +65,6 @@ exports.getConfig = async (envFile = true) => {
   } else {
     config = await exports.askQuestions()
   }
-  for (let required of REQUIRED) {
-    env._set(required.value, config[required.value])
-  }
   return config
 }
 
@@ -85,8 +81,8 @@ exports.replication = (from, to) => {
   })
 }
 
-exports.getPouches = () => {
-  const Remote = getPouch({ replication: true })
-  const Local = getPouch({ onDisk: true, directory: exports.TEMP_DIR })
+exports.getPouches = config => {
+  const Remote = getPouch(config["COUCH_DB_URL"])
+  const Local = getPouch()
   return { Remote, Local }
 }
@@ -0,0 +1,38 @@
+const PouchDB = require("pouchdb")
+const { checkSlashesInUrl } = require("../utils")
+const fetch = require("node-fetch")
+
+/**
+ * Fully qualified URL including username and password, or nothing for local
+ */
+exports.getPouch = (url = undefined) => {
+  let POUCH_DB_DEFAULTS = {}
+  if (!url) {
+    POUCH_DB_DEFAULTS = {
+      prefix: undefined,
+      adapter: "leveldb",
+    }
+  } else {
+    POUCH_DB_DEFAULTS = {
+      prefix: url,
+    }
+  }
+  const replicationStream = require("pouchdb-replication-stream")
+  PouchDB.plugin(replicationStream.plugin)
+  PouchDB.adapter("writableStream", replicationStream.adapters.writableStream)
+  return PouchDB.defaults(POUCH_DB_DEFAULTS)
+}
+
+exports.getAllDbs = async url => {
+  const response = await fetch(
+    checkSlashesInUrl(encodeURI(`${url}/_all_dbs`)),
+    {
+      method: "GET",
+    }
+  )
+  if (response.status === 200) {
+    return await response.json()
+  } else {
+    throw "Cannot connect to CouchDB instance"
+  }
+}
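
This new module replaces the @budibase/backend-core/db helpers: getPouch returns a PouchDB constructor that either prefixes database names with a CouchDB URL (remote) or falls back to the leveldb adapter on disk (local), and getAllDbs lists databases via CouchDB's /_all_dbs endpoint. A small usage sketch, with the database name and require path made up for illustration:

    // illustration only - database name and require path are placeholders
    const { getPouch, getAllDbs } = require("./core/db")

    async function listAndOpen() {
      const couchUrl = "http://budibase:budibase@localhost:10000/db/" // e.g. the DEFAULT_COUCH above
      const dbNames = await getAllDbs(couchUrl) // GET {couchUrl}/_all_dbs
      const Remote = getPouch(couchUrl)         // database names get prefixed with the URL
      const Local = getPouch()                  // leveldb adapter, stored on disk
      const remoteDb = new Remote(dbNames[0])
      const localDb = new Local(dbNames[0])
      return { remoteDb, localDb }
    }
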
@@ -1,4 +1,5 @@
 #!/usr/bin/env node
+require("./prebuilds")
 const { getCommands } = require("./options")
 const { Command } = require("commander")
 const { getHelpDescription } = require("./utils")
@@ -0,0 +1,29 @@
+const os = require("os")
+const { join } = require("path")
+const fs = require("fs")
+const PREBUILDS = "prebuilds"
+const ARCH = `${os.platform()}-${os.arch()}`
+const PREBUILD_DIR = join(process.execPath, "..", PREBUILDS, ARCH)
+
+checkForBinaries()
+
+function checkForBinaries() {
+  if (fs.existsSync(PREBUILD_DIR)) {
+    return
+  }
+  const readDir = join(__filename, "..", "..", PREBUILDS, ARCH)
+  const natives = fs.readdirSync(readDir)
+  if (fs.existsSync(readDir)) {
+    fs.mkdirSync(PREBUILD_DIR, { recursive: true })
+    for (let native of natives) {
+      const filename = `${native.split(".fake")[0]}.node`
+      fs.cpSync(join(readDir, native), join(PREBUILD_DIR, filename))
+    }
+  }
+}
+
+process.on("exit", () => {
+  if (fs.existsSync(PREBUILD_DIR)) {
+    fs.rmSync(PREBUILD_DIR, { recursive: true })
+  }
+})
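
This bootstrap runs before anything else in the CLI (see the require("./prebuilds") added to the entry point above). On startup it copies the bundled .fake prebuilds out of the pkg snapshot into a prebuilds/<platform>-<arch> directory next to the executable, restoring the .node extension so leveldown can locate its native binding, and deletes that directory again when the process exits. A small illustration of where the binding ends up at runtime (paths are examples only):

    // illustration only: where checkForBinaries() places the native addon
    const os = require("os")
    const path = require("path")

    const arch = `${os.platform()}-${os.arch()}` // e.g. "linux-x64"
    const runtimeDir = path.join(process.execPath, "..", "prebuilds", arch)
    // e.g. /usr/local/bin/budi -> /usr/local/bin/prebuilds/linux-x64/
    // the .fake assets shipped inside the snapshot are copied here with a .node
    // extension, and the directory is removed again by the exit handler above
    console.log(runtimeDir)
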
@@ -63,3 +63,7 @@ exports.progressBar = total => {
   bar.start(total, 0)
   return bar
 }
+
+exports.checkSlashesInUrl = url => {
+  return url.replace(/(https?:\/\/)|(\/)+/g, "$1$2")
+}
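
checkSlashesInUrl collapses repeated slashes without touching the protocol's own double slash, which matters because getAllDbs concatenates `${url}/_all_dbs` onto a COUCH_DB_URL that typically already ends in a slash. For example:

    // behaviour of the regex above, using the DEFAULT_COUCH value from this diff
    const checkSlashesInUrl = url => url.replace(/(https?:\/\/)|(\/)+/g, "$1$2")

    console.log(checkSlashesInUrl("http://budibase:budibase@localhost:10000/db//_all_dbs"))
    // -> "http://budibase:budibase@localhost:10000/db/_all_dbs"
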
File diff suppressed because it is too large