Some fixes for #8770 - the CLI was very memory intensive when working with large databases that contained many revisions. We don't need the revisions for app exports/backups, so I've updated our export systems to drop the revision history, as it compacts the database significantly and speeds up exports/reduces memory usage.
This commit is contained in:
parent
3b43943f1f
commit
4d86df057b
|
@ -48,7 +48,7 @@
|
||||||
"posthog-node": "1.3.0",
|
"posthog-node": "1.3.0",
|
||||||
"pouchdb": "7.3.0",
|
"pouchdb": "7.3.0",
|
||||||
"pouchdb-find": "7.2.2",
|
"pouchdb-find": "7.2.2",
|
||||||
"pouchdb-replication-stream": "1.2.9",
|
"@budibase/pouchdb-replication-stream": "1.2.10",
|
||||||
"redlock": "4.2.0",
|
"redlock": "4.2.0",
|
||||||
"sanitize-s3-objectkey": "0.0.1",
|
"sanitize-s3-objectkey": "0.0.1",
|
||||||
"semver": "7.3.7",
|
"semver": "7.3.7",
|
||||||
|
|
|
@ -39,7 +39,7 @@ export const getPouch = (opts: PouchOptions = {}) => {
|
||||||
}
|
}
|
||||||
|
|
||||||
if (opts.replication) {
|
if (opts.replication) {
|
||||||
const replicationStream = require("pouchdb-replication-stream")
|
const replicationStream = require("@budibase/pouchdb-replication-stream")
|
||||||
PouchDB.plugin(replicationStream.plugin)
|
PouchDB.plugin(replicationStream.plugin)
|
||||||
// @ts-ignore
|
// @ts-ignore
|
||||||
PouchDB.adapter("writableStream", replicationStream.adapters.writableStream)
|
PouchDB.adapter("writableStream", replicationStream.adapters.writableStream)
|
||||||
|
|
|
@ -310,10 +310,18 @@
|
||||||
qs "^6.11.0"
|
qs "^6.11.0"
|
||||||
tough-cookie "^4.1.2"
|
tough-cookie "^4.1.2"
|
||||||
|
|
||||||
"@budibase/types@2.2.12-alpha.62":
|
"@budibase/pouchdb-replication-stream@1.2.10":
|
||||||
version "2.2.12-alpha.62"
|
version "1.2.10"
|
||||||
resolved "https://registry.yarnpkg.com/@budibase/types/-/types-2.2.12-alpha.62.tgz#385ef000610d5c00b83cb2eafda2bd63c86b7f3f"
|
resolved "https://registry.yarnpkg.com/@budibase/pouchdb-replication-stream/-/pouchdb-replication-stream-1.2.10.tgz#4100df2effd7c823edadddcdbdc380f6827eebf5"
|
||||||
integrity sha512-idlhB4fSyBCEDWsVvQvdmN9Dg9VAEwxZ8TLE9pGnXIRZPg48MKXPNn5AUT9zv6cDlbQdlU2tFFF8st9b6lyLuw==
|
integrity sha512-1zeorOwbelZ7HF5vFB+pKE8Mnh31om8k1M6T3AZXVULYTHLsyJrMTozSv5CJ1P8ZfOIJab09HDzCXDh2icFekg==
|
||||||
|
dependencies:
|
||||||
|
argsarray "0.0.1"
|
||||||
|
inherits "^2.0.3"
|
||||||
|
lodash.pick "^4.0.0"
|
||||||
|
ndjson "^1.4.3"
|
||||||
|
pouch-stream "^0.4.0"
|
||||||
|
pouchdb-promise "^6.0.4"
|
||||||
|
through2 "^2.0.0"
|
||||||
|
|
||||||
"@cspotcode/source-map-support@^0.8.0":
|
"@cspotcode/source-map-support@^0.8.0":
|
||||||
version "0.8.1"
|
version "0.8.1"
|
||||||
|
@ -4726,19 +4734,6 @@ pouchdb-promise@^6.0.4:
|
||||||
dependencies:
|
dependencies:
|
||||||
lie "3.1.1"
|
lie "3.1.1"
|
||||||
|
|
||||||
pouchdb-replication-stream@1.2.9:
|
|
||||||
version "1.2.9"
|
|
||||||
resolved "https://registry.yarnpkg.com/pouchdb-replication-stream/-/pouchdb-replication-stream-1.2.9.tgz#aa4fa5d8f52df4825392f18e07c7e11acffc650a"
|
|
||||||
integrity sha512-hM8XRBfamTTUwRhKwLS/jSNouBhn9R/4ugdHNRD1EvJzwV8iImh6sDYbCU9PGuznjyOjXz6vpFRzKeI2KYfwnQ==
|
|
||||||
dependencies:
|
|
||||||
argsarray "0.0.1"
|
|
||||||
inherits "^2.0.3"
|
|
||||||
lodash.pick "^4.0.0"
|
|
||||||
ndjson "^1.4.3"
|
|
||||||
pouch-stream "^0.4.0"
|
|
||||||
pouchdb-promise "^6.0.4"
|
|
||||||
through2 "^2.0.0"
|
|
||||||
|
|
||||||
pouchdb-selector-core@7.2.2:
|
pouchdb-selector-core@7.2.2:
|
||||||
version "7.2.2"
|
version "7.2.2"
|
||||||
resolved "https://registry.yarnpkg.com/pouchdb-selector-core/-/pouchdb-selector-core-7.2.2.tgz#264d7436a8c8ac3801f39960e79875ef7f3879a0"
|
resolved "https://registry.yarnpkg.com/pouchdb-selector-core/-/pouchdb-selector-core-7.2.2.tgz#264d7436a8c8ac3801f39960e79875ef7f3879a0"
|
||||||
|
|
|
@ -74,17 +74,17 @@ exports.getConfig = async (envFile = true) => {
|
||||||
return config
|
return config
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.replication = (from, to) => {
|
exports.replication = async (from, to) => {
|
||||||
return new Promise((resolve, reject) => {
|
const pouch = getPouch()
|
||||||
from.replicate
|
try {
|
||||||
.to(to)
|
await pouch.replicate(from, to, {
|
||||||
.on("complete", () => {
|
batch_size: 1000,
|
||||||
resolve()
|
batch_limit: 5,
|
||||||
})
|
style: "main_only",
|
||||||
.on("error", err => {
|
})
|
||||||
reject(err)
|
} catch (err) {
|
||||||
})
|
throw new Error(`Replication failed - ${JSON.stringify(err)}`)
|
||||||
})
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
exports.getPouches = config => {
|
exports.getPouches = config => {
|
||||||
|
|
|
@ -45,12 +45,18 @@ function tarFilesToTmp(tmpDir: string, files: string[]) {
|
||||||
* @return {*} either a readable stream or a string
|
* @return {*} either a readable stream or a string
|
||||||
*/
|
*/
|
||||||
export async function exportDB(dbName: string, opts: ExportOpts = {}) {
|
export async function exportDB(dbName: string, opts: ExportOpts = {}) {
|
||||||
|
const exportOpts = {
|
||||||
|
filter: opts?.filter,
|
||||||
|
batch_size: 1000,
|
||||||
|
batch_limit: 5,
|
||||||
|
style: "main_only",
|
||||||
|
}
|
||||||
return dbCore.doWithDB(dbName, async (db: any) => {
|
return dbCore.doWithDB(dbName, async (db: any) => {
|
||||||
// Write the dump to file if required
|
// Write the dump to file if required
|
||||||
if (opts?.exportPath) {
|
if (opts?.exportPath) {
|
||||||
const path = opts?.exportPath
|
const path = opts?.exportPath
|
||||||
const writeStream = fs.createWriteStream(path)
|
const writeStream = fs.createWriteStream(path)
|
||||||
await db.dump(writeStream, { filter: opts?.filter })
|
await db.dump(writeStream, exportOpts)
|
||||||
return path
|
return path
|
||||||
} else {
|
} else {
|
||||||
// Stringify the dump in memory if required
|
// Stringify the dump in memory if required
|
||||||
|
@ -59,7 +65,7 @@ export async function exportDB(dbName: string, opts: ExportOpts = {}) {
|
||||||
memStream.on("data", (chunk: any) => {
|
memStream.on("data", (chunk: any) => {
|
||||||
appString += chunk.toString()
|
appString += chunk.toString()
|
||||||
})
|
})
|
||||||
await db.dump(memStream, { filter: opts?.filter })
|
await db.dump(memStream, exportOpts)
|
||||||
return appString
|
return appString
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
|
|
Loading…
Reference in New Issue