Export to tarball through tmp.

parent bb65cf8ef9
commit a41b362040
@@ -1,24 +1,9 @@
 const env = require("../../environment")
 const { getAllApps, getGlobalDBName } = require("@budibase/backend-core/db")
-const { sendTempFile, readFileSync } = require("../../utilities/fileSystem")
-const { stringToReadStream } = require("../../utilities")
-const { getGlobalDB } = require("@budibase/backend-core/tenancy")
-const { create } = require("./application")
-const { getDocParams, DocumentType, isDevAppID } = require("../../db/utils")
+const { streamFile } = require("../../utilities/fileSystem")
+const { DocumentType, isDevAppID } = require("../../db/utils")
 const sdk = require("../../sdk")

-async function createApp(appName, appImport) {
-  const ctx = {
-    request: {
-      body: {
-        templateString: appImport,
-        name: appName,
-      },
-    },
-  }
-  return create(ctx)
-}
-
 exports.exportApps = async ctx => {
   if (env.SELF_HOSTED || !env.MULTI_TENANCY) {
     ctx.throw(400, "Exporting only allowed in multi-tenant cloud environments.")
@@ -27,29 +12,18 @@ exports.exportApps = async ctx => {
   const globalDBString = await sdk.apps.exports.exportDB(getGlobalDBName(), {
     filter: doc => !doc._id.startsWith(DocumentType.USER),
   })
-  let allDBs = {
-    global: globalDBString,
-  }
-  for (let app of apps) {
-    const appId = app.appId || app._id
-    // only export the dev apps as they will be the latest, the user can republish the apps
-    // in their self hosted environment
-    if (isDevAppID(appId)) {
-      allDBs[app.name] = await sdk.apps.exports.exportApp(appId)
-    }
-  }
-  const filename = `cloud-export-${new Date().getTime()}.txt`
-  ctx.attachment(filename)
-  ctx.body = sendTempFile(JSON.stringify(allDBs))
-}
-
-async function getAllDocType(db, docType) {
-  const response = await db.allDocs(
-    getDocParams(docType, null, {
-      include_docs: true,
-    })
+  // only export the dev apps as they will be the latest, the user can republish the apps
+  // in their self-hosted environment
+  let appIds = apps
+    .map(app => app.appId || app._id)
+    .filter(appId => isDevAppID(appId))
+  const tmpPath = await sdk.apps.exports.exportMultipleApps(
+    appIds,
+    globalDBString
   )
-  return response.rows.map(row => row.doc)
+  const filename = `cloud-export-${new Date().getTime()}.tar.gz`
+  ctx.attachment(filename)
+  ctx.body = streamFile(tmpPath)
 }

 async function hasBeenImported() {
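Note: `isDevAppID` distinguishes development app IDs from published ones by their ID prefix (in Budibase the dev prefix is `app_dev`; treated as an assumption here). A minimal sketch of the new selection logic in isolation, with illustrative app records:

    // Sketch: narrow the fetched app list down to dev apps only,
    // mirroring the map/filter added to exportApps above.
    const apps = [
      { appId: "app_dev_abc123", name: "inventory" }, // dev copy
      { _id: "app_abc123", name: "inventory" },       // published copy
    ]
    const appIds = apps
      .map(app => app.appId || app._id)
      .filter(appId => appId.startsWith("app_dev"))
    // appIds is ["app_dev_abc123"]: only dev apps go into the tarball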
@@ -77,30 +51,51 @@ exports.importApps = async ctx => {
       "Import file is required and environment must be fresh to import apps."
     )
   }
-  const importFile = ctx.request.files.importFile
-  const importString = readFileSync(importFile.path)
-  const dbs = JSON.parse(importString)
-  const globalDbImport = dbs.global
-  // remove from the list of apps
-  delete dbs.global
-  const globalDb = getGlobalDB()
-  // load the global db first
-  await globalDb.load(stringToReadStream(globalDbImport))
-  for (let [appName, appImport] of Object.entries(dbs)) {
-    await createApp(appName, appImport)
-  }

-  // if there are any users make sure to remove them
-  let users = await getAllDocType(globalDb, DocumentType.USER)
-  let userDeletionPromises = []
-  for (let user of users) {
-    userDeletionPromises.push(globalDb.remove(user._id, user._rev))
-  }
-  if (userDeletionPromises.length > 0) {
-    await Promise.all(userDeletionPromises)
-  }
-
-  await globalDb.bulkDocs(users)
+  // TODO: IMPLEMENT TARBALL EXTRACTION, APP IMPORT, ATTACHMENT IMPORT AND GLOBAL DB IMPORT
+  // async function getAllDocType(db, docType) {
+  //   const response = await db.allDocs(
+  //     getDocParams(docType, null, {
+  //       include_docs: true,
+  //     })
+  //   )
+  //   return response.rows.map(row => row.doc)
+  // }
+  // async function createApp(appName, appImport) {
+  //   const ctx = {
+  //     request: {
+  //       body: {
+  //         templateString: appImport,
+  //         name: appName,
+  //       },
+  //     },
+  //   }
+  //   return create(ctx)
+  // }
+  // const importFile = ctx.request.files.importFile
+  // const importString = readFileSync(importFile.path)
+  // const dbs = JSON.parse(importString)
+  // const globalDbImport = dbs.global
+  // // remove from the list of apps
+  // delete dbs.global
+  // const globalDb = getGlobalDB()
+  // // load the global db first
+  // await globalDb.load(stringToReadStream(globalDbImport))
+  // for (let [appName, appImport] of Object.entries(dbs)) {
+  //   await createApp(appName, appImport)
+  // }
+  //
+  // // if there are any users make sure to remove them
+  // let users = await getAllDocType(globalDb, DocumentType.USER)
+  // let userDeletionPromises = []
+  // for (let user of users) {
+  //   userDeletionPromises.push(globalDb.remove(user._id, user._rev))
+  // }
+  // if (userDeletionPromises.length > 0) {
+  //   await Promise.all(userDeletionPromises)
+  // }
+  //
+  // await globalDb.bulkDocs(users)
   ctx.body = {
     message: "Apps successfully imported.",
   }
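Note: the import path is stubbed out by this commit (see the TODO above). For illustration only, unpacking the tarball that the new export produces could look roughly like the following, using the same `tar` package; `untarExport` and `destPath` are hypothetical names, not part of the commit:

    const tar = require("tar")
    const { join } = require("path")

    // Hypothetical sketch: extract a cloud export so the global DB dump
    // (global.txt) and the per-app directories are available on disk again.
    function untarExport(tarballPath, destPath) {
      tar.extract({ sync: true, file: tarballPath, cwd: destPath })
      return join(destPath, "global.txt")
    }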
(The hunks below are from a second file in the commit, the TypeScript export SDK; file paths are not preserved in this view.)

@@ -1,9 +1,7 @@
 import { db as dbCore } from "@budibase/backend-core"
 import { budibaseTempDir } from "../../utilities/budibaseDir"
-import {
-  streamUpload,
-  retrieveDirectory,
-} from "../../utilities/fileSystem/utilities"
+import { retrieveDirectory } from "../../utilities/fileSystem/utilities"
+import { streamFile } from "../../utilities/fileSystem"
 import { ObjectStoreBuckets, ATTACHMENT_PATH } from "../../constants"
 import {
   LINK_USER_METADATA_PREFIX,
@@ -11,10 +9,35 @@ import {
   USER_METDATA_PREFIX,
 } from "../../db/utils"
 import fs from "fs"
-import env from "../../environment"
 import { join } from "path"
+const uuid = require("uuid/v4")
+const tar = require("tar")
 const MemoryStream = require("memorystream")

+const DB_EXPORT_FILE = "db.txt"
+const GLOBAL_DB_EXPORT_FILE = "global.txt"
+type ExportOpts = {
+  filter?: any
+  exportPath?: string
+  tar?: boolean
+  excludeRows?: boolean
+}
+
+function tarFiles(cwd: string, files: string[], exportName?: string) {
+  exportName = exportName ? `${exportName}.tar.gz` : "export.tar.gz"
+  tar.create(
+    {
+      sync: true,
+      gzip: true,
+      file: exportName,
+      recursive: true,
+      cwd,
+    },
+    files
+  )
+  return join(cwd, exportName)
+}
+
 /**
  * Exports a DB to either file or a variable (memory).
  * @param {string} dbName the DB which is to be exported.
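Note on `tarFiles`: in node-tar, the `cwd` option controls where entries are read from, but the `file` option is resolved against the process working directory, so `file: exportName` may not land inside `cwd` even though the function returns `join(cwd, exportName)`. A variant that joins the output path explicitly avoids the mismatch (a sketch under that assumption, not what this commit does):

    // Sketch: write the tarball inside cwd explicitly so the returned
    // path always matches where node-tar actually wrote the archive.
    function tarFilesSafe(cwd: string, files: string[], exportName = "export") {
      const file = join(cwd, `${exportName}.tar.gz`)
      tar.create({ sync: true, gzip: true, file, cwd }, files)
      return file
    }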
@@ -22,36 +45,13 @@ const MemoryStream = require("memorystream")
  * a filter function or the name of the export.
  * @return {*} either a readable stream or a string
  */
-export async function exportDB(
-  dbName: string,
-  opts: { stream?: boolean; filter?: any; exportName?: string } = {}
-) {
-  // streaming a DB dump is a bit more complicated, can't close DB
-  if (opts?.stream) {
-    const db = dbCore.dangerousGetDB(dbName)
-    const memStream = new MemoryStream()
-    memStream.on("end", async () => {
-      await dbCore.closeDB(db)
-    })
-    db.dump(memStream, { filter: opts?.filter })
-    return memStream
-  }
-
+export async function exportDB(dbName: string, opts: ExportOpts = {}) {
   return dbCore.doWithDB(dbName, async (db: any) => {
     // Write the dump to file if required
-    if (opts?.exportName) {
-      const path = join(budibaseTempDir(), opts?.exportName)
+    if (opts?.exportPath) {
+      const path = opts?.exportPath
       const writeStream = fs.createWriteStream(path)
       await db.dump(writeStream, { filter: opts?.filter })
-
-      // Upload the dump to the object store if self-hosted
-      if (env.SELF_HOSTED) {
-        await streamUpload(
-          ObjectStoreBuckets.BACKUPS,
-          join(dbName, opts?.exportName),
-          fs.createReadStream(path)
-        )
-      }
       return fs.createReadStream(path)
     } else {
       // Stringify the dump in memory if required
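With the `stream` branch removed, callers of `exportDB` now choose between a file dump (by passing `exportPath`) and an in-memory string (the default). A minimal usage sketch; the DB name and file name are illustrative:

    // Sketch: dump one database to a file under the Budibase temp dir.
    const path = join(budibaseTempDir(), "app_dev_abc123.txt")
    await exportDB("app_dev_abc123", { exportPath: path })

    // Sketch: get the same dump back as a string instead.
    const dump = await exportDB("app_dev_abc123")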
@@ -79,24 +79,57 @@ function defineFilter(excludeRows?: boolean)
  * Local utility to back up the database state for an app, excluding global user
  * data or user relationships.
  * @param {string} appId The app to back up
- * @param {object} config Config to send to export DB
- * @param {boolean} excludeRows Flag to state whether the export should include data.
+ * @param {object} config Config to send to export DB/attachment export
  * @returns {*} either a string or a stream of the backup
  */
-export async function exportApp(
-  appId: string,
-  config?: any,
-  excludeRows?: boolean
-) {
-  const attachmentsPath = `${dbCore.getProdAppID(appId)}/${ATTACHMENT_PATH}`
+export async function exportApp(appId: string, config?: ExportOpts) {
+  const prodAppId = dbCore.getProdAppID(appId)
+  const attachmentsPath = `${prodAppId}/${ATTACHMENT_PATH}`
+  // export attachments to tmp
   const tmpPath = await retrieveDirectory(
     ObjectStoreBuckets.APPS,
     attachmentsPath
   )
+  // move out of app directory, simplify structure
+  fs.renameSync(join(tmpPath, attachmentsPath), join(tmpPath, ATTACHMENT_PATH))
+  // remove the old app directory created by object export
+  fs.rmdirSync(join(tmpPath, prodAppId))
+  // enforce an export of app DB to the tmp path
+  const dbPath = join(tmpPath, DB_EXPORT_FILE)
   await exportDB(appId, {
     ...config,
-    filter: defineFilter(excludeRows),
+    filter: defineFilter(config?.excludeRows),
+    exportPath: dbPath,
   })
+  // if tar requested, return where the tarball is
+  if (config?.tar) {
+    // now the tmpPath contains both the DB export and attachments, tar this
+    return tarFiles(tmpPath, [ATTACHMENT_PATH, DB_EXPORT_FILE])
+  }
+  // tar not requested, return the directory where the export is
+  else {
+    return tmpPath
+  }
+}
+
+export async function exportMultipleApps(
+  appIds: string[],
+  globalDbContents?: string
+) {
+  const tmpPath = join(budibaseTempDir(), uuid())
+  let exportPromises: Promise<void>[] = []
+  const exportAndMove = async (appId: string) => {
+    const path = await exportApp(appId)
+    await fs.promises.rename(path, join(tmpPath, appId))
+  }
+  for (let appId of appIds) {
+    exportPromises.push(exportAndMove(appId))
+  }
+  await Promise.all(exportPromises)
+  if (globalDbContents) {
+    fs.writeFileSync(join(tmpPath, GLOBAL_DB_EXPORT_FILE), globalDbContents)
+  }
+  return tarFiles(tmpPath, [...appIds, GLOBAL_DB_EXPORT_FILE])
 }

 /**
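Note: `exportMultipleApps` is what the reworked controller calls. Each app is exported into its own directory, moved under one shared temp path, the global DB dump is written alongside as global.txt, and the whole tree is tarred. One thing the hunk above does not show is creation of `tmpPath` itself, so an `fs.mkdirSync(tmpPath)` may be needed somewhere before the renames. A usage sketch with illustrative IDs:

    // Sketch: bundle two dev apps plus the global DB dump into one tarball.
    const tarball = await exportMultipleApps(
      ["app_dev_abc123", "app_dev_def456"],
      globalDBString // dump produced by exportDB, as in the controller
    )
    // tarball: <budibaseTempDir()>/<uuid>/export.tar.gz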
@@ -106,5 +139,6 @@ export async function exportApp(
  * @returns {*} a readable stream of the backup which is written in real time
  */
 export async function streamExportApp(appId: string, excludeRows: boolean) {
-  return await exportApp(appId, { stream: true }, excludeRows)
+  const tmpPath = await exportApp(appId, { excludeRows, tar: true })
+  return streamFile(tmpPath)
 }
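Since `streamFile` simply opens a read stream, `streamExportApp` now yields a stream of the tarball rather than a raw DB dump. Koa pipes any readable assigned to ctx.body, so a route handler can return it directly; `router` here is an assumed koa-router instance and the route path is illustrative:

    // Sketch: stream an app export straight to the HTTP response.
    router.get("/api/backups/:appId", async (ctx: any) => {
      ctx.attachment(`${ctx.params.appId}-export.tar.gz`)
      ctx.body = await streamExportApp(ctx.params.appId, true) // excludeRows
    })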
(The final hunk is from a third file, the fileSystem utilities module that gains the streamFile helper.)

@@ -112,6 +112,10 @@ exports.apiFileReturn = contents => {
   return fs.createReadStream(path)
 }

+exports.streamFile = path => {
+  return fs.createReadStream(path)
+}
+
 /**
  * Writes the provided contents to a temporary file, which can be used briefly.
  * @param {string} fileContents contents which will be written to a temp file.
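`streamFile` mirrors `apiFileReturn` above but takes a path instead of contents; a missing file surfaces on the stream's error event rather than at call time. A brief usage sketch with an illustrative path:

    const stream = streamFile("/tmp/example-export.tar.gz")
    stream.on("error", err => console.error("export stream failed", err))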