Getting the import system to carry out a backup of the app before the restore.

This commit is contained in:
mike12345567 2022-10-20 19:07:10 +01:00
parent 0478d9b154
commit 4716eaaab5
2 changed files with 93 additions and 54 deletions

View File

@ -1,6 +1,11 @@
import { backups } from "@budibase/pro" import { backups } from "@budibase/pro"
import { db as dbCore, objectStore, tenancy } from "@budibase/backend-core" import { db as dbCore, objectStore, tenancy } from "@budibase/backend-core"
import { AppBackupQueueData, AppBackupStatus } from "@budibase/types" import {
AppBackupQueueData,
AppBackupStatus,
AppBackupTrigger,
AppBackupType,
} from "@budibase/types"
import { exportApp } from "./exports" import { exportApp } from "./exports"
import { importApp } from "./imports" import { importApp } from "./imports"
import { calculateBackupStats } from "../statistics" import { calculateBackupStats } from "../statistics"
@ -8,56 +13,23 @@ import { Job } from "bull"
import fs from "fs" import fs from "fs"
import env from "../../../environment" import env from "../../../environment"
// Options shared by backup runs:
// - doc: the pre-created backup document (id + rev) to mark complete when
//   the export finishes; when absent, new backup metadata is stored instead
// - createdBy: user ID recorded against backups triggered by a restore
type BackupOpts = {
doc?: { id: string; rev: string }
createdBy?: string
}
async function removeExistingApp(devId: string) { async function removeExistingApp(devId: string) {
const devDb = dbCore.dangerousGetDB(devId, { skip_setup: true }) const devDb = dbCore.dangerousGetDB(devId, { skip_setup: true })
await devDb.destroy() await devDb.destroy()
} }
async function importProcessor(job: Job) { async function runBackup(
const data: AppBackupQueueData = job.data name: string,
const appId = data.appId, trigger: AppBackupTrigger,
backupId = data.import!.backupId tenantId: string,
const tenantId = tenancy.getTenantIDFromAppID(appId) appId: string,
tenancy.doInTenant(tenantId, async () => { opts?: BackupOpts
const devAppId = dbCore.getDevAppID(appId) ) {
const performImport = async (path: string) => {
await importApp(devAppId, dbCore.dangerousGetDB(devAppId), {
file: {
type: "application/gzip",
path,
},
key: path,
})
}
// initially export the current state to disk - incase something goes wrong
const backupTarPath = await exportApp(devAppId, { tar: true })
// get the backup ready on disk
const { path } = await backups.downloadAppBackup(backupId)
// start by removing app database and contents of bucket - which will be updated
await removeExistingApp(devAppId)
try {
await performImport(path)
} catch (err) {
// rollback - clear up failed import and re-import the pre-backup
await removeExistingApp(devAppId)
await performImport(backupTarPath)
}
await backups.updateRestoreStatus(
data.docId,
data.docRev,
AppBackupStatus.COMPLETE
)
fs.rmSync(backupTarPath)
})
}
async function exportProcessor(job: Job) {
const data: AppBackupQueueData = job.data
const appId = data.appId,
trigger = data.export!.trigger,
name = data.export!.name || `${trigger} - backup`
const tenantId = tenancy.getTenantIDFromAppID(appId)
await tenancy.doInTenant(tenantId, async () => {
const devAppId = dbCore.getDevAppID(appId), const devAppId = dbCore.getDevAppID(appId),
prodAppId = dbCore.getProdAppID(appId) prodAppId = dbCore.getProdAppID(appId)
const timestamp = new Date().toISOString() const timestamp = new Date().toISOString()
@ -81,15 +53,80 @@ async function exportProcessor(job: Job) {
appId: prodAppId, appId: prodAppId,
}, },
}) })
if (opts?.doc) {
await backups.updateBackupStatus( await backups.updateBackupStatus(
data.docId, opts.doc.id,
data.docRev, opts.doc.rev,
AppBackupStatus.COMPLETE, AppBackupStatus.COMPLETE,
contents, contents,
filename filename
) )
} else {
await backups.storeAppBackupMetadata(
{
appId: prodAppId,
timestamp,
name,
trigger,
type: AppBackupType.BACKUP,
status: AppBackupStatus.COMPLETE,
contents,
createdBy: opts?.createdBy,
},
{ filename }
)
}
// clear up the tarball after uploading it // clear up the tarball after uploading it
fs.rmSync(tarPath) fs.rmSync(tarPath)
}
/**
 * Bull processor for app restore jobs: takes a pre-restore safety backup,
 * wipes the dev app, then imports the requested backup. The restore doc's
 * status is updated to COMPLETE or FAILED once the import attempt finishes.
 * @param job Bull job whose data is an AppBackupQueueData with `import` set
 */
async function importProcessor(job: Job) {
  const data: AppBackupQueueData = job.data
  const appId = data.appId,
    backupId = data.import!.backupId,
    nameForBackup = data.import!.nameForBackup,
    createdBy = data.import!.createdBy
  const tenantId = tenancy.getTenantIDFromAppID(appId) as string
  // must await the tenant context (as exportProcessor does) - otherwise the
  // job resolves before the restore finishes and any error inside becomes
  // an unhandled promise rejection instead of failing the job
  await tenancy.doInTenant(tenantId, async () => {
    const devAppId = dbCore.getDevAppID(appId)
    // initially export the current state - in case something goes wrong
    // during the restore we still have a recoverable copy
    await runBackup(
      nameForBackup,
      AppBackupTrigger.RESTORING,
      tenantId,
      appId,
      { createdBy }
    )
    // get the backup ready on disk
    const { path } = await backups.downloadAppBackup(backupId)
    // start by removing app database and contents of bucket - which will be updated
    await removeExistingApp(devAppId)
    let status = AppBackupStatus.COMPLETE
    try {
      await importApp(devAppId, dbCore.dangerousGetDB(devAppId), {
        file: {
          type: "application/gzip",
          path,
        },
        key: path,
      })
    } catch (err) {
      // deliberately don't rethrow - the failure is recorded against the
      // restore doc below, but log it so the cause isn't lost entirely
      console.error(`App restore failed for ${devAppId}`, err)
      status = AppBackupStatus.FAILED
    }
    await backups.updateRestoreStatus(data.docId, data.docRev, status)
  })
}
async function exportProcessor(job: Job) {
const data: AppBackupQueueData = job.data
const appId = data.appId,
trigger = data.export!.trigger,
name = data.export!.name || `${trigger} - backup`
const tenantId = tenancy.getTenantIDFromAppID(appId) as string
await tenancy.doInTenant(tenantId, async () => {
return runBackup(name, trigger, tenantId, appId, {
doc: { id: data.docId, rev: data.docRev },
})
}) })
} }

View File

@ -16,6 +16,7 @@ export enum AppBackupTrigger {
PUBLISH = "publish", PUBLISH = "publish",
MANUAL = "manual", MANUAL = "manual",
SCHEDULED = "scheduled", SCHEDULED = "scheduled",
RESTORING = "restoring",
} }
export interface AppBackupContents { export interface AppBackupContents {
@ -59,6 +60,7 @@ export interface AppBackupQueueData {
} }
import?: { import?: {
backupId: string backupId: string
nameForBackup: string
createdBy?: string createdBy?: string
} }
} }