Merge pull request #8378 from Budibase/feature/app-backups-processing

Move backup processing into pro
This commit is contained in:
Michael Drury 2022-10-24 16:11:37 +01:00 committed by GitHub
commit b7e1445477
3 changed files with 14 additions and 148 deletions
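Before this change the server registered its own Bull processors for app backup import/export (the file deleted below); it now hands its export, import and statistics functions to @budibase/pro at startup and lets pro drive the queue. As a rough TypeScript sketch of that handoff - the option names come from the pro.init() call in this diff, the signatures are only inferred from how the deleted processors used the same functions, and the interface names are made up for illustration (the real types live in @budibase/pro and are not part of this commit):

// sketch only - not the actual @budibase/pro type definitions
interface BackupProcessingHooks {
  // export an app to a .tar.gz on disk and resolve with the file path
  exportAppFn: (appId: string, opts?: { tar?: boolean }) => Promise<string>
  // restore an app from a previously exported tarball
  importAppFn: (
    appId: string,
    db: any,
    opts: { file: { type: string; path: string }; key: string }
  ) => Promise<any>
  // summarise an app's contents for the backup metadata
  statsFn: (appId: string) => Promise<any>
}

interface ProInitOpts {
  backups?: {
    processing?: BackupProcessingHooks
  }
}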

View File

@@ -38,6 +38,7 @@ import {
import { watch } from "./watch"
import { initialise as initialiseWebsockets } from "./websocket"
import sdk from "./sdk"
import * as pro from "@budibase/pro"

const app = new Koa()
@@ -103,13 +104,25 @@ server.on("close", async () => {
  }
})

const initPro = async () => {
  await pro.init({
    backups: {
      processing: {
        exportAppFn: sdk.backups.exportApp,
        importAppFn: sdk.backups.importApp,
        statsFn: sdk.backups.calculateBackupStats,
      },
    },
  })
}

module.exports = server.listen(env.PORT || 0, async () => {
  console.log(`Budibase running on ${JSON.stringify(server.address())}`)
  env._set("PORT", server.address().port)
  eventEmitter.emitPort(env.PORT)
  fileSystem.init()
  await redis.init()
  await sdk.backups.init()
  await initPro()
  // run migrations on startup if not done via http
  // not recommended in a clustered environment

View File

@@ -1,145 +0,0 @@
import { backups } from "@budibase/pro"
import { db as dbCore, objectStore, tenancy } from "@budibase/backend-core"
import {
  AppBackupQueueData,
  AppBackupStatus,
  AppBackupTrigger,
  AppBackupType,
} from "@budibase/types"
import { exportApp } from "./exports"
import { importApp } from "./imports"
import { calculateBackupStats } from "./statistics"
import { Job } from "bull"
import fs from "fs"
import env from "../../../environment"

type BackupOpts = {
  doc?: { id: string; rev: string }
  createdBy?: string
}

async function removeExistingApp(devId: string) {
  const devDb = dbCore.dangerousGetDB(devId, { skip_setup: true })
  await devDb.destroy()
}

async function runBackup(
  name: string,
  trigger: AppBackupTrigger,
  tenantId: string,
  appId: string,
  opts?: BackupOpts
) {
  const devAppId = dbCore.getDevAppID(appId),
    prodAppId = dbCore.getProdAppID(appId)
  const timestamp = new Date().toISOString()
  const tarPath = await exportApp(devAppId, { tar: true })
  const contents = await calculateBackupStats(devAppId)
  let filename = `${prodAppId}/backup-${timestamp}.tar.gz`
  // add the tenant to the bucket path if backing up within a multi-tenant environment
  if (env.MULTI_TENANCY) {
    filename = `${tenantId}/${filename}`
  }
  const bucket = objectStore.ObjectStoreBuckets.BACKUPS
  await objectStore.upload({
    path: tarPath,
    type: "application/gzip",
    bucket,
    filename,
    metadata: {
      name,
      trigger,
      timestamp,
      appId: prodAppId,
    },
  })
  if (opts?.doc) {
    await backups.updateBackupStatus(
      opts.doc.id,
      opts.doc.rev,
      AppBackupStatus.COMPLETE,
      contents,
      filename
    )
  } else {
    await backups.storeAppBackupMetadata(
      {
        appId: prodAppId,
        timestamp,
        name,
        trigger,
        type: AppBackupType.BACKUP,
        status: AppBackupStatus.COMPLETE,
        contents,
        createdBy: opts?.createdBy,
      },
      { filename }
    )
  }
  // clear up the tarball after uploading it
  fs.rmSync(tarPath)
}

async function importProcessor(job: Job) {
  const data: AppBackupQueueData = job.data
  const appId = data.appId,
    backupId = data.import!.backupId,
    nameForBackup = data.import!.nameForBackup,
    createdBy = data.import!.createdBy
  const tenantId = tenancy.getTenantIDFromAppID(appId) as string
  tenancy.doInTenant(tenantId, async () => {
    const devAppId = dbCore.getDevAppID(appId)
    const { rev } = await backups.updateRestoreStatus(
      data.docId,
      data.docRev,
      AppBackupStatus.PENDING
    )
    // initially export the current state to disk - in case something goes wrong
    await runBackup(
      nameForBackup,
      AppBackupTrigger.RESTORING,
      tenantId,
      appId,
      { createdBy }
    )
    // get the backup ready on disk
    const { path } = await backups.downloadAppBackup(backupId)
    // start by removing app database and contents of bucket - which will be updated
    await removeExistingApp(devAppId)
    let status = AppBackupStatus.COMPLETE
    try {
      await importApp(devAppId, dbCore.dangerousGetDB(devAppId), {
        file: {
          type: "application/gzip",
          path,
        },
        key: path,
      })
    } catch (err) {
      status = AppBackupStatus.FAILED
    }
    await backups.updateRestoreStatus(data.docId, rev, status)
  })
}

async function exportProcessor(job: Job) {
  const data: AppBackupQueueData = job.data
  const appId = data.appId,
    trigger = data.export!.trigger,
    name = data.export!.name || `${trigger} - backup`
  const tenantId = tenancy.getTenantIDFromAppID(appId) as string
  await tenancy.doInTenant(tenantId, async () => {
    const { rev } = await backups.updateBackupStatus(
      data.docId,
      data.docRev,
      AppBackupStatus.PENDING
    )
    return runBackup(name, trigger, tenantId, appId, {
      doc: { id: data.docId, rev },
    })
  })
}

export async function init() {
  await backups.addAppBackupProcessors(importProcessor, exportProcessor)
}
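The processors above now live behind @budibase/pro and are fed the server's functions via pro.init(). Purely as a hedged illustration of that inversion - the pro package is not shown in this diff, so every name below other than the hook names from the diff (exportAppFn, importAppFn, statsFn) is hypothetical - the pro side could consume the injected hooks along these lines:

// hypothetical pro-side wiring - not the actual @budibase/pro implementation
import { Job } from "bull"

type ProcessingHooks = {
  exportAppFn: (appId: string, opts?: { tar?: boolean }) => Promise<string>
  importAppFn: (appId: string, db: any, opts: any) => Promise<any>
  statsFn: (appId: string) => Promise<any>
}

let hooks: ProcessingHooks

export function init(opts: { backups?: { processing?: ProcessingHooks } }) {
  // keep hold of the server-provided functions for the queue processors
  hooks = opts.backups!.processing!
}

async function exportProcessor(job: Job) {
  const { appId } = job.data
  // the heavy lifting still happens in the server's own functions,
  // much as runBackup() did in the deleted file above
  const tarPath = await hooks.exportAppFn(appId, { tar: true })
  const contents = await hooks.statsFn(appId)
  // ...upload the tarball and record the backup status, as before
}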

View File

@@ -1,11 +1,9 @@
import * as exportApps from "./exports"
import * as importApps from "./imports"
import * as backup from "./backup"
import * as statistics from "./statistics"

export default {
  ...exportApps,
  ...importApps,
  ...backup,
  ...statistics,
}