import { backups } from "@budibase/pro"
import { objectStore, tenancy } from "@budibase/backend-core"
import { exportApp } from "./exports"
import { Job } from "bull"
import fs from "fs"
import env from "../../../environment"
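
// placeholder processor for app import jobs (currently a no-op)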
async function importProcessor(job: Job) {}
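
// processes an app backup (export) job: exports the app to a tarball, uploads it
// to the backups bucket and records the backup metadata before cleaning up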
async function exportProcessor(job: Job) {
  const appId = job.data.appId,
    trigger = job.data.trigger,
    name = job.data.name
  const tenantId = tenancy.getTenantIDFromAppID(appId)
  await tenancy.doInTenant(tenantId, async () => {
    const createdAt = new Date().toISOString()
    // export the app to a tarball on disk
    const tarPath = await exportApp(appId, { tar: true })
    let filename = `${appId}/backup-${createdAt}.tar.gz`
    // add the tenant to the bucket path if backing up within a multi-tenant environment
    if (env.MULTI_TENANCY) {
      filename = `${tenantId}/${filename}`
    }
    const bucket = objectStore.ObjectStoreBuckets.BACKUPS
    const metadata = {
      appId,
      createdAt,
      trigger,
      name,
    }
    // upload the tarball and its metadata to the backups bucket
    await objectStore.upload({
      path: tarPath,
      type: "application/gzip",
      bucket,
      filename,
      metadata,
    })
    // record the backup metadata via the pro backups module
    await backups.storeAppBackupMetadata(filename, metadata)
    // clear up the tarball after uploading it
    fs.rmSync(tarPath)
  })
}
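
// registers the import and export processors for app backup jobs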
export async function init() {
  await backups.addAppBackupProcessors(importProcessor, exportProcessor)
}