Reformatting types to allow the queue to be used for both import and export.
commit f795cb0e33
parent 8003f8b283
@@ -5,37 +5,41 @@ import { Job } from "bull"
 import fs from "fs"
 import env from "../../../environment"
 
-export async function init() {
-  await backups.addAppBackupProcessor(async (job: Job) => {
-    const appId = job.data.appId,
-      trigger = job.data.trigger,
-      name = job.data.name
-    const tenantId = tenancy.getTenantIDFromAppID(appId)
-    await tenancy.doInTenant(tenantId, async () => {
-      const createdAt = new Date().toISOString()
-      const tarPath = await exportApp(appId, { tar: true })
-      let filename = `${appId}/backup-${createdAt}.tar.gz`
-      // add the tenant to the bucket path if backing up within a multi-tenant environment
-      if (env.MULTI_TENANCY) {
-        filename = `${tenantId}/${filename}`
-      }
-      const bucket = objectStore.ObjectStoreBuckets.BACKUPS
-      const metadata = {
-        appId,
-        createdAt,
-        trigger,
-        name,
-      }
-      await objectStore.upload({
-        path: tarPath,
-        type: "application/gzip",
-        bucket,
-        filename,
-        metadata,
-      })
-      await backups.storeAppBackupMetadata(filename, metadata)
-      // clear up the tarball after uploading it
-      fs.rmSync(tarPath)
-    })
-  })
-}
+async function importProcessor(job: Job) {}
+
+async function exportProcessor(job: Job) {
+  const appId = job.data.appId,
+    trigger = job.data.trigger,
+    name = job.data.name
+  const tenantId = tenancy.getTenantIDFromAppID(appId)
+  await tenancy.doInTenant(tenantId, async () => {
+    const createdAt = new Date().toISOString()
+    const tarPath = await exportApp(appId, { tar: true })
+    let filename = `${appId}/backup-${createdAt}.tar.gz`
+    // add the tenant to the bucket path if backing up within a multi-tenant environment
+    if (env.MULTI_TENANCY) {
+      filename = `${tenantId}/${filename}`
+    }
+    const bucket = objectStore.ObjectStoreBuckets.BACKUPS
+    const metadata = {
+      appId,
+      createdAt,
+      trigger,
+      name,
+    }
+    await objectStore.upload({
+      path: tarPath,
+      type: "application/gzip",
+      bucket,
+      filename,
+      metadata,
+    })
+    await backups.storeAppBackupMetadata(filename, metadata)
+    // clear up the tarball after uploading it
+    fs.rmSync(tarPath)
+  })
+}
+
+export async function init() {
+  await backups.addAppBackupProcessors(importProcessor, exportProcessor)
+}
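
The hunk above registers importProcessor and exportProcessor through backups.addAppBackupProcessors, whose definition is not part of this commit. A minimal sketch of what such a helper could look like on top of Bull, using named processors on one shared queue; the queue name and the named-processor approach are assumptions here, only the function's call signature is implied by the diff:

import Queue, { Job } from "bull"

// hypothetical queue; its real name and Redis config are not shown in this commit
const appBackupQueue = new Queue("appBackupQueue")

// sketch: register each processor under a named job type so that
// import and export jobs can share the same queue
export function addAppBackupProcessors(
  importFn: (job: Job) => Promise<void>,
  exportFn: (job: Job) => Promise<void>
) {
  appBackupQueue.process("import", importFn)
  appBackupQueue.process("export", exportFn)
}

With named processors, producers would enqueue work via appBackupQueue.add("import", data) or appBackupQueue.add("export", data); dispatching on an eventType field in the job data, as the type changes below suggest, would work equally well.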
@@ -1,5 +1,5 @@
 import { db as dbCore } from "@budibase/backend-core"
-import { APP_PREFIX, TABLE_ROW_PREFIX } from "../../../db/utils"
+import { TABLE_ROW_PREFIX } from "../../../db/utils"
 import { budibaseTempDir } from "../../../utilities/budibaseDir"
 import {
   DB_EXPORT_FILE,
@@ -6,6 +6,11 @@ export enum AppBackupTrigger {
   SCHEDULED = "scheduled",
 }
 
+export enum AppBackupEventType {
+  EXPORT = "export",
+  IMPORT = "import",
+}
+
 export interface AppBackup extends Document {
   trigger: AppBackupTrigger
   name: string
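
An illustration of how the new AppBackupEventType enum lets a single queue consumer discriminate import jobs from export jobs; the dispatcher function itself is hypothetical and not part of this commit:

import { Job } from "bull"

// hypothetical dispatcher: branch on the eventType carried in the
// job data (see AppBackupQueueData below)
async function processAppBackupJob(job: Job) {
  switch (job.data.eventType as AppBackupEventType) {
    case AppBackupEventType.EXPORT:
      // export path: tar the app and upload it, as in the processor above
      break
    case AppBackupEventType.IMPORT:
      // import path: restore the app from job.data.import.backupId
      break
  }
}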
@@ -31,12 +36,22 @@ export type AppBackupFetchOpts = {
 }
 
 export interface AppBackupQueueData {
-  trigger: AppBackupTrigger
-  createdBy?: string
-  name?: string
+  eventType: AppBackupEventType
   appId: string
+  export?: {
+    trigger: AppBackupTrigger
+    name?: string
+    createdBy?: string
+  }
+  import?: {
+    backupId: string
+  }
 }
 
-export interface AppBackupMetadata extends AppBackupQueueData {
+export interface AppBackupMetadata {
+  appId: string
+  trigger: AppBackupTrigger
+  name?: string
+  createdBy?: string
   createdAt: string
 }
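
For reference, example queue payloads that satisfy the reshaped AppBackupQueueData; the IDs and the backup name are made up, and AppBackupTrigger.SCHEDULED is used because it is the only trigger value visible in this diff:

// an export job: per-event fields live under the optional export key
const exportJob: AppBackupQueueData = {
  eventType: AppBackupEventType.EXPORT,
  appId: "app_123",
  export: {
    trigger: AppBackupTrigger.SCHEDULED,
    name: "nightly backup",
  },
}

// an import job: only the backup to restore from is needed
const importJob: AppBackupQueueData = {
  eventType: AppBackupEventType.IMPORT,
  appId: "app_123",
  import: {
    backupId: "backup_456",
  },
}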