Main types and work for the CRUD operations of the app backup backend in pro, plus the listeners that handle exporting apps from the server.
parent b702c7482a
commit 0bd2a18e46
@@ -63,6 +63,7 @@
     ]
   },
   "devDependencies": {
+    "@types/bull": "^3.15.9",
     "@types/chance": "1.1.3",
     "@types/ioredis": "^4.28.10",
     "@types/jest": "27.5.1",
@@ -21,6 +21,7 @@ export enum ViewName {
   ACCOUNT_BY_EMAIL = "account_by_email",
   PLATFORM_USERS_LOWERCASE = "platform_users_lowercase",
   USER_BY_GROUP = "by_group_user",
+  APP_BACKUP_BY_TRIGGER = "by_trigger",
 }
 
 export const DeprecatedViews = {
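Note: this hunk only registers the APP_BACKUP_BY_TRIGGER view name; the design document behind it is not part of this diff. Purely as a hedged sketch, assuming the CouchDB map-function pattern backend-core uses for its other views and the "backup" document prefix added in the next hunk, the view could look roughly like this (the constant name and map body are illustrative, not the real implementation):

// Hypothetical sketch only: index app backup documents by their trigger so
// they can be read back through ViewName.APP_BACKUP_BY_TRIGGER.
const APP_BACKUP_BY_TRIGGER_VIEW = {
  map: `function(doc) {
    if (doc._id.indexOf("backup_") === 0) {
      // key on [trigger, createdAt] so callers can also bound results by date
      emit([doc.trigger, doc.createdAt], null)
    }
  }`,
}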
@@ -49,6 +50,7 @@ export enum DocumentType {
   TABLE = "ta",
   DATASOURCE = "datasource",
   DATASOURCE_PLUS = "datasource_plus",
+  APP_BACKUP = "backup",
 }
 
 export const StaticDatabases = {
@@ -27,6 +27,7 @@ const CONTENT_TYPE_MAP: any = {
   css: "text/css",
   js: "application/javascript",
   json: "application/json",
+  gz: "application/gzip",
 }
 const STRING_CONTENT_TYPES = [
   CONTENT_TYPE_MAP.html,
@@ -149,7 +150,7 @@ export const upload = async ({
   type,
   metadata,
 }: any) => {
-  const extension = [...filename.split(".")].pop()
+  const extension = filename.split(".").pop()
   const fileBytes = fs.readFileSync(path)
 
   const objectStore = ObjectStore(bucketName)
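Taken together, the two hunks above mean an uploaded backup tarball resolves to the new gz entry: the simplified extension lookup keeps only the text after the last dot, which for `<appId>/backup-<ISO date>.tar.gz` is "gz". A small self-contained sketch of that resolution (the local map and fallback value are illustrative, mirroring CONTENT_TYPE_MAP rather than importing it):

// Illustrative mirror of the lookup upload() performs.
const CONTENT_TYPES: Record<string, string> = {
  css: "text/css",
  js: "application/javascript",
  json: "application/json",
  gz: "application/gzip",
}

function resolveContentType(filename: string): string {
  // "app_123/backup-2022-09-02T10:00:00.000Z.tar.gz".split(".").pop() === "gz"
  const extension = filename.split(".").pop() as string
  return CONTENT_TYPES[extension] || "application/octet-stream"
}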
@@ -1,4 +1,4 @@
 export enum JobQueue {
-  AUTOMATIONS = "automationQueue",
-  APP_BACKUPS = "appBackupQueue",
+  AUTOMATION = "automationQueue",
+  APP_BACKUP = "appBackupQueue",
 }
@@ -34,10 +34,10 @@ function handleStalled(queue: Queue, removeStalled?: StalledFn) {
 function logging(queue: Queue, jobQueue: JobQueue) {
   let eventType: string
   switch (jobQueue) {
-    case JobQueue.AUTOMATIONS:
+    case JobQueue.AUTOMATION:
       eventType = "automation-event"
       break
-    case JobQueue.APP_BACKUPS:
+    case JobQueue.APP_BACKUP:
       eventType = "app-backup-event"
       break
   }
@@ -16,10 +16,10 @@ async function cleanup() {
   }
 }
 
-export function createQueue(
+export function createQueue<T>(
   jobQueue: JobQueue,
   removeStalled?: StalledFn
-): BullQueue.Queue {
+): BullQueue.Queue<T> {
   const queueConfig: any = redisProtocolUrl || { redis: opts }
   let queue: any
   if (env.isTest()) {
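Making createQueue generic means job.data is typed at the call site instead of being any. A hedged sketch of how the app backup queue could be constructed with the AppBackupQueueData interface added later in this commit, assuming that interface is exported from @budibase/types; the variable name and the process callback are illustrative, and the real queue wiring lives in @budibase/pro and is not shown in this diff:

import { queue } from "@budibase/backend-core"
import { AppBackupQueueData } from "@budibase/types"

// Hypothetical usage of the new generic signature.
const appBackupQueue = queue.createQueue<AppBackupQueueData>(
  queue.JobQueue.APP_BACKUP
)

appBackupQueue.process(async job => {
  // job.data is now AppBackupQueueData rather than any
  const { appId, trigger, name } = job.data
  console.log(`queued backup for ${appId} (${trigger}${name ? `: ${name}` : ""})`)
})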
@@ -698,6 +698,14 @@
     "@types/connect" "*"
     "@types/node" "*"
 
+"@types/bull@^3.15.9":
+  version "3.15.9"
+  resolved "https://registry.yarnpkg.com/@types/bull/-/bull-3.15.9.tgz#e10e0901ec3762bff85716b3c580277960751c93"
+  integrity sha512-MPUcyPPQauAmynoO3ezHAmCOhbB0pWmYyijr/5ctaCqhbKWsjW0YCod38ZcLzUBprosfZ9dPqfYIcfdKjk7RNQ==
+  dependencies:
+    "@types/ioredis" "*"
+    "@types/redis" "^2.8.0"
+
 "@types/chance@1.1.3":
   version "1.1.3"
   resolved "https://registry.yarnpkg.com/@types/chance/-/chance-1.1.3.tgz#d19fe9391288d60fdccd87632bfc9ab2b4523fea"
@@ -768,7 +776,7 @@
   resolved "https://registry.yarnpkg.com/@types/http-errors/-/http-errors-1.8.2.tgz#7315b4c4c54f82d13fa61c228ec5c2ea5cc9e0e1"
   integrity sha512-EqX+YQxINb+MeXaIqYDASb6U6FCHbWjkj4a1CKDBks3d/QiB2+PqBLyO72vLDgAO1wUI4O+9gweRcQK11bTL/w==
 
-"@types/ioredis@^4.28.10":
+"@types/ioredis@*", "@types/ioredis@^4.28.10":
   version "4.28.10"
   resolved "https://registry.yarnpkg.com/@types/ioredis/-/ioredis-4.28.10.tgz#40ceb157a4141088d1394bb87c98ed09a75a06ff"
   integrity sha512-69LyhUgrXdgcNDv7ogs1qXZomnfOEnSmrmMFqKgt1XMJxmoOSG/u3wYy13yACIfKuMJ8IhKgHafDO3sx19zVQQ==
@@ -37,6 +37,7 @@ import {
 } from "./utilities/workerRequests"
 import { watch } from "./watch"
 import { initialise as initialiseWebsockets } from "./websocket"
+import sdk from "./sdk"
 
 const app = new Koa()
 
@@ -108,6 +109,7 @@ module.exports = server.listen(env.PORT || 0, async () => {
   eventEmitter.emitPort(env.PORT)
   fileSystem.init()
   await redis.init()
+  await sdk.backups.init()
 
   // run migrations on startup if not done via http
   // not recommended in a clustered environment
@@ -5,7 +5,7 @@ const { queue } = require("@budibase/backend-core")
 const automation = require("../threads/automation")
 
 let automationQueue = queue.createQueue(
-  queue.JobQueue.AUTOMATIONS,
+  queue.JobQueue.AUTOMATION,
   automation.removeStalled
 )
 
@@ -0,0 +1,39 @@
+import { backups } from "@budibase/pro"
+import { objectStore, tenancy } from "@budibase/backend-core"
+import { exportApp } from "./exports"
+import { Job } from "bull"
+import fs from "fs"
+import env from "../../../environment"
+
+export async function init() {
+  await backups.addAppBackupProcessor(async (job: Job) => {
+    const appId = job.data.appId,
+      trigger = job.data.trigger,
+      name = job.data.name
+    const createdAt = new Date().toISOString()
+    const tarPath = await exportApp(appId, { tar: true })
+    let filename = `${appId}/backup-${createdAt}.tar.gz`
+    // add the tenant to the bucket path if backing up within a multi-tenant environment
+    if (env.MULTI_TENANCY) {
+      const tenantId = tenancy.getTenantIDFromAppID(appId)
+      filename = `${tenantId}/${filename}`
+    }
+    const bucket = objectStore.ObjectStoreBuckets.BACKUPS
+    const metadata = {
+      appId,
+      createdAt,
+      trigger,
+      name,
+    }
+    await objectStore.upload({
+      path: tarPath,
+      type: "application/gzip",
+      bucket,
+      filename,
+      metadata,
+    })
+    await backups.storeAppBackupMetadata(filename, metadata)
+    // clear up the tarball after uploading it
+    fs.rmSync(tarPath)
+  })
+}
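The processor above only fires when something adds a job to the app backup queue; the enqueuing side lives in @budibase/pro and is not shown in this diff. As a hedged sketch, assuming pro hands back the Bull queue it creates with JobQueue.APP_BACKUP, enqueuing a backup amounts to pushing an AppBackupQueueData payload (the helper name below is illustrative):

import BullQueue from "bull"
import { AppBackupQueueData } from "@budibase/types"

// Hypothetical helper: however pro exposes it, the queue carries AppBackupQueueData.
export async function enqueueAppBackup(
  appBackupQueue: BullQueue.Queue<AppBackupQueueData>,
  data: AppBackupQueueData
) {
  // The processor registered by addAppBackupProcessor() above will export the
  // app to a tarball, upload it to the BACKUPS bucket and store its metadata.
  await appBackupQueue.add(data)
}

A caller would then pass a payload along the lines of { appId, trigger, name }, matching the AppBackupQueueData shape defined in the types hunk below.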
@@ -1,7 +1,9 @@
 import * as exportApps from "./exports"
 import * as importApps from "./imports"
+import * as backup from "./backup"
 
 export default {
   ...exportApps,
   ...importApps,
+  ...backup,
 }
@@ -4,6 +4,7 @@ export interface SearchAppBackupsRequest {
   trigger: AppBackupTrigger
   startDate: string
   endDate: string
+  page?: string
 }
 
 export interface CreateAppBackupRequest {
@@ -6,16 +6,35 @@ export enum AppBackupTrigger {
   SCHEDULED = "scheduled",
 }
 
-export interface AppBackupContents {
-  datasources: string[]
-  screens: string[]
-  automations: string[]
-}
-
 export interface AppBackup extends Document {
   trigger: AppBackupTrigger
   name: string
-  date: string
-  userId: string
-  contents: AppBackupContents
+  createdAt: string
+  filename: string
+  appId: string
+  userId?: string
+  contents?: {
+    datasources: string[]
+    screens: string[]
+    automations: string[]
+  }
 }
+
+export type AppBackupFetchOpts = {
+  trigger?: AppBackupTrigger
+  limit?: number
+  page?: string
+  paginate?: boolean
+  startDate?: string
+  endDate?: string
+}
+
+export interface AppBackupQueueData {
+  trigger: AppBackupTrigger
+  name?: string
+  appId: string
+}
+
+export interface AppBackupMetadata extends AppBackupQueueData {
+  createdAt: string
+}
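For clarity on how the new types relate: AppBackupQueueData is the payload placed on the Bull queue, and AppBackupMetadata is that same payload stamped with createdAt before being stored next to the uploaded tarball, which is exactly what the processor in the new backup file does. A minimal sketch, assuming these interfaces are exported from @budibase/types and using an illustrative helper name:

import { AppBackupMetadata, AppBackupQueueData } from "@budibase/types"

// The persisted metadata is just the queue payload plus the timestamp that is
// also baked into the tarball's object-store filename.
function buildBackupMetadata(data: AppBackupQueueData): AppBackupMetadata {
  return {
    ...data,
    createdAt: new Date().toISOString(),
  }
}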