Merge remote-tracking branch 'origin/feature/app-backups' into feature/backups-ui
commit 2065455a37
@@ -670,6 +670,11 @@ has@^1.0.3:
   dependencies:
     function-bind "^1.1.1"
 
+html5-qrcode@^2.2.1:
+  version "2.2.3"
+  resolved "https://registry.yarnpkg.com/html5-qrcode/-/html5-qrcode-2.2.3.tgz#5acb826860365e7c7ab91e1e14528ea16a502e8a"
+  integrity sha512-9CtEz5FVT56T76entiQxyrASzBWl8Rm30NHiQH8T163Eml5LS14BoZlYel9igxbikOt7O8KhvrT3awN1Y2HMqw==
+
 htmlparser2@^6.0.0:
   version "6.1.0"
   resolved "https://registry.yarnpkg.com/htmlparser2/-/htmlparser2-6.1.0.tgz#c4d762b6c3371a05dbe65e94ae43a9f845fb8fb7"
@@ -1,23 +1,25 @@
 import Deployment from "./Deployment"
 import {
-  Replication,
-  getProdAppID,
   getDevelopmentAppID,
+  getProdAppID,
+  Replication,
 } from "@budibase/backend-core/db"
 import { DocumentType, getAutomationParams } from "../../../db/utils"
 import {
+  clearMetadata,
   disableAllCrons,
   enableCronTrigger,
-  clearMetadata,
 } from "../../../automations/utils"
 import { app as appCache } from "@budibase/backend-core/cache"
 import {
-  getAppId,
   getAppDB,
-  getProdAppDB,
+  getAppId,
   getDevAppDB,
+  getProdAppDB,
 } from "@budibase/backend-core/context"
 import { events } from "@budibase/backend-core"
+import { backups } from "@budibase/pro"
+import { AppBackupTrigger } from "@budibase/types"
 
 // the max time we can wait for an invalidation to complete before considering it failed
 const MAX_PENDING_TIME_MS = 30 * 60000
@@ -98,13 +100,18 @@ async function initDeployedApp(prodAppId: any) {
   console.log("Enabled cron triggers for deployed app..")
 }
 
-async function deployApp(deployment: any) {
+async function deployApp(deployment: any, userId: string) {
   let replication
   try {
     const appId = getAppId()
     const devAppId = getDevelopmentAppID(appId)
     const productionAppId = getProdAppID(appId)
 
+    // trigger backup initially
+    await backups.triggerAppBackup(productionAppId, AppBackupTrigger.PUBLISH, {
+      createdBy: userId,
+    })
+
     const config: any = {
       source: devAppId,
       target: productionAppId,
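
Note: a minimal sketch (not part of this commit) of the publish flow the hunk above adds: snapshot the production app before the dev app is replicated over it. The names below (publishApp, triggerAppBackup, replicate, BackupTrigger) are hypothetical stand-ins for the Budibase internals used in the diff.

// Hypothetical stand-ins for the helpers used in the diff above.
type BackupTrigger = "PUBLISH" | "SCHEDULED" | "MANUAL"

interface DeployServices {
  triggerAppBackup: (
    appId: string,
    trigger: BackupTrigger,
    opts: { createdBy: string }
  ) => Promise<void>
  replicate: (source: string, target: string) => Promise<void>
}

// Publish = back up prod first, then replicate dev -> prod.
async function publishApp(
  devAppId: string,
  prodAppId: string,
  userId: string,
  services: DeployServices
) {
  // safety net: snapshot the current production app before overwriting it
  await services.triggerAppBackup(prodAppId, "PUBLISH", { createdBy: userId })
  await services.replicate(devAppId, prodAppId)
}
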
@@ -205,7 +212,7 @@ const _deployApp = async function (ctx: any) {
 
   console.log("Deploying app...")
 
-  let app = await deployApp(deployment)
+  let app = await deployApp(deployment, ctx.user._id)
 
   await events.app.published(app)
   ctx.body = deployment
@@ -209,6 +209,4 @@ exports.AutomationErrors = {
 // pass through the list from the auth/core lib
 exports.ObjectStoreBuckets = objectStore.ObjectStoreBuckets
 
-exports.ATTACHMENT_DIR = "attachments"
-
 exports.MAX_AUTOMATION_RECURRING_ERRORS = 5
@@ -1,16 +1,55 @@
 import { backups } from "@budibase/pro"
-import { objectStore, tenancy } from "@budibase/backend-core"
+import { objectStore, tenancy, db as dbCore } from "@budibase/backend-core"
+import { AppBackupQueueData } from "@budibase/types"
 import { exportApp } from "./exports"
+import { importApp } from "./imports"
 import { Job } from "bull"
 import fs from "fs"
 import env from "../../../environment"
 
-async function importProcessor(job: Job) {}
+async function removeExistingApp(devId: string) {
+  const devDb = dbCore.dangerousGetDB(devId, { skip_setup: true })
+  await devDb.destroy()
+}
+
+async function importProcessor(job: Job) {
+  const data: AppBackupQueueData = job.data
+  const appId = data.appId,
+    backupId = data.import!.backupId
+  const tenantId = tenancy.getTenantIDFromAppID(appId)
+  tenancy.doInTenant(tenantId, async () => {
+    const devAppId = dbCore.getDevAppID(appId)
+    const performImport = async (path: string) => {
+      await importApp(devAppId, dbCore.dangerousGetDB(devAppId), {
+        file: {
+          type: "application/gzip",
+          path,
+        },
+        key: path,
+      })
+    }
+    // initially export the current state to disk - incase something goes wrong
+    const backupTarPath = await exportApp(devAppId, { tar: true })
+    // get the backup ready on disk
+    const { path } = await backups.downloadAppBackup(backupId)
+    // start by removing app database and contents of bucket - which will be updated
+    await removeExistingApp(devAppId)
+    try {
+      await performImport(path)
+    } catch (err) {
+      // rollback - clear up failed import and re-import the pre-backup
+      await removeExistingApp(devAppId)
+      await performImport(backupTarPath)
+    }
+    fs.rmSync(backupTarPath)
+  })
+}
 
 async function exportProcessor(job: Job) {
-  const appId = job.data.appId,
-    trigger = job.data.trigger,
-    name = job.data.name
+  const data: AppBackupQueueData = job.data
+  const appId = data.appId,
+    trigger = data.export!.trigger,
+    name = data.export!.name
   const tenantId = tenancy.getTenantIDFromAppID(appId)
   await tenancy.doInTenant(tenantId, async () => {
     const createdAt = new Date().toISOString()
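
A rough sketch (not the commit's code) of the restore-with-rollback pattern importProcessor follows above: snapshot the current app to a tarball, wipe it, attempt the restore, and re-import the snapshot if the restore throws. The exportToTar, wipeApp, and importFromTar names are hypothetical placeholders for the exportApp / removeExistingApp / importApp calls in the diff.

import fs from "fs"

// Hypothetical placeholders for exportApp / removeExistingApp / importApp.
type ExportToTar = (appId: string) => Promise<string> // returns tarball path
type WipeApp = (appId: string) => Promise<void>
type ImportFromTar = (appId: string, tarPath: string) => Promise<void>

async function restoreWithRollback(
  appId: string,
  backupTarPath: string,
  exportToTar: ExportToTar,
  wipeApp: WipeApp,
  importFromTar: ImportFromTar
) {
  // keep a pre-restore snapshot on disk in case the restore fails
  const preRestoreTar = await exportToTar(appId)
  await wipeApp(appId)
  try {
    await importFromTar(appId, backupTarPath)
  } catch (err) {
    // rollback: clear the failed import and re-import the pre-restore snapshot
    await wipeApp(appId)
    await importFromTar(appId, preRestoreTar)
  } finally {
    // the snapshot is only needed for rollback, remove it either way
    fs.rmSync(preRestoreTar)
  }
}
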
@@ -1,4 +1,2 @@
-import { ATTACHMENT_DIR as attachmentDir } from "../../../constants"
 export const DB_EXPORT_FILE = "db.txt"
-export const ATTACHMENT_DIR = attachmentDir
 export const GLOBAL_DB_EXPORT_FILE = "global.txt"
@@ -1,6 +1,9 @@
 import { db as dbCore } from "@budibase/backend-core"
 import { budibaseTempDir } from "../../../utilities/budibaseDir"
-import { retrieveDirectory } from "../../../utilities/fileSystem/utilities"
+import {
+  retrieveDirectory,
+  retrieve,
+} from "../../../utilities/fileSystem/utilities"
 import { streamFile } from "../../../utilities/fileSystem"
 import { ObjectStoreBuckets } from "../../../constants"
 import {
@@ -8,11 +11,7 @@ import {
   TABLE_ROW_PREFIX,
   USER_METDATA_PREFIX,
 } from "../../../db/utils"
-import {
-  DB_EXPORT_FILE,
-  GLOBAL_DB_EXPORT_FILE,
-  ATTACHMENT_DIR,
-} from "./constants"
+import { DB_EXPORT_FILE, GLOBAL_DB_EXPORT_FILE } from "./constants"
 import fs from "fs"
 import { join } from "path"
 const uuid = require("uuid/v4")
@@ -87,21 +86,19 @@ function defineFilter(excludeRows?: boolean) {
  */
 export async function exportApp(appId: string, config?: ExportOpts) {
   const prodAppId = dbCore.getProdAppID(appId)
-  const attachmentsPath = `${prodAppId}/${ATTACHMENT_DIR}`
-  // export attachments to tmp
-  const tmpPath = await retrieveDirectory(
-    ObjectStoreBuckets.APPS,
-    attachmentsPath
-  )
-  const downloadedPath = join(tmpPath, attachmentsPath),
-    tmpAttachmentPath = join(tmpPath, ATTACHMENT_DIR)
+  const appPath = `${prodAppId}/`
+  // export bucket contents
+  const tmpPath = await retrieveDirectory(ObjectStoreBuckets.APPS, appPath)
+  const downloadedPath = join(tmpPath, appPath)
   if (fs.existsSync(downloadedPath)) {
-    // move out of app directory, simplify structure
-    fs.renameSync(downloadedPath, tmpAttachmentPath)
+    const allFiles = fs.readdirSync(downloadedPath)
+    for (let file of allFiles) {
+      const path = join(downloadedPath, file)
+      // move out of app directory, simplify structure
+      fs.renameSync(path, join(downloadedPath, "..", file))
+    }
     // remove the old app directory created by object export
-    fs.rmdirSync(join(tmpPath, prodAppId))
-  } else {
-    fs.mkdirSync(tmpAttachmentPath)
+    fs.rmdirSync(downloadedPath)
   }
   // enforce an export of app DB to the tmp path
   const dbPath = join(tmpPath, DB_EXPORT_FILE)
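
For reference, a standalone sketch of the directory-flattening step in exportApp above, assuming plain Node fs/path and none of the Budibase helpers: move everything out of the downloaded app folder into its parent, then delete the emptied folder.

import fs from "fs"
import { join } from "path"

// Move the contents of `dir` into its parent directory, then remove `dir`.
function flattenDirectory(dir: string) {
  if (!fs.existsSync(dir)) {
    return
  }
  for (const file of fs.readdirSync(dir)) {
    // move out of the app directory to simplify the export structure
    fs.renameSync(join(dir, file), join(dir, "..", file))
  }
  // the directory is empty now that everything has been moved up a level
  fs.rmdirSync(dir)
}
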
@@ -113,7 +110,7 @@ export async function exportApp(appId: string, config?: ExportOpts) {
   // if tar requested, return where the tarball is
   if (config?.tar) {
     // now the tmpPath contains both the DB export and attachments, tar this
-    const tarPath = tarFilesToTmp(tmpPath, [ATTACHMENT_DIR, DB_EXPORT_FILE])
+    const tarPath = tarFilesToTmp(tmpPath, fs.readdirSync(tmpPath))
     // cleanup the tmp export files as tarball returned
     fs.rmSync(tmpPath, { recursive: true, force: true })
     return tarPath
@@ -1,12 +1,11 @@
 import { db as dbCore } from "@budibase/backend-core"
 import { TABLE_ROW_PREFIX } from "../../../db/utils"
 import { budibaseTempDir } from "../../../utilities/budibaseDir"
+import { DB_EXPORT_FILE, GLOBAL_DB_EXPORT_FILE } from "./constants"
 import {
-  DB_EXPORT_FILE,
-  ATTACHMENT_DIR,
-  GLOBAL_DB_EXPORT_FILE,
-} from "./constants"
-import { uploadDirectory } from "../../../utilities/fileSystem/utilities"
+  uploadDirectory,
+  upload,
+} from "../../../utilities/fileSystem/utilities"
 import { ObjectStoreBuckets, FieldTypes } from "../../../constants"
 import { join } from "path"
 import fs from "fs"
@@ -127,14 +126,32 @@ export async function importApp(
     template.file && fs.lstatSync(template.file.path).isDirectory()
   if (template.file && (isTar || isDirectory)) {
     const tmpPath = isTar ? untarFile(template.file) : template.file.path
-    const attachmentPath = join(tmpPath, ATTACHMENT_DIR)
+    const contents = fs.readdirSync(tmpPath)
     // have to handle object import
-    if (fs.existsSync(attachmentPath)) {
-      await uploadDirectory(
-        ObjectStoreBuckets.APPS,
-        attachmentPath,
-        join(prodAppId, ATTACHMENT_DIR)
-      )
+    if (contents.length) {
+      let promises = []
+      let excludedFiles = [GLOBAL_DB_EXPORT_FILE, DB_EXPORT_FILE]
+      for (let filename of contents) {
+        const path = join(tmpPath, filename)
+        if (excludedFiles.includes(filename)) {
+          continue
+        }
+        filename = join(prodAppId, filename)
+        if (fs.lstatSync(path).isDirectory()) {
+          promises.push(
+            uploadDirectory(ObjectStoreBuckets.APPS, path, filename)
+          )
+        } else {
+          promises.push(
+            upload({
+              bucket: ObjectStoreBuckets.APPS,
+              path,
+              filename,
+            })
+          )
+        }
+      }
+      await Promise.all(promises)
     }
     dbStream = fs.createReadStream(join(tmpPath, DB_EXPORT_FILE))
   } else {
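
Finally, a hedged sketch (not the commit's code) of the upload loop in importApp above: walk the extracted backup, skip the database export files, and push directories and plain files back to the apps bucket in parallel. uploadDirectory and uploadFile here are hypothetical stand-ins for the object-store utilities imported in the diff; "db.txt" and "global.txt" are the export file names from the constants hunk.

import fs from "fs"
import { join } from "path"

// Hypothetical stand-ins for the object-store utilities used in the diff.
type UploadDirectory = (bucket: string, localPath: string, key: string) => Promise<void>
type UploadFile = (opts: { bucket: string; path: string; filename: string }) => Promise<void>

async function uploadExtractedBackup(
  tmpPath: string,
  prodAppId: string,
  bucket: string,
  uploadDirectory: UploadDirectory,
  uploadFile: UploadFile,
  excludedFiles: string[] = ["global.txt", "db.txt"]
) {
  const promises: Promise<void>[] = []
  for (const filename of fs.readdirSync(tmpPath)) {
    if (excludedFiles.includes(filename)) {
      continue // DB exports are restored separately, not uploaded to the bucket
    }
    const path = join(tmpPath, filename)
    const key = join(prodAppId, filename)
    if (fs.lstatSync(path).isDirectory()) {
      promises.push(uploadDirectory(bucket, path, key))
    } else {
      promises.push(uploadFile({ bucket, path, filename: key }))
    }
  }
  await Promise.all(promises)
}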