diff --git a/packages/client/yarn.lock b/packages/client/yarn.lock
index 4ed5b395fe..fbe688c0b7 100644
--- a/packages/client/yarn.lock
+++ b/packages/client/yarn.lock
@@ -670,6 +670,11 @@ has@^1.0.3:
   dependencies:
     function-bind "^1.1.1"
 
+html5-qrcode@^2.2.1:
+  version "2.2.3"
+  resolved "https://registry.yarnpkg.com/html5-qrcode/-/html5-qrcode-2.2.3.tgz#5acb826860365e7c7ab91e1e14528ea16a502e8a"
+  integrity sha512-9CtEz5FVT56T76entiQxyrASzBWl8Rm30NHiQH8T163Eml5LS14BoZlYel9igxbikOt7O8KhvrT3awN1Y2HMqw==
+
 htmlparser2@^6.0.0:
   version "6.1.0"
   resolved "https://registry.yarnpkg.com/htmlparser2/-/htmlparser2-6.1.0.tgz#c4d762b6c3371a05dbe65e94ae43a9f845fb8fb7"
diff --git a/packages/server/src/api/controllers/deploy/index.ts b/packages/server/src/api/controllers/deploy/index.ts
index a51e7ad6ec..a1cb905930 100644
--- a/packages/server/src/api/controllers/deploy/index.ts
+++ b/packages/server/src/api/controllers/deploy/index.ts
@@ -1,23 +1,25 @@
 import Deployment from "./Deployment"
 import {
-  Replication,
-  getProdAppID,
   getDevelopmentAppID,
+  getProdAppID,
+  Replication,
 } from "@budibase/backend-core/db"
 import { DocumentType, getAutomationParams } from "../../../db/utils"
 import {
+  clearMetadata,
   disableAllCrons,
   enableCronTrigger,
-  clearMetadata,
 } from "../../../automations/utils"
 import { app as appCache } from "@budibase/backend-core/cache"
 import {
-  getAppId,
   getAppDB,
-  getProdAppDB,
+  getAppId,
   getDevAppDB,
+  getProdAppDB,
 } from "@budibase/backend-core/context"
 import { events } from "@budibase/backend-core"
+import { backups } from "@budibase/pro"
+import { AppBackupTrigger } from "@budibase/types"
 
 // the max time we can wait for an invalidation to complete before considering it failed
 const MAX_PENDING_TIME_MS = 30 * 60000
@@ -98,13 +100,18 @@ async function initDeployedApp(prodAppId: any) {
   console.log("Enabled cron triggers for deployed app..")
 }
 
-async function deployApp(deployment: any) {
+async function deployApp(deployment: any, userId: string) {
   let replication
   try {
     const appId = getAppId()
     const devAppId = getDevelopmentAppID(appId)
     const productionAppId = getProdAppID(appId)
 
+    // trigger backup initially
+    await backups.triggerAppBackup(productionAppId, AppBackupTrigger.PUBLISH, {
+      createdBy: userId,
+    })
+
     const config: any = {
       source: devAppId,
       target: productionAppId,
@@ -205,7 +212,7 @@ const _deployApp = async function (ctx: any) {
 
   console.log("Deploying app...")
 
-  let app = await deployApp(deployment)
+  let app = await deployApp(deployment, ctx.user._id)
 
   await events.app.published(app)
   ctx.body = deployment
diff --git a/packages/server/src/constants/index.js b/packages/server/src/constants/index.js
index 6b5bc4bf83..a3bccae754 100644
--- a/packages/server/src/constants/index.js
+++ b/packages/server/src/constants/index.js
@@ -209,6 +209,4 @@ exports.AutomationErrors = {
 // pass through the list from the auth/core lib
 exports.ObjectStoreBuckets = objectStore.ObjectStoreBuckets
 
-exports.ATTACHMENT_DIR = "attachments"
-
 exports.MAX_AUTOMATION_RECURRING_ERRORS = 5
diff --git a/packages/server/src/sdk/app/backups/backup.ts b/packages/server/src/sdk/app/backups/backup.ts
index cb758536bd..cc2338e274 100644
--- a/packages/server/src/sdk/app/backups/backup.ts
+++ b/packages/server/src/sdk/app/backups/backup.ts
@@ -1,16 +1,55 @@
 import { backups } from "@budibase/pro"
-import { objectStore, tenancy } from "@budibase/backend-core"
+import { objectStore, tenancy, db as dbCore } from "@budibase/backend-core"
+import { AppBackupQueueData } from "@budibase/types"
 import { exportApp } from "./exports"
+import { importApp } from "./imports"
 import { Job } from "bull"
 import fs from "fs"
 import env from "../../../environment"
 
-async function importProcessor(job: Job) {}
+async function removeExistingApp(devId: string) {
+  const devDb = dbCore.dangerousGetDB(devId, { skip_setup: true })
+  await devDb.destroy()
+}
+
+async function importProcessor(job: Job) {
+  const data: AppBackupQueueData = job.data
+  const appId = data.appId,
+    backupId = data.import!.backupId
+  const tenantId = tenancy.getTenantIDFromAppID(appId)
+  await tenancy.doInTenant(tenantId, async () => {
+    const devAppId = dbCore.getDevAppID(appId)
+    const performImport = async (path: string) => {
+      await importApp(devAppId, dbCore.dangerousGetDB(devAppId), {
+        file: {
+          type: "application/gzip",
+          path,
+        },
+        key: path,
+      })
+    }
+    // initially export the current state to disk - in case something goes wrong
+    const backupTarPath = await exportApp(devAppId, { tar: true })
+    // get the backup ready on disk
+    const { path } = await backups.downloadAppBackup(backupId)
+    // start by removing app database and contents of bucket - which will be updated
+    await removeExistingApp(devAppId)
+    try {
+      await performImport(path)
+    } catch (err) {
+      // rollback - clear up failed import and re-import the pre-backup
+      await removeExistingApp(devAppId)
+      await performImport(backupTarPath)
+    }
+    fs.rmSync(backupTarPath)
+  })
+}
 
 async function exportProcessor(job: Job) {
-  const appId = job.data.appId,
-    trigger = job.data.trigger,
-    name = job.data.name
+  const data: AppBackupQueueData = job.data
+  const appId = data.appId,
+    trigger = data.export!.trigger,
+    name = data.export!.name
   const tenantId = tenancy.getTenantIDFromAppID(appId)
   await tenancy.doInTenant(tenantId, async () => {
     const createdAt = new Date().toISOString()
diff --git a/packages/server/src/sdk/app/backups/constants.ts b/packages/server/src/sdk/app/backups/constants.ts
index f022168846..2f011ea2de 100644
--- a/packages/server/src/sdk/app/backups/constants.ts
+++ b/packages/server/src/sdk/app/backups/constants.ts
@@ -1,4 +1,2 @@
-import { ATTACHMENT_DIR as attachmentDir } from "../../../constants"
 export const DB_EXPORT_FILE = "db.txt"
-export const ATTACHMENT_DIR = attachmentDir
 export const GLOBAL_DB_EXPORT_FILE = "global.txt"
diff --git a/packages/server/src/sdk/app/backups/exports.ts b/packages/server/src/sdk/app/backups/exports.ts
index 4656a83d51..4ca8c439a7 100644
--- a/packages/server/src/sdk/app/backups/exports.ts
+++ b/packages/server/src/sdk/app/backups/exports.ts
@@ -1,6 +1,9 @@
 import { db as dbCore } from "@budibase/backend-core"
 import { budibaseTempDir } from "../../../utilities/budibaseDir"
-import { retrieveDirectory } from "../../../utilities/fileSystem/utilities"
+import {
+  retrieveDirectory,
+  retrieve,
+} from "../../../utilities/fileSystem/utilities"
 import { streamFile } from "../../../utilities/fileSystem"
 import { ObjectStoreBuckets } from "../../../constants"
 import {
@@ -8,11 +11,7 @@ import {
   TABLE_ROW_PREFIX,
   USER_METDATA_PREFIX,
 } from "../../../db/utils"
-import {
-  DB_EXPORT_FILE,
-  GLOBAL_DB_EXPORT_FILE,
-  ATTACHMENT_DIR,
-} from "./constants"
+import { DB_EXPORT_FILE, GLOBAL_DB_EXPORT_FILE } from "./constants"
 import fs from "fs"
 import { join } from "path"
 const uuid = require("uuid/v4")
@@ -87,21 +86,19 @@ function defineFilter(excludeRows?: boolean) {
  */
 export async function exportApp(appId: string, config?: ExportOpts) {
   const prodAppId = dbCore.getProdAppID(appId)
-  const attachmentsPath = `${prodAppId}/${ATTACHMENT_DIR}`
-  // export attachments to tmp
-  const tmpPath = await retrieveDirectory(
-    ObjectStoreBuckets.APPS,
-    attachmentsPath
-  )
-  const downloadedPath = join(tmpPath, attachmentsPath),
-    tmpAttachmentPath = join(tmpPath, ATTACHMENT_DIR)
+  const appPath = `${prodAppId}/`
+  // export bucket contents
+  const tmpPath = await retrieveDirectory(ObjectStoreBuckets.APPS, appPath)
+  const downloadedPath = join(tmpPath, appPath)
   if (fs.existsSync(downloadedPath)) {
-    // move out of app directory, simplify structure
-    fs.renameSync(downloadedPath, tmpAttachmentPath)
+    const allFiles = fs.readdirSync(downloadedPath)
+    for (let file of allFiles) {
+      const path = join(downloadedPath, file)
+      // move out of app directory, simplify structure
+      fs.renameSync(path, join(downloadedPath, "..", file))
+    }
     // remove the old app directory created by object export
-    fs.rmdirSync(join(tmpPath, prodAppId))
-  } else {
-    fs.mkdirSync(tmpAttachmentPath)
+    fs.rmdirSync(downloadedPath)
   }
   // enforce an export of app DB to the tmp path
   const dbPath = join(tmpPath, DB_EXPORT_FILE)
@@ -113,7 +110,7 @@ export async function exportApp(appId: string, config?: ExportOpts) {
   // if tar requested, return where the tarball is
   if (config?.tar) {
     // now the tmpPath contains both the DB export and attachments, tar this
-    const tarPath = tarFilesToTmp(tmpPath, [ATTACHMENT_DIR, DB_EXPORT_FILE])
+    const tarPath = tarFilesToTmp(tmpPath, fs.readdirSync(tmpPath))
     // cleanup the tmp export files as tarball returned
     fs.rmSync(tmpPath, { recursive: true, force: true })
     return tarPath
diff --git a/packages/server/src/sdk/app/backups/imports.ts b/packages/server/src/sdk/app/backups/imports.ts
index 60ce63d51e..b29a9eede5 100644
--- a/packages/server/src/sdk/app/backups/imports.ts
+++ b/packages/server/src/sdk/app/backups/imports.ts
@@ -1,12 +1,11 @@
 import { db as dbCore } from "@budibase/backend-core"
 import { TABLE_ROW_PREFIX } from "../../../db/utils"
 import { budibaseTempDir } from "../../../utilities/budibaseDir"
+import { DB_EXPORT_FILE, GLOBAL_DB_EXPORT_FILE } from "./constants"
 import {
-  DB_EXPORT_FILE,
-  ATTACHMENT_DIR,
-  GLOBAL_DB_EXPORT_FILE,
-} from "./constants"
-import { uploadDirectory } from "../../../utilities/fileSystem/utilities"
+  uploadDirectory,
+  upload,
+} from "../../../utilities/fileSystem/utilities"
 import { ObjectStoreBuckets, FieldTypes } from "../../../constants"
 import { join } from "path"
 import fs from "fs"
@@ -127,14 +126,32 @@ export async function importApp(
     template.file && fs.lstatSync(template.file.path).isDirectory()
   if (template.file && (isTar || isDirectory)) {
     const tmpPath = isTar ? untarFile(template.file) : template.file.path
-    const attachmentPath = join(tmpPath, ATTACHMENT_DIR)
+    const contents = fs.readdirSync(tmpPath)
     // have to handle object import
-    if (fs.existsSync(attachmentPath)) {
-      await uploadDirectory(
-        ObjectStoreBuckets.APPS,
-        attachmentPath,
-        join(prodAppId, ATTACHMENT_DIR)
-      )
+    if (contents.length) {
+      let promises = []
+      let excludedFiles = [GLOBAL_DB_EXPORT_FILE, DB_EXPORT_FILE]
+      for (let filename of contents) {
+        const path = join(tmpPath, filename)
+        if (excludedFiles.includes(filename)) {
+          continue
+        }
+        filename = join(prodAppId, filename)
+        if (fs.lstatSync(path).isDirectory()) {
+          promises.push(
+            uploadDirectory(ObjectStoreBuckets.APPS, path, filename)
+          )
+        } else {
+          promises.push(
+            upload({
+              bucket: ObjectStoreBuckets.APPS,
+              path,
+              filename,
+            })
+          )
+        }
+      }
+      await Promise.all(promises)
     }
     dbStream = fs.createReadStream(join(tmpPath, DB_EXPORT_FILE))
   } else {