Full import implementation (needs further testing): untars the file containing all apps, then adds each of them individually.

parent 2eae3f2a6c
commit d1c9a56e9a
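
For orientation before the diff hunks: condensed, the new import path looks like the sketch below. It only restates the importApps body added to the cloud controller in this commit; createApp is the local helper defined there, and importAllApps/uploadedFile are illustrative names rather than part of the change.

const fs = require("fs")
const { join } = require("path")
const { getGlobalDB } = require("@budibase/backend-core/tenancy")
const { stringToReadStream } = require("../../utilities")
const { APP_PREFIX } = require("../../db/utils")
const sdk = require("../../sdk")

async function importAllApps(uploadedFile) {
  // untar the uploaded cloud export into a temp directory
  const extractDir = sdk.backups.untarFile(uploadedFile)
  // the global DB dump sits alongside one directory per exported app
  const globalDbDump = sdk.backups.getGlobalDBFile(extractDir)
  // restore the global DB first, then add each app individually from its directory
  await getGlobalDB().load(stringToReadStream(globalDbDump))
  const appDirs = fs.readdirSync(extractDir).filter(dir => dir.startsWith(APP_PREFIX))
  await Promise.all(appDirs.map(dir => createApp(dir, join(extractDir, dir))))
}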

@@ -21,7 +21,6 @@
   import { API } from "api"
   import { onMount } from "svelte"
   import { apps, auth, admin, templates, licensing } from "stores/portal"
-  import download from "downloadjs"
   import { goto } from "@roxi/routify"
   import AppRow from "components/start/AppRow.svelte"
   import { AppStatus } from "constants"

@@ -140,7 +139,7 @@

   const initiateAppsExport = () => {
     try {
-      download(`/api/cloud/export`)
+      window.location = `/api/cloud/export`
       notifications.success("Apps exported successfully")
     } catch (err) {
       notifications.error(`Error exporting apps: ${err}`)

@@ -1,9 +1,45 @@
 const env = require("../../environment")
 const { getAllApps, getGlobalDBName } = require("@budibase/backend-core/db")
+const { getGlobalDB } = require("@budibase/backend-core/tenancy")
 const { streamFile } = require("../../utilities/fileSystem")
-const { DocumentType, isDevAppID } = require("../../db/utils")
+const { stringToReadStream } = require("../../utilities")
+const {
+  getDocParams,
+  DocumentType,
+  isDevAppID,
+  APP_PREFIX,
+} = require("../../db/utils")
+const { create } = require("./application")
+const { join } = require("path")
+const fs = require("fs")
 const sdk = require("../../sdk")
+
+async function createApp(appName, appDirectory) {
+  const ctx = {
+    request: {
+      body: {
+        useTemplate: true,
+        name: appName,
+      },
+      files: {
+        templateFile: {
+          path: appDirectory,
+        },
+      },
+    },
+  }
+  return create(ctx)
+}
+
+async function getAllDocType(db, docType) {
+  const response = await db.allDocs(
+    getDocParams(docType, null, {
+      include_docs: true,
+    })
+  )
+  return response.rows.map(row => row.doc)
+}

 exports.exportApps = async ctx => {
   if (env.SELF_HOSTED || !env.MULTI_TENANCY) {
     ctx.throw(400, "Exporting only allowed in multi-tenant cloud environments.")

@@ -14,10 +50,13 @@ exports.exportApps = async ctx => {
   })
   // only export the dev apps as they will be the latest, the user can republish the apps
   // in their self-hosted environment
-  let appIds = apps
-    .map(app => app.appId || app._id)
-    .filter(appId => isDevAppID(appId))
-  const tmpPath = await sdk.backups.exportMultipleApps(appIds, globalDBString)
+  let appMetadata = apps
+    .filter(app => isDevAppID(app.appId || app._id))
+    .map(app => ({ appId: app.appId || app._id, name: app.name }))
+  const tmpPath = await sdk.backups.exportMultipleApps(
+    appMetadata,
+    globalDBString
+  )
   const filename = `cloud-export-${new Date().getTime()}.tar.gz`
   ctx.attachment(filename)
   ctx.body = streamFile(tmpPath)

@@ -48,51 +87,37 @@ exports.importApps = async ctx => {
       "Import file is required and environment must be fresh to import apps."
     )
   }
+  if (ctx.request.files.importFile.type !== "application/gzip") {
+    ctx.throw(400, "Import file must be a gzipped tarball.")
+  }
+
-  // TODO: IMPLEMENT TARBALL EXTRACTION, APP IMPORT, ATTACHMENT IMPORT AND GLOBAL DB IMPORT
-  // async function getAllDocType(db, docType) {
-  //   const response = await db.allDocs(
-  //     getDocParams(docType, null, {
-  //       include_docs: true,
-  //     })
-  //   )
-  //   return response.rows.map(row => row.doc)
-  // }
-  // async function createApp(appName, appImport) {
-  //   const ctx = {
-  //     request: {
-  //       body: {
-  //         templateString: appImport,
-  //         name: appName,
-  //       },
-  //     },
-  //   }
-  //   return create(ctx)
-  // }
-  // const importFile = ctx.request.files.importFile
-  // const importString = readFileSync(importFile.path)
-  // const dbs = JSON.parse(importString)
-  // const globalDbImport = dbs.global
-  // // remove from the list of apps
-  // delete dbs.global
-  // const globalDb = getGlobalDB()
-  // // load the global db first
-  // await globalDb.load(stringToReadStream(globalDbImport))
-  // for (let [appName, appImport] of Object.entries(dbs)) {
-  //   await createApp(appName, appImport)
-  // }
-  //
-  // // if there are any users make sure to remove them
-  // let users = await getAllDocType(globalDb, DocumentType.USER)
-  // let userDeletionPromises = []
-  // for (let user of users) {
-  //   userDeletionPromises.push(globalDb.remove(user._id, user._rev))
-  // }
-  // if (userDeletionPromises.length > 0) {
-  //   await Promise.all(userDeletionPromises)
-  // }
-  //
-  // await globalDb.bulkDocs(users)
+  // initially get all the app databases out of the tarball
+  const tmpPath = sdk.backups.untarFile(ctx.request.file.importFile)
+  const globalDbImport = sdk.backups.getGlobalDBFile(tmpPath)
+  const appNames = fs
+    .readdirSync(tmpPath)
+    .filter(dir => dir.startsWith(APP_PREFIX))
+
+  const globalDb = getGlobalDB()
+  // load the global db first
+  await globalDb.load(stringToReadStream(globalDbImport))
+  const appCreationPromises = []
+  for (let appName of appNames) {
+    appCreationPromises.push(createApp(appName, join(tmpPath, appName)))
+  }
+  await Promise.all(appCreationPromises)
+
+  // if there are any users make sure to remove them
+  let users = await getAllDocType(globalDb, DocumentType.USER)
+  let userDeletionPromises = []
+  for (let user of users) {
+    userDeletionPromises.push(globalDb.remove(user._id, user._rev))
+  }
+  if (userDeletionPromises.length > 0) {
+    await Promise.all(userDeletionPromises)
+  }
+
+  await globalDb.bulkDocs(users)
   ctx.body = {
     message: "Apps successfully imported.",
   }

@@ -127,28 +127,33 @@ export async function exportApp(appId: string, config?: ExportOpts) {
 /**
  * Export all apps + global DB (if supplied) to a single tarball, this includes
  * the attachments for each app as well.
- * @param {string[]} appIds The IDs of the apps to be exported.
+ * @param {object[]} appMetadata The IDs and names of apps to export.
  * @param {string} globalDbContents The contents of the global DB to export as well.
  * @return {string} The path to the tarball.
  */
 export async function exportMultipleApps(
-  appIds: string[],
+  appMetadata: { appId: string; name: string }[],
   globalDbContents?: string
 ) {
   const tmpPath = join(budibaseTempDir(), uuid())
+  fs.mkdirSync(tmpPath)
   let exportPromises: Promise<void>[] = []
-  const exportAndMove = async (appId: string) => {
+  // export each app to a directory, then move it into the complete export
+  const exportAndMove = async (appId: string, appName: string) => {
     const path = await exportApp(appId)
     await fs.promises.rename(path, join(tmpPath, appId))
   }
-  for (let appId of appIds) {
-    exportPromises.push(exportAndMove(appId))
+  for (let metadata of appMetadata) {
+    exportPromises.push(exportAndMove(metadata.appId, metadata.name))
   }
+  // wait for all exports to finish
   await Promise.all(exportPromises)
+  // add the global DB contents
   if (globalDbContents) {
     fs.writeFileSync(join(tmpPath, GLOBAL_DB_EXPORT_FILE), globalDbContents)
   }
-  const tarPath = tarFilesToTmp(tmpPath, [...appIds, GLOBAL_DB_EXPORT_FILE])
+  const appNames = appMetadata.map(metadata => metadata.name)
+  const tarPath = tarFilesToTmp(tmpPath, [...appNames, GLOBAL_DB_EXPORT_FILE])
   // clear up the tmp path now tarball generated
   fs.rmSync(tmpPath, { recursive: true, force: true })
   return tarPath
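
The upshot of exportMultipleApps is a single tarball whose top level is one directory per exported app plus the global DB dump. A quick way to inspect one is sketched below; this is illustrative only — the filename is whatever the controller generated, and it leans on the tar package this codebase already uses.

const tar = require("tar")

// list the contents of a cloud export: prints one line per entry, i.e. the
// per-app directories and the GLOBAL_DB_EXPORT_FILE entry written above
tar.t({
  file: "cloud-export-1660000000000.tar.gz", // hypothetical export filename
  sync: true,
  onentry: entry => console.log(entry.path),
})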

@@ -1,7 +1,11 @@
 import { db as dbCore } from "@budibase/backend-core"
 import { TABLE_ROW_PREFIX } from "../../../db/utils"
 import { budibaseTempDir } from "../../../utilities/budibaseDir"
-import { DB_EXPORT_FILE, ATTACHMENT_DIR } from "./constants"
+import {
+  DB_EXPORT_FILE,
+  ATTACHMENT_DIR,
+  GLOBAL_DB_EXPORT_FILE,
+} from "./constants"
 import { uploadDirectory } from "../../../utilities/fileSystem/utilities"
 import { ObjectStoreBuckets, FieldTypes } from "../../../constants"
 import { join } from "path"

@@ -91,6 +95,22 @@ async function getTemplateStream(template: TemplateType) {
   }
 }

+export function untarFile(file: { path: string }) {
+  const tmpPath = join(budibaseTempDir(), uuid())
+  fs.mkdirSync(tmpPath)
+  // extract the tarball
+  tar.extract({
+    sync: true,
+    cwd: tmpPath,
+    file: file.path,
+  })
+  return tmpPath
+}
+
+export function getGlobalDBFile(tmpPath: string) {
+  return fs.readFileSync(join(tmpPath, GLOBAL_DB_EXPORT_FILE), "utf8")
+}
+
 export async function importApp(
   appId: string,
   db: PouchDB.Database,

@@ -98,15 +118,11 @@ export async function importApp(
 ) {
   let prodAppId = dbCore.getProdAppID(appId)
   let dbStream: any
-  if (template.file && template.file.type === "application/gzip") {
-    const tmpPath = join(budibaseTempDir(), uuid())
-    fs.mkdirSync(tmpPath)
-    // extract the tarball
-    tar.extract({
-      sync: true,
-      cwd: tmpPath,
-      file: template.file.path,
-    })
+  const isTar = template.file && template.file.type === "application/gzip"
+  const isDirectory =
+    template.file && fs.lstatSync(template.file.path).isDirectory()
+  if (template.file && (isTar || isDirectory)) {
+    const tmpPath = isTar ? untarFile(template.file) : template.file.path
     const attachmentPath = join(tmpPath, ATTACHMENT_DIR)
     // have to handle object import
     if (fs.existsSync(attachmentPath)) {