Adding import functionality - still need to update the attachments URL.
This commit is contained in:
parent f237befbce
commit 19133f08e6
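At a glance: the object store helpers gain proper `string` typings, `ATTACHMENT_PATH` is renamed to `ATTACHMENT_DIR`, template loading moves out of `createInstance` into a new `sdk.apps.imports` module, and shared export/import constants get their own file. A minimal sketch of the new call path, assuming the controller supplies `appId` and `db`, with an illustrative template object only (the path and filename are made up):

import sdk from "../../sdk"

// Illustrative only: mirrors the createInstance change below, where the
// inline template-load logic is replaced by the new imports module.
async function loadTemplate(appId: string, db: PouchDB.Database) {
  // a gzip tarball upload, as handled by importApp; path is an example value
  const template = {
    file: { type: "application/gzip", path: "/tmp/app-export.tar.gz" },
  }
  // importApp extracts the tarball, uploads any attachments to the object
  // store, then loads the db.txt dump into the app database
  return sdk.apps.imports.importApp(appId, db, template)
}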
@@ -36,16 +36,16 @@ const STRING_CONTENT_TYPES = [
 ]
 
 // does normal sanitization and then swaps dev apps to apps
-export function sanitizeKey(input: any) {
+export function sanitizeKey(input: string) {
   return sanitize(sanitizeBucket(input)).replace(/\\/g, "/")
 }
 
 // simply handles the dev app to app conversion
-export function sanitizeBucket(input: any) {
+export function sanitizeBucket(input: string) {
   return input.replace(new RegExp(APP_DEV_PREFIX, "g"), APP_PREFIX)
 }
 
-function publicPolicy(bucketName: any) {
+function publicPolicy(bucketName: string) {
   return {
     Version: "2012-10-17",
     Statement: [
@@ -73,7 +73,7 @@ const PUBLIC_BUCKETS = [
  * @return {Object} an S3 object store object, check S3 Nodejs SDK for usage.
  * @constructor
  */
-export const ObjectStore = (bucket: any) => {
+export const ObjectStore = (bucket: string) => {
   const config: any = {
     s3ForcePathStyle: true,
     signatureVersion: "v4",
@@ -295,7 +295,7 @@ export const deleteFile = async (bucketName: string, filepath: string) => {
   return objectStore.deleteObject(params)
 }
 
-export const deleteFiles = async (bucketName: any, filepaths: any) => {
+export const deleteFiles = async (bucketName: string, filepaths: string[]) => {
   const objectStore = ObjectStore(bucketName)
   await makeSureBucketExists(objectStore, bucketName)
   const params = {
@@ -311,8 +311,8 @@ export const deleteFiles = async (bucketName: any, filepaths: any) => {
  * Delete a path, including everything within.
  */
 export const deleteFolder = async (
-  bucketName: any,
-  folder: any
+  bucketName: string,
+  folder: string
 ): Promise<any> => {
   bucketName = sanitizeBucket(bucketName)
   folder = sanitizeKey(folder)
@@ -345,9 +345,9 @@ export const deleteFolder = async (
 }
 
 export const uploadDirectory = async (
-  bucketName: any,
-  localPath: any,
-  bucketPath: any
+  bucketName: string,
+  localPath: string,
+  bucketPath: string
 ) => {
   bucketName = sanitizeBucket(bucketName)
   let uploads = []
@@ -379,7 +379,11 @@ exports.downloadTarballDirect = async (
   await streamPipeline(response.body, zlib.Unzip(), tar.extract(path))
 }
 
-export const downloadTarball = async (url: any, bucketName: any, path: any) => {
+export const downloadTarball = async (
+  url: string,
+  bucketName: string,
+  path: string
+) => {
   bucketName = sanitizeBucket(bucketName)
   path = sanitizeKey(path)
   const response = await fetch(url)
@@ -5,11 +5,7 @@ import {
   createRoutingView,
   createAllSearchIndex,
 } from "../../db/views/staticViews"
-import {
-  getTemplateStream,
-  createApp,
-  deleteApp,
-} from "../../utilities/fileSystem"
+import { createApp, deleteApp } from "../../utilities/fileSystem"
 import {
   generateAppID,
   getLayoutParams,
@@ -50,6 +46,7 @@ import { errors, events, migrations } from "@budibase/backend-core"
 import { App, Layout, Screen, MigrationType } from "@budibase/types"
 import { BASE_LAYOUT_PROP_IDS } from "../../constants/layouts"
 import { enrichPluginURLs } from "../../utilities/plugins"
+import sdk from "../../sdk"
 
 const URL_REGEX_SLASH = /\/|\\/g
 
@@ -153,11 +150,7 @@ async function createInstance(template: any) {
       throw "Error loading database dump from memory."
     }
   } else if (template && template.useTemplate === "true") {
-    /* istanbul ignore next */
-    const { ok } = await db.load(await getTemplateStream(template))
-    if (!ok) {
-      throw "Error loading database dump from template."
-    }
+    await sdk.apps.imports.importApp(appId, db, template)
   } else {
     // create the users table
     await db.put(USERS_TABLE_SCHEMA)
@@ -5,7 +5,7 @@ require("svelte/register")
 const send = require("koa-send")
 const { resolve, join } = require("../../../utilities/centralPath")
 const uuid = require("uuid")
-const { ObjectStoreBuckets, ATTACHMENT_PATH } = require("../../../constants")
+const { ObjectStoreBuckets, ATTACHMENT_DIR } = require("../../../constants")
 const { processString } = require("@budibase/string-templates")
 const {
   loadHandlebarsFile,
@@ -90,7 +90,7 @@ export const uploadFile = async function (ctx: any) {
 
     return prepareUpload({
       file,
-      s3Key: `${ctx.appId}/${ATTACHMENT_PATH}/${processedFileName}`,
+      s3Key: `${ctx.appId}/${ATTACHMENT_DIR}/${processedFileName}`,
       bucket: ObjectStoreBuckets.APPS,
     })
   })
@@ -211,6 +211,6 @@ exports.AutomationErrors = {
 // pass through the list from the auth/core lib
 exports.ObjectStoreBuckets = objectStore.ObjectStoreBuckets
 
-exports.ATTACHMENT_PATH = "attachments"
+exports.ATTACHMENT_DIR = "attachments"
 
 exports.MAX_AUTOMATION_RECURRING_ERRORS = 5
@@ -0,0 +1,4 @@
+import { ATTACHMENT_DIR as attachmentDir } from "../../constants"
+export const DB_EXPORT_FILE = "db.txt"
+export const ATTACHMENT_DIR = attachmentDir
+export const GLOBAL_DB_EXPORT_FILE = "global.txt"
@@ -2,20 +2,23 @@ import { db as dbCore } from "@budibase/backend-core"
 import { budibaseTempDir } from "../../utilities/budibaseDir"
 import { retrieveDirectory } from "../../utilities/fileSystem/utilities"
 import { streamFile } from "../../utilities/fileSystem"
-import { ObjectStoreBuckets, ATTACHMENT_PATH } from "../../constants"
+import { ObjectStoreBuckets } from "../../constants"
 import {
   LINK_USER_METADATA_PREFIX,
   TABLE_ROW_PREFIX,
   USER_METDATA_PREFIX,
 } from "../../db/utils"
+import {
+  DB_EXPORT_FILE,
+  GLOBAL_DB_EXPORT_FILE,
+  ATTACHMENT_DIR,
+} from "./constants"
 import fs from "fs"
 import { join } from "path"
 const uuid = require("uuid/v4")
 const tar = require("tar")
 const MemoryStream = require("memorystream")
 
-const DB_EXPORT_FILE = "db.txt"
-const GLOBAL_DB_EXPORT_FILE = "global.txt"
 type ExportOpts = {
   filter?: any
   exportPath?: string
@@ -84,14 +87,14 @@ function defineFilter(excludeRows?: boolean) {
  */
 export async function exportApp(appId: string, config?: ExportOpts) {
   const prodAppId = dbCore.getProdAppID(appId)
-  const attachmentsPath = `${prodAppId}/${ATTACHMENT_PATH}`
+  const attachmentsPath = `${prodAppId}/${ATTACHMENT_DIR}`
   // export attachments to tmp
   const tmpPath = await retrieveDirectory(
     ObjectStoreBuckets.APPS,
     attachmentsPath
   )
   const downloadedPath = join(tmpPath, attachmentsPath),
-    tmpAttachmentPath = join(tmpPath, ATTACHMENT_PATH)
+    tmpAttachmentPath = join(tmpPath, ATTACHMENT_DIR)
   if (fs.existsSync(downloadedPath)) {
     // move out of app directory, simplify structure
     fs.renameSync(downloadedPath, tmpAttachmentPath)
@@ -110,7 +113,7 @@ export async function exportApp(appId: string, config?: ExportOpts) {
   // if tar requested, return where the tarball is
   if (config?.tar) {
     // now the tmpPath contains both the DB export and attachments, tar this
-    const tarPath = tarFilesToTmp(tmpPath, [ATTACHMENT_PATH, DB_EXPORT_FILE])
+    const tarPath = tarFilesToTmp(tmpPath, [ATTACHMENT_DIR, DB_EXPORT_FILE])
     // cleanup the tmp export files as tarball returned
     fs.rmSync(tmpPath, { recursive: true, force: true })
     return tarPath
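`tarFilesToTmp` is referenced here but defined outside this diff. A plausible sketch of its shape using the `tar` package this file already requires, under the assumption that it bundles the listed entries from `tmpPath` into a fresh temp tarball and returns its path:

import { join } from "path"
import { budibaseTempDir } from "../../utilities/budibaseDir"
const tar = require("tar")
const uuid = require("uuid/v4")

// sketch only - the real implementation lives elsewhere in this module
function tarFilesToTmp(tmpPath: string, files: string[]) {
  const exportFile = join(budibaseTempDir(), `${uuid()}.tar.gz`)
  tar.create(
    {
      sync: true,
      gzip: true, // assumed, since importApp checks for "application/gzip"
      file: exportFile,
      cwd: tmpPath, // entries are stored relative to the export dir
    },
    files // e.g. [ATTACHMENT_DIR, DB_EXPORT_FILE]
  )
  return exportFile
}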
@@ -0,0 +1,71 @@
+import { db as dbCore } from "@budibase/backend-core"
+import { budibaseTempDir } from "../../utilities/budibaseDir"
+import { DB_EXPORT_FILE, ATTACHMENT_DIR } from "./constants"
+import { uploadDirectory } from "../../utilities/fileSystem/utilities"
+import { ObjectStoreBuckets } from "../../constants"
+import { join } from "path"
+import fs from "fs"
+const uuid = require("uuid/v4")
+const tar = require("tar")
+
+type TemplateType = {
+  file?: {
+    type: string
+    path: string
+  }
+  key?: string
+}
+
+/**
+ * This function manages temporary template files which are stored by Koa.
+ * @param {Object} template The template object retrieved from the Koa context object.
+ * @returns {Object} Returns a fs read stream which can be loaded into the database.
+ */
+async function getTemplateStream(template: TemplateType) {
+  if (template.file) {
+    return fs.createReadStream(template.file.path)
+  } else if (template.key) {
+    const [type, name] = template.key.split("/")
+    const tmpPath = await exports.downloadTemplate(type, name)
+    return fs.createReadStream(join(tmpPath, name, "db", "dump.txt"))
+  }
+}
+
+export async function importApp(
+  appId: string,
+  db: PouchDB.Database,
+  template: TemplateType
+) {
+  let prodAppId = dbCore.getProdAppID(appId)
+  let dbStream: any
+  if (template.file && template.file.type === "application/gzip") {
+    const tmpPath = join(budibaseTempDir(), uuid())
+    fs.mkdirSync(tmpPath)
+    // extract the tarball
+    tar.extract({
+      sync: true,
+      cwd: tmpPath,
+      file: template.file.path,
+    })
+    const attachmentPath = join(tmpPath, ATTACHMENT_DIR)
+    // have to handle object import
+    if (fs.existsSync(attachmentPath)) {
+      await uploadDirectory(
+        ObjectStoreBuckets.APPS,
+        attachmentPath,
+        join(prodAppId, ATTACHMENT_DIR)
+      )
+    }
+    dbStream = fs.createReadStream(join(tmpPath, DB_EXPORT_FILE))
+  } else {
+    dbStream = await getTemplateStream(template)
+  }
+  // @ts-ignore
+  const { ok } = await db.load(dbStream)
+  if (!ok) {
+    throw "Error loading database dump from template."
+  } else {
+    // TODO: need to iterate over attachments and update their URLs
+  }
+  return ok
+}
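The TODO above is the open item from the commit message: imported rows still reference attachment keys under the source app ID. One possible shape for that follow-up, assuming attachment cells are arrays of objects carrying a `key` such as `<appId>/attachments/<file>` (hypothetical helper, not part of this commit):

import { TABLE_ROW_PREFIX } from "../../db/utils"

// hypothetical follow-up: rewrite attachment keys on imported rows so they
// point at the new (prod) app ID rather than the app they were exported from
async function updateAttachmentKeys(
  db: PouchDB.Database,
  oldAppId: string,
  prodAppId: string
) {
  // fetch only table rows, using the prefix constant from db/utils
  const result = await db.allDocs({
    include_docs: true,
    startkey: TABLE_ROW_PREFIX,
    endkey: `${TABLE_ROW_PREFIX}\ufff0`,
  })
  const updated: any[] = []
  for (const item of result.rows) {
    const row: any = item.doc
    let changed = false
    for (const value of Object.values(row || {})) {
      // assumed attachment shape: [{ key: "<appId>/attachments/<name>", ... }]
      if (!Array.isArray(value)) continue
      for (const entry of value as any[]) {
        if (entry && typeof entry.key === "string" && entry.key.startsWith(oldAppId)) {
          entry.key = entry.key.replace(oldAppId, prodAppId)
          changed = true
        }
      }
    }
    if (changed) updated.push(row)
  }
  if (updated.length) await db.bulkDocs(updated)
}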
@@ -1 +1,2 @@
-export * as exports from "./export"
+export * as exports from "./exports"
+export * as imports from "./imports"
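With the index re-exporting both modules (and the export module renamed from "./export" to "./exports"), callers reach the new functionality as `sdk.apps.imports.importApp(...)` alongside `sdk.apps.exports.exportApp(...)`, which is how the `createInstance` change above is wired in.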
@@ -74,21 +74,6 @@ exports.checkDevelopmentEnvironment = () => {
   }
 }
 
-/**
- * This function manages temporary template files which are stored by Koa.
- * @param {Object} template The template object retrieved from the Koa context object.
- * @returns {Object} Returns an fs read stream which can be loaded into the database.
- */
-exports.getTemplateStream = async template => {
-  if (template.file) {
-    return fs.createReadStream(template.file.path)
-  } else {
-    const [type, name] = template.key.split("/")
-    const tmpPath = await exports.downloadTemplate(type, name)
-    return fs.createReadStream(join(tmpPath, name, "db", "dump.txt"))
-  }
-}
-
 /**
  * Used to retrieve a handlebars file from the system which will be used as a template.
  * This is allowable as the template handlebars files should be static and identical across