Some updates towards supporting attachments in app exports.
commit bb65cf8ef9
parent 3860ec4e5e
@@ -18,6 +18,10 @@ const STATE = {
   bucketCreationPromises: {},
 }
 
+type ListParams = {
+  ContinuationToken?: string
+}
+
 const CONTENT_TYPE_MAP: any = {
   html: "text/html",
   css: "text/css",
@@ -93,7 +97,7 @@ export const ObjectStore = (bucket: any) => {
  * Given an object store and a bucket name this will make sure the bucket exists,
  * if it does not exist then it will create it.
  */
-export const makeSureBucketExists = async (client: any, bucketName: any) => {
+export const makeSureBucketExists = async (client: any, bucketName: string) => {
   bucketName = sanitizeBucket(bucketName)
   try {
     await client
@@ -168,8 +172,8 @@ export const upload = async ({
  * through to the object store.
  */
 export const streamUpload = async (
-  bucketName: any,
-  filename: any,
+  bucketName: string,
+  filename: string,
   stream: any,
   extra = {}
 ) => {
@@ -202,7 +206,7 @@ export const streamUpload = async (
  * retrieves the contents of a file from the object store, if it is a known content type it
  * will be converted, otherwise it will be returned as a buffer stream.
  */
-export const retrieve = async (bucketName: any, filepath: any) => {
+export const retrieve = async (bucketName: string, filepath: string) => {
   const objectStore = ObjectStore(bucketName)
   const params = {
     Bucket: sanitizeBucket(bucketName),
@@ -217,10 +221,38 @@ export const retrieve = async (bucketName: any, filepath: any) => {
   }
 }
 
+export const listAllObjects = async (bucketName: string, path: string) => {
+  const objectStore = ObjectStore(bucketName)
+  const list = (params: ListParams = {}) => {
+    return objectStore
+      .listObjectsV2({
+        ...params,
+        Bucket: sanitizeBucket(bucketName),
+        Prefix: sanitizeKey(path),
+      })
+      .promise()
+  }
+  let isTruncated = false,
+    token,
+    objects: AWS.S3.Types.Object[] = []
+  do {
+    let params: ListParams = {}
+    if (token) {
+      params.ContinuationToken = token
+    }
+    const response = await list(params)
+    if (response.Contents) {
+      objects = objects.concat(response.Contents)
+    }
+    isTruncated = !!response.IsTruncated
+  } while (isTruncated)
+  return objects
+}
+
 /**
  * Same as retrieval function but puts to a temporary file.
  */
-export const retrieveToTmp = async (bucketName: any, filepath: any) => {
+export const retrieveToTmp = async (bucketName: string, filepath: string) => {
   bucketName = sanitizeBucket(bucketName)
   filepath = sanitizeKey(filepath)
   const data = await retrieve(bucketName, filepath)
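Note on the hunk above: as shown in the diff, the loop never assigns `token` from `response.NextContinuationToken`, so a listing that S3 reports as truncated would keep requesting the same first page. A minimal sketch of the same pagination with the token carried forward — the `listAllObjectsPaged` name is hypothetical, and it assumes the same `ObjectStore`, `sanitizeBucket` and `sanitizeKey` helpers as the surrounding module:

const listAllObjectsPaged = async (bucketName: string, path: string) => {
  const objectStore = ObjectStore(bucketName)
  let token: string | undefined
  let objects: AWS.S3.Types.Object[] = []
  do {
    // request the next page, passing the continuation token once we have one
    const response = await objectStore
      .listObjectsV2({
        Bucket: sanitizeBucket(bucketName),
        Prefix: sanitizeKey(path),
        ContinuationToken: token,
      })
      .promise()
    if (response.Contents) {
      objects = objects.concat(response.Contents)
    }
    // carry the token forward so a truncated listing advances to the next page
    token = response.IsTruncated ? response.NextContinuationToken : undefined
  } while (token)
  return objects
}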
@@ -229,10 +261,30 @@ export const retrieveToTmp = async (bucketName: any, filepath: any) => {
   return outputPath
 }
 
+export const retrieveDirectory = async (bucketName: string, path: string) => {
+  let writePath = join(budibaseTempDir(), v4())
+  const objects = await listAllObjects(bucketName, path)
+  let fullObjects = await Promise.all(
+    objects.map(obj => retrieve(bucketName, obj.Key!))
+  )
+  let count = 0
+  for (let obj of objects) {
+    const filename = obj.Key!
+    const data = fullObjects[count++]
+    const possiblePath = filename.split("/")
+    if (possiblePath.length > 1) {
+      const dirs = possiblePath.slice(0, possiblePath.length - 1)
+      fs.mkdirSync(join(writePath, ...dirs), { recursive: true })
+    }
+    fs.writeFileSync(join(writePath, ...possiblePath), data)
+  }
+  return writePath
+}
+
 /**
  * Delete a single file.
  */
-export const deleteFile = async (bucketName: any, filepath: any) => {
+export const deleteFile = async (bucketName: string, filepath: string) => {
   const objectStore = ObjectStore(bucketName)
   await makeSureBucketExists(objectStore, bucketName)
   const params = {
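For context, a rough usage sketch of the new helper: it mirrors everything under a key prefix into a fresh temp directory and returns that directory's path. The bucket constant matches the one used elsewhere in this commit; the app prefix below is purely illustrative:

// pull every object under an app's attachments prefix into a local temp dir
const tmpDir = await retrieveDirectory(
  ObjectStoreBuckets.APPS,
  "app_123456/attachments" // hypothetical app prefix
)
console.log(`attachments copied to ${tmpDir}`)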
@@ -2,6 +2,9 @@ import { writable } from "svelte/store"
 import { AppStatus } from "../../constants"
 import { API } from "api"
 
+// properties that should always come from the dev app, not the deployed
+const DEV_PROPS = ["updatedBy", "updatedAt"]
+
 const extractAppId = id => {
   const split = id?.split("_") || []
   return split.length ? split[split.length - 1] : null
@@ -57,9 +60,19 @@ export function createAppStore() {
         return
       }
 
+      let devProps = {}
+      if (appMap[id]) {
+        const entries = Object.entries(appMap[id]).filter(
+          ([key]) => DEV_PROPS.indexOf(key) !== -1
+        )
+        entries.forEach(entry => {
+          devProps[entry[0]] = entry[1]
+        })
+      }
       appMap[id] = {
         ...appMap[id],
         ...app,
+        ...devProps,
        prodId: app.appId,
         prodRev: app._rev,
       }
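The spread order in that hunk is the point: the deployed app's fields overwrite the dev entry, and `devProps` is applied last so the dev-only fields (`updatedBy`, `updatedAt`) survive the merge. A standalone illustration of that ordering, with made-up values:

const DEV_PROPS = ["updatedBy", "updatedAt"]

const devApp = { name: "inventory", updatedBy: "dev@example.com", updatedAt: "2022-06-01" }
const deployedApp = { name: "inventory", updatedBy: "system", appId: "app_prod_1", _rev: "2-a" }

// keep only the dev-sourced props that should be preserved
const devProps = Object.fromEntries(
  Object.entries(devApp).filter(([key]) => DEV_PROPS.includes(key))
)

const merged = { ...devApp, ...deployedApp, ...devProps }
// merged.updatedBy === "dev@example.com" – the dev value wins despite the deployed overwrite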
@@ -9,7 +9,7 @@ export async function exportAppDump(ctx: any) {
   excludeRows = isQsTrue(excludeRows)
   const backupIdentifier = `${appName}-export-${new Date().getTime()}.txt`
   ctx.attachment(backupIdentifier)
-  ctx.body = await sdk.apps.exports.streamBackup(appId, excludeRows)
+  ctx.body = await sdk.apps.exports.streamExportApp(appId, excludeRows)
 
   await context.doInAppContext(appId, async () => {
     const appDb = context.getAppDB()
@@ -35,7 +35,7 @@ exports.exportApps = async ctx => {
     // only export the dev apps as they will be the latest, the user can republish the apps
     // in their self hosted environment
     if (isDevAppID(appId)) {
-      allDBs[app.name] = await sdk.apps.exports.exportDB(appId)
+      allDBs[app.name] = await sdk.apps.exports.exportApp(appId)
     }
   }
   const filename = `cloud-export-${new Date().getTime()}.txt`
@@ -5,7 +5,7 @@ require("svelte/register")
 const send = require("koa-send")
 const { resolve, join } = require("../../../utilities/centralPath")
 const uuid = require("uuid")
-const { ObjectStoreBuckets } = require("../../../constants")
+const { ObjectStoreBuckets, ATTACHMENT_PATH } = require("../../../constants")
 const { processString } = require("@budibase/string-templates")
 const {
   loadHandlebarsFile,
@@ -90,7 +90,7 @@ export const uploadFile = async function (ctx: any) {
 
       return prepareUpload({
         file,
-        s3Key: `${ctx.appId}/attachments/${processedFileName}`,
+        s3Key: `${ctx.appId}/${ATTACHMENT_PATH}/${processedFileName}`,
         bucket: ObjectStoreBuckets.APPS,
       })
     })
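With the hard-coded path segment replaced by the shared ATTACHMENT_PATH constant, uploads and the export code agree on one key layout. A small sketch of the key that results (the app ID and file name are illustrative only):

const ATTACHMENT_PATH = "attachments"
const appId = "app_dev_abc123" // hypothetical
const processedFileName = "logo.png"

// e.g. "app_dev_abc123/attachments/logo.png" inside the APPS bucket
const s3Key = `${appId}/${ATTACHMENT_PATH}/${processedFileName}`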
@@ -1,6 +1,6 @@
 const { BUILTIN_ROLE_IDS } = require("@budibase/backend-core/roles")
 const { UserStatus } = require("@budibase/backend-core/constants")
-const { ObjectStoreBuckets } = require("@budibase/backend-core/objectStore")
+const { objectStore } = require("@budibase/backend-core")
 
 exports.JobQueues = {
   AUTOMATIONS: "automationQueue",
@@ -209,6 +209,8 @@ exports.AutomationErrors = {
 }
 
 // pass through the list from the auth/core lib
-exports.ObjectStoreBuckets = ObjectStoreBuckets
+exports.ObjectStoreBuckets = objectStore.ObjectStoreBuckets
+
+exports.ATTACHMENT_PATH = "attachments"
 
 exports.MAX_AUTOMATION_RECURRING_ERRORS = 5
@@ -1,7 +1,10 @@
-import { closeDB, dangerousGetDB, doWithDB } from "@budibase/backend-core/db"
+import { db as dbCore } from "@budibase/backend-core"
 import { budibaseTempDir } from "../../utilities/budibaseDir"
-import { streamUpload } from "../../utilities/fileSystem/utilities"
-import { ObjectStoreBuckets } from "../../constants"
+import {
+  streamUpload,
+  retrieveDirectory,
+} from "../../utilities/fileSystem/utilities"
+import { ObjectStoreBuckets, ATTACHMENT_PATH } from "../../constants"
 import {
   LINK_USER_METADATA_PREFIX,
   TABLE_ROW_PREFIX,
@@ -25,16 +28,16 @@ export async function exportDB(
 ) {
   // streaming a DB dump is a bit more complicated, can't close DB
   if (opts?.stream) {
-    const db = dangerousGetDB(dbName)
+    const db = dbCore.dangerousGetDB(dbName)
     const memStream = new MemoryStream()
     memStream.on("end", async () => {
-      await closeDB(db)
+      await dbCore.closeDB(db)
     })
     db.dump(memStream, { filter: opts?.filter })
     return memStream
   }
 
-  return doWithDB(dbName, async (db: any) => {
+  return dbCore.doWithDB(dbName, async (db: any) => {
     // Write the dump to file if required
     if (opts?.exportName) {
       const path = join(budibaseTempDir(), opts?.exportName)
@@ -49,18 +52,17 @@ export async function exportDB(
           fs.createReadStream(path)
         )
       }
 
       return fs.createReadStream(path)
-    }
-
-    // Stringify the dump in memory if required
-    const memStream = new MemoryStream()
-    let appString = ""
-    memStream.on("data", (chunk: any) => {
-      appString += chunk.toString()
-    })
-    await db.dump(memStream, { filter: opts?.filter })
-    return appString
+    } else {
+      // Stringify the dump in memory if required
+      const memStream = new MemoryStream()
+      let appString = ""
+      memStream.on("data", (chunk: any) => {
+        appString += chunk.toString()
+      })
+      await db.dump(memStream, { filter: opts?.filter })
+      return appString
+    }
   })
 }
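Taken together, exportDB now has three return shapes: a live MemoryStream when opts.stream is set, a file read stream when opts.exportName is given, and an in-memory string otherwise. A hedged sketch of the three calls from a caller's point of view — the option names come from the hunks above, the app ID and file name are illustrative:

// stream the dump straight through, e.g. into an HTTP response body
const dumpStream = await exportDB("app_dev_abc123", { stream: true })

// or write a named export file under the temp dir and get a read stream back
const fileStream = await exportDB("app_dev_abc123", {
  exportName: "backup-2022.txt",
})

// or buffer the whole dump and receive it as a string
const appString = await exportDB("app_dev_abc123")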
@@ -81,12 +83,17 @@ function defineFilter(excludeRows?: boolean) {
  * @param {boolean} excludeRows Flag to state whether the export should include data.
  * @returns {*} either a string or a stream of the backup
  */
-async function backupAppData(
+export async function exportApp(
   appId: string,
-  config: any,
+  config?: any,
   excludeRows?: boolean
 ) {
-  return await exportDB(appId, {
+  const attachmentsPath = `${dbCore.getProdAppID(appId)}/${ATTACHMENT_PATH}`
+  const tmpPath = await retrieveDirectory(
+    ObjectStoreBuckets.APPS,
+    attachmentsPath
+  )
+  await exportDB(appId, {
     ...config,
     filter: defineFilter(excludeRows),
   })
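In line with the commit message, this is only a step towards attachment export: the attachments are mirrored into tmpPath, but nothing in the hunk bundles them with the DB dump yet, and the exportDB result is no longer returned to the caller. Purely as a hypothetical sketch of how the pieces could later fit together (exportAppToFolder is not part of this commit; it assumes the same imports as this module):

// hypothetical follow-up: put the DB dump next to the attachments and
// hand back one directory that a later step can archive and stream
export async function exportAppToFolder(appId: string, excludeRows?: boolean) {
  const attachmentsPath = `${dbCore.getProdAppID(appId)}/${ATTACHMENT_PATH}`
  const tmpPath = await retrieveDirectory(ObjectStoreBuckets.APPS, attachmentsPath)
  const dump = await exportDB(appId, { filter: defineFilter(excludeRows) })
  fs.writeFileSync(join(tmpPath, "db.txt"), dump as string)
  return tmpPath
}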
@@ -98,16 +105,6 @@ async function backupAppData(
  * @param {boolean} excludeRows Flag to state whether the export should include data.
  * @returns {*} a readable stream of the backup which is written in real time
  */
-export async function streamBackup(appId: string, excludeRows: boolean) {
-  return await backupAppData(appId, { stream: true }, excludeRows)
-}
-
-/**
- * Takes a copy of the database state for an app to the object store.
- * @param {string} appId The ID of the app which is to be backed up.
- * @param {string} backupName The name of the backup located in the object store.
- * @return {*} a readable stream to the completed backup file
- */
-export async function performBackup(appId: string, backupName: string) {
-  return await backupAppData(appId, { exportName: backupName })
+export async function streamExportApp(appId: string, excludeRows: boolean) {
+  return await exportApp(appId, { stream: true }, excludeRows)
 }
@@ -6,6 +6,7 @@ const {
   streamUpload,
   retrieve,
   retrieveToTmp,
+  retrieveDirectory,
   deleteFolder,
   uploadDirectory,
   downloadTarball,
@@ -27,6 +28,7 @@ exports.upload = upload
 exports.streamUpload = streamUpload
 exports.retrieve = retrieve
 exports.retrieveToTmp = retrieveToTmp
+exports.retrieveDirectory = retrieveDirectory
 exports.deleteFolder = deleteFolder
 exports.uploadDirectory = uploadDirectory
 exports.downloadTarball = downloadTarball
@@ -99,16 +99,7 @@ export const paginatedUsers = async ({
  */
 export const getUser = async (userId: string) => {
   const db = tenancy.getGlobalDB()
-  let user
-  try {
-    user = await db.get(userId)
-  } catch (err: any) {
-    // no user found, just return nothing
-    if (err.status === 404) {
-      return {}
-    }
-    throw err
-  }
+  let user = await db.get(userId)
   if (user) {
     delete user.password
   }
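Worth flagging: dropping the try/catch changes getUser's contract — a missing user used to resolve to an empty object, whereas now db.get will throw (a 404 from the DB layer). Callers that relied on the old behaviour would need to absorb the error themselves; a minimal sketch of such a caller, under that assumption (getUserOrEmpty is hypothetical):

// hypothetical caller preserving the old "missing user => {}" behaviour
async function getUserOrEmpty(userId: string) {
  try {
    return await getUser(userId)
  } catch (err: any) {
    if (err.status === 404) {
      return {}
    }
    throw err
  }
}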