Building out initial SDK work - converting some existing exporting work to TypeScript.
parent cd57ff6ced
commit 06599e0f17
@@ -1,15 +1,15 @@
-const { streamBackup } = require("../../utilities/fileSystem")
-const { events, context } = require("@budibase/backend-core")
-const { DocumentType } = require("../../db/utils")
-const { isQsTrue } = require("../../utilities")
+import sdk from "../../sdk"
+import { events, context } from "@budibase/backend-core"
+import { DocumentType } from "../../db/utils"
+import { isQsTrue } from "../../utilities"
 
-exports.exportAppDump = async function (ctx) {
+export async function exportAppDump(ctx: any) {
   let { appId, excludeRows } = ctx.query
   const appName = decodeURI(ctx.query.appname)
   excludeRows = isQsTrue(excludeRows)
   const backupIdentifier = `${appName}-export-${new Date().getTime()}.txt`
   ctx.attachment(backupIdentifier)
-  ctx.body = await streamBackup(appId, excludeRows)
+  ctx.body = await sdk.apps.exports.streamBackup(appId, excludeRows)
 
   await context.doInAppContext(appId, async () => {
     const appDb = context.getAppDB()
@@ -1,14 +1,11 @@
 const env = require("../../environment")
 const { getAllApps, getGlobalDBName } = require("@budibase/backend-core/db")
-const {
-  exportDB,
-  sendTempFile,
-  readFileSync,
-} = require("../../utilities/fileSystem")
+const { sendTempFile, readFileSync } = require("../../utilities/fileSystem")
 const { stringToReadStream } = require("../../utilities")
 const { getGlobalDB } = require("@budibase/backend-core/tenancy")
 const { create } = require("./application")
 const { getDocParams, DocumentType, isDevAppID } = require("../../db/utils")
+const sdk = require("../../sdk")
 
 async function createApp(appName, appImport) {
   const ctx = {
@@ -27,7 +24,7 @@ exports.exportApps = async ctx => {
     ctx.throw(400, "Exporting only allowed in multi-tenant cloud environments.")
   }
   const apps = await getAllApps({ all: true })
-  const globalDBString = await exportDB(getGlobalDBName(), {
+  const globalDBString = await sdk.apps.exports.exportDB(getGlobalDBName(), {
     filter: doc => !doc._id.startsWith(DocumentType.USER),
   })
   let allDBs = {
@@ -38,7 +35,7 @@ exports.exportApps = async ctx => {
     // only export the dev apps as they will be the latest, the user can republish the apps
     // in their self hosted environment
     if (isDevAppID(appId)) {
-      allDBs[app.name] = await exportDB(appId)
+      allDBs[app.name] = await sdk.apps.exports.exportDB(appId)
     }
   }
   const filename = `cloud-export-${new Date().getTime()}.txt`
@@ -1,10 +0,0 @@
-const Router = require("@koa/router")
-const controller = require("../controllers/backup")
-const authorized = require("../../middleware/authorized")
-const { BUILDER } = require("@budibase/backend-core/permissions")
-
-const router = new Router()
-
-router.get("/api/backups/export", authorized(BUILDER), controller.exportAppDump)
-
-module.exports = router
@@ -0,0 +1,10 @@
+import Router from "@koa/router"
+import * as controller from "../controllers/backup"
+import authorized from "../../middleware/authorized"
+import { BUILDER } from "@budibase/backend-core/permissions"
+
+const router = new Router()
+
+router.get("/api/backups/export", authorized(BUILDER), controller.exportAppDump)
+
+export default router
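For reference, the replacement route is wired up exactly like the one it replaces, so existing callers are unaffected. Below is a minimal sketch of hitting the endpoint from a script; the port and auth header are assumptions not confirmed by this commit, while the appId/appname/excludeRows parameter names come from the controller change above.

import fetch from "node-fetch"

// Hypothetical caller of GET /api/backups/export - port and auth header are assumptions.
async function downloadAppExport(appId: string, appName: string) {
  const params = new URLSearchParams({
    appId,
    appname: appName,
    excludeRows: "true",
  })
  const res = await fetch(`http://localhost:4001/api/backups/export?${params}`, {
    headers: { "x-budibase-api-key": "<builder API key>" },
  })
  // The controller streams the dump (ctx.body is a readable stream), so consume res.body as a stream.
  return res.body
}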
@@ -0,0 +1,113 @@
+import { closeDB, dangerousGetDB, doWithDB } from "@budibase/backend-core/db"
+import { budibaseTempDir } from "../../utilities/budibaseDir"
+import { streamUpload } from "../../utilities/fileSystem/utilities"
+import { ObjectStoreBuckets } from "../../constants"
+import {
+  LINK_USER_METADATA_PREFIX,
+  TABLE_ROW_PREFIX,
+  USER_METDATA_PREFIX,
+} from "../../db/utils"
+import fs from "fs"
+import env from "../../environment"
+import { join } from "path"
+const MemoryStream = require("memorystream")
+
+/**
+ * Exports a DB to either file or a variable (memory).
+ * @param {string} dbName the DB which is to be exported.
+ * @param {object} opts various options for the export, e.g. whether to stream,
+ * a filter function or the name of the export.
+ * @return {*} either a readable stream or a string
+ */
+export async function exportDB(
+  dbName: string,
+  opts: { stream?: boolean; filter?: any; exportName?: string } = {}
+) {
+  // streaming a DB dump is a bit more complicated, can't close DB
+  if (opts?.stream) {
+    const db = dangerousGetDB(dbName)
+    const memStream = new MemoryStream()
+    memStream.on("end", async () => {
+      await closeDB(db)
+    })
+    db.dump(memStream, { filter: opts?.filter })
+    return memStream
+  }
+
+  return doWithDB(dbName, async (db: any) => {
+    // Write the dump to file if required
+    if (opts?.exportName) {
+      const path = join(budibaseTempDir(), opts?.exportName)
+      const writeStream = fs.createWriteStream(path)
+      await db.dump(writeStream, { filter: opts?.filter })
+
+      // Upload the dump to the object store if self-hosted
+      if (env.SELF_HOSTED) {
+        await streamUpload(
+          ObjectStoreBuckets.BACKUPS,
+          join(dbName, opts?.exportName),
+          fs.createReadStream(path)
+        )
+      }
+
+      return fs.createReadStream(path)
+    }
+
+    // Stringify the dump in memory if required
+    const memStream = new MemoryStream()
+    let appString = ""
+    memStream.on("data", (chunk: any) => {
+      appString += chunk.toString()
+    })
+    await db.dump(memStream, { filter: opts?.filter })
+    return appString
+  })
+}
+
+function defineFilter(excludeRows?: boolean) {
+  const ids = [USER_METDATA_PREFIX, LINK_USER_METADATA_PREFIX]
+  if (excludeRows) {
+    ids.push(TABLE_ROW_PREFIX)
+  }
+  return (doc: any) =>
+    !ids.map(key => doc._id.includes(key)).reduce((prev, curr) => prev || curr)
+}
+
+/**
+ * Local utility to back up the database state for an app, excluding global user
+ * data or user relationships.
+ * @param {string} appId The app to back up
+ * @param {object} config Config to send to export DB
+ * @param {boolean} excludeRows Flag to state whether the export should include data.
+ * @returns {*} either a string or a stream of the backup
+ */
+async function backupAppData(
+  appId: string,
+  config: any,
+  excludeRows?: boolean
+) {
+  return await exportDB(appId, {
+    ...config,
+    filter: defineFilter(excludeRows),
+  })
+}
+
+/**
+ * Streams a backup of the database state for an app
+ * @param {string} appId The ID of the app which is to be backed up.
+ * @param {boolean} excludeRows Flag to state whether the export should include data.
+ * @returns {*} a readable stream of the backup which is written in real time
+ */
+export async function streamBackup(appId: string, excludeRows: boolean) {
+  return await backupAppData(appId, { stream: true }, excludeRows)
+}
+
+/**
+ * Takes a copy of the database state for an app to the object store.
+ * @param {string} appId The ID of the app which is to be backed up.
+ * @param {string} backupName The name of the backup located in the object store.
+ * @return {*} a readable stream to the completed backup file
+ */
+export async function performBackup(appId: string, backupName: string) {
+  return await backupAppData(appId, { exportName: backupName })
+}
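Of the new module's exports, exportDB is the low-level primitive and streamBackup/performBackup are thin wrappers over it via backupAppData. A minimal sketch of calling it directly from a sibling module inside sdk/app follows; the database name and the filter are hypothetical.

import { exportDB } from "./export"

// With neither stream nor exportName set, the dump is accumulated into a string in memory.
async function dumpToString(dbName: string) {
  return exportDB(dbName, {
    filter: (doc: any) => !doc._id.startsWith("_design"), // hypothetical filter: drop design docs
  })
}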
@@ -0,0 +1 @@
+export * as exports from "./export"
@@ -0,0 +1,5 @@
+import * as apps from "./app"
+
+export default {
+  apps,
+}
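Together with the sdk/app index above, this default export is what lets the controller call sdk.apps.exports.streamBackup(...). A minimal sketch of the resolution chain; the app ID is hypothetical.

import sdk from "../../sdk"

async function exampleStream(appId: string) {
  // sdk (default export) -> apps (sdk/app/index) -> exports (sdk/app/export) -> streamBackup
  return sdk.apps.exports.streamBackup(appId, true)
}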
@@ -2,17 +2,11 @@ const { budibaseTempDir } = require("../budibaseDir")
 const fs = require("fs")
 const { join } = require("path")
 const uuid = require("uuid/v4")
-const {
-  doWithDB,
-  dangerousGetDB,
-  closeDB,
-} = require("@budibase/backend-core/db")
-const { ObjectStoreBuckets } = require("../../constants")
 const {
   upload,
   retrieve,
   retrieveToTmp,
   streamUpload,
   deleteFolder,
   downloadTarball,
   downloadTarballDirect,
@@ -21,12 +15,6 @@ const {
 const { updateClientLibrary } = require("./clientLibrary")
 const { checkSlashesInUrl } = require("../")
 const env = require("../../environment")
-const {
-  USER_METDATA_PREFIX,
-  LINK_USER_METADATA_PREFIX,
-  TABLE_ROW_PREFIX,
-} = require("../../db/utils")
-const MemoryStream = require("memorystream")
 const { getAppId } = require("@budibase/backend-core/context")
 const tar = require("tar")
 const fetch = require("node-fetch")
@@ -124,100 +112,6 @@ exports.apiFileReturn = contents => {
   return fs.createReadStream(path)
 }
 
-exports.defineFilter = excludeRows => {
-  const ids = [USER_METDATA_PREFIX, LINK_USER_METADATA_PREFIX]
-  if (excludeRows) {
-    ids.push(TABLE_ROW_PREFIX)
-  }
-  return doc =>
-    !ids.map(key => doc._id.includes(key)).reduce((prev, curr) => prev || curr)
-}
-
-/**
- * Local utility to back up the database state for an app, excluding global user
- * data or user relationships.
- * @param {string} appId The app to backup
- * @param {object} config Config to send to export DB
- * @param {boolean} excludeRows Flag to state whether the export should include data.
- * @returns {*} either a string or a stream of the backup
- */
-const backupAppData = async (appId, config, excludeRows) => {
-  return await exports.exportDB(appId, {
-    ...config,
-    filter: exports.defineFilter(excludeRows),
-  })
-}
-
-/**
- * Takes a copy of the database state for an app to the object store.
- * @param {string} appId The ID of the app which is to be backed up.
- * @param {string} backupName The name of the backup located in the object store.
- * @return {*} a readable stream to the completed backup file
- */
-exports.performBackup = async (appId, backupName) => {
-  return await backupAppData(appId, { exportName: backupName })
-}
-
-/**
- * Streams a backup of the database state for an app
- * @param {string} appId The ID of the app which is to be backed up.
- * @param {boolean} excludeRows Flag to state whether the export should include data.
- * @returns {*} a readable stream of the backup which is written in real time
- */
-exports.streamBackup = async (appId, excludeRows) => {
-  return await backupAppData(appId, { stream: true }, excludeRows)
-}
-
-/**
- * Exports a DB to either file or a variable (memory).
- * @param {string} dbName the DB which is to be exported.
- * @param {string} exportName optional - provide a filename to write the backup to a file
- * @param {boolean} stream optional - whether to perform a full backup
- * @param {function} filter optional - a filter function to clear out any un-wanted docs.
- * @return {*} either a readable stream or a string
- */
-exports.exportDB = async (dbName, { stream, filter, exportName } = {}) => {
-  // streaming a DB dump is a bit more complicated, can't close DB
-  if (stream) {
-    const db = dangerousGetDB(dbName)
-    const memStream = new MemoryStream()
-    memStream.on("end", async () => {
-      await closeDB(db)
-    })
-    db.dump(memStream, { filter })
-    return memStream
-  }
-
-  return doWithDB(dbName, async db => {
-    // Write the dump to file if required
-    if (exportName) {
-      const path = join(budibaseTempDir(), exportName)
-      const writeStream = fs.createWriteStream(path)
-      await db.dump(writeStream, { filter })
-
-      // Upload the dump to the object store if self hosted
-      if (env.SELF_HOSTED) {
-        await streamUpload(
-          ObjectStoreBuckets.BACKUPS,
-          join(dbName, exportName),
-          fs.createReadStream(path)
-        )
-      }
-
-      return fs.createReadStream(path)
-    }
-
-    // Stringify the dump in memory if required
-    const memStream = new MemoryStream()
-    let appString = ""
-    memStream.on("data", chunk => {
-      appString += chunk.toString()
-    })
-    await db.dump(memStream, { filter })
-    return appString
-  })
-}
-
 /**
  * Writes the provided contents to a temporary file, which can be used briefly.
  * @param {string} fileContents contents which will be written to a temp file.