Merge pull request #10874 from Budibase/budi-7010-encrypt-app-exports
BUDI-7010 - Encrypt app exports
This commit is contained in: commit b7e37906c7

@@ -1,12 +1,17 @@
 import crypto from "crypto"
+import fs from "fs"
+import zlib from "zlib"
 import env from "../environment"
+import { join } from "path"
 
 const ALGO = "aes-256-ctr"
 const SEPARATOR = "-"
 const ITERATIONS = 10000
-const RANDOM_BYTES = 16
 const STRETCH_LENGTH = 32
 
+const SALT_LENGTH = 16
+const IV_LENGTH = 16
+
 export enum SecretOption {
   API = "api",
   ENCRYPTION = "encryption",

@@ -31,15 +36,15 @@ export function getSecret(secretOption: SecretOption): string {
   return secret
 }
 
-function stretchString(string: string, salt: Buffer) {
-  return crypto.pbkdf2Sync(string, salt, ITERATIONS, STRETCH_LENGTH, "sha512")
+function stretchString(secret: string, salt: Buffer) {
+  return crypto.pbkdf2Sync(secret, salt, ITERATIONS, STRETCH_LENGTH, "sha512")
 }
 
 export function encrypt(
   input: string,
   secretOption: SecretOption = SecretOption.API
 ) {
-  const salt = crypto.randomBytes(RANDOM_BYTES)
+  const salt = crypto.randomBytes(SALT_LENGTH)
   const stretched = stretchString(getSecret(secretOption), salt)
   const cipher = crypto.createCipheriv(ALGO, stretched, salt)
   const base = cipher.update(input)
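
For context: the renamed `stretchString` is standard PBKDF2 key stretching. It turns a passphrase of any length into exactly the 32 bytes (256 bits) an AES-256 key requires. A minimal self-contained sketch of the same derivation (the passphrase is illustrative):

```ts
import crypto from "crypto"

// PBKDF2-SHA512, mirroring the constants in the diff above.
const ITERATIONS = 10000
const STRETCH_LENGTH = 32 // 32 bytes = a 256-bit AES key

function deriveKey(passphrase: string, salt: Buffer): Buffer {
  return crypto.pbkdf2Sync(passphrase, salt, ITERATIONS, STRETCH_LENGTH, "sha512")
}

// A fresh random salt per encryption; never reuse one.
const salt = crypto.randomBytes(16)
const key = deriveKey("example passphrase", salt)
console.log(key.length) // 32
```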
|
@@ -60,3 +65,115 @@ export function decrypt(
   const final = decipher.final()
   return Buffer.concat([base, final]).toString()
 }
+
+export async function encryptFile(
+  { dir, filename }: { dir: string; filename: string },
+  secret: string
+) {
+  const outputFileName = `${filename}.enc`
+
+  const filePath = join(dir, filename)
+  const inputFile = fs.createReadStream(filePath)
+  const outputFile = fs.createWriteStream(join(dir, outputFileName))
+
+  const salt = crypto.randomBytes(SALT_LENGTH)
+  const iv = crypto.randomBytes(IV_LENGTH)
+  const stretched = stretchString(secret, salt)
+  const cipher = crypto.createCipheriv(ALGO, stretched, iv)
+
+  outputFile.write(salt)
+  outputFile.write(iv)
+
+  inputFile.pipe(zlib.createGzip()).pipe(cipher).pipe(outputFile)
+
+  return new Promise<{ filename: string; dir: string }>(r => {
+    outputFile.on("finish", () => {
+      r({
+        filename: outputFileName,
+        dir,
+      })
+    })
+  })
+}
+
+async function getSaltAndIV(path: string) {
+  const fileStream = fs.createReadStream(path)
+
+  const salt = await readBytes(fileStream, SALT_LENGTH)
+  const iv = await readBytes(fileStream, IV_LENGTH)
+  fileStream.close()
+  return { salt, iv }
+}
+
+export async function decryptFile(
+  inputPath: string,
+  outputPath: string,
+  secret: string
+) {
+  const { salt, iv } = await getSaltAndIV(inputPath)
+  const inputFile = fs.createReadStream(inputPath, {
+    start: SALT_LENGTH + IV_LENGTH,
+  })
+
+  const outputFile = fs.createWriteStream(outputPath)
+
+  const stretched = stretchString(secret, salt)
+  const decipher = crypto.createDecipheriv(ALGO, stretched, iv)
+
+  const unzip = zlib.createGunzip()
+
+  inputFile.pipe(decipher).pipe(unzip).pipe(outputFile)
+
+  return new Promise<void>((res, rej) => {
+    outputFile.on("finish", () => {
+      outputFile.close()
+      res()
+    })
+
+    inputFile.on("error", e => {
+      outputFile.close()
+      rej(e)
+    })
+
+    decipher.on("error", e => {
+      outputFile.close()
+      rej(e)
+    })
+
+    unzip.on("error", e => {
+      outputFile.close()
+      rej(e)
+    })
+
+    outputFile.on("error", e => {
+      outputFile.close()
+      rej(e)
+    })
+  })
+}
+
+function readBytes(stream: fs.ReadStream, length: number) {
+  return new Promise<Buffer>((resolve, reject) => {
+    let bytesRead = 0
+    const data: Buffer[] = []
+
+    stream.on("readable", () => {
+      let chunk
+
+      while ((chunk = stream.read(length - bytesRead)) !== null) {
+        data.push(chunk)
+        bytesRead += chunk.length
+      }
+
+      resolve(Buffer.concat(data))
+    })
+
+    stream.on("end", () => {
+      reject(new Error("Insufficient data in the stream."))
+    })
+
+    stream.on("error", error => {
+      reject(error)
+    })
+  })
+}
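
Worth spelling out the on-disk format these new helpers produce: a 16-byte random salt, then a 16-byte random IV, then the gzip-compressed, AES-256-CTR-encrypted payload. `decryptFile` reads the two headers back via `readBytes` and starts its cipher stream at byte 32. A hedged round-trip sketch (paths and passphrase are illustrative; the `encryption` import mirrors how the server code below consumes these helpers):

```ts
import { encryption } from "@budibase/backend-core"

async function roundTrip() {
  // Writes /tmp/export/app.tar.gz.enc, laid out as
  // [16-byte salt][16-byte IV][gzip + AES-256-CTR ciphertext]
  const { dir, filename } = await encryption.encryptFile(
    { dir: "/tmp/export", filename: "app.tar.gz" },
    "example-passphrase"
  )

  // Reads salt + IV from the first 32 bytes, then deciphers and gunzips.
  await encryption.decryptFile(
    `${dir}/${filename}`,
    "/tmp/export/app.roundtrip.tar.gz",
    "example-passphrase"
  )
}

roundTrip().catch(console.error)
```

One caveat visible in the diff itself: `decryptFile` wires up "error" handlers on every stream, but `encryptFile` resolves only on "finish", so a stream error during encryption would leave that promise hanging.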
|
@@ -49,7 +49,7 @@
     "pouchdb": "7.3.0",
     "pouchdb-replication-stream": "1.2.9",
     "randomstring": "1.1.5",
-    "tar": "6.1.11",
+    "tar": "6.1.15",
     "yaml": "^2.1.1"
   },
   "devDependencies": {

@@ -117,7 +117,7 @@
     "socket.io": "4.6.1",
     "svelte": "3.49.0",
     "swagger-parser": "10.0.3",
-    "tar": "6.1.11",
+    "tar": "6.1.15",
     "to-json-schema": "0.2.5",
     "uuid": "3.3.2",
     "validate.js": "0.13.1",

@@ -150,7 +150,7 @@
     "@types/redis": "4.0.11",
     "@types/server-destroy": "1.0.1",
     "@types/supertest": "2.0.12",
-    "@types/tar": "6.1.3",
+    "@types/tar": "6.1.5",
     "@typescript-eslint/parser": "5.45.0",
     "apidoc": "0.50.4",
     "babel-jest": "29.5.0",

@@ -5,11 +5,12 @@ import { Ctx } from "@budibase/types"
 
 interface ExportAppDumpRequest {
   excludeRows: boolean
+  encryptPassword?: string
 }
 
 export async function exportAppDump(ctx: Ctx<ExportAppDumpRequest>) {
   const { appId } = ctx.query as any
-  const { excludeRows } = ctx.request.body
+  const { excludeRows, encryptPassword } = ctx.request.body
 
   const [app] = await db.getAppsByIDs([appId])
   const appName = app.name

@@ -17,9 +18,14 @@ export async function exportAppDump(ctx: Ctx<ExportAppDumpRequest>) {
   // remove the 120 second limit for the request
   ctx.req.setTimeout(0)
 
-  const backupIdentifier = `${appName}-export-${new Date().getTime()}.tar.gz`
+  const extension = encryptPassword ? "enc.tar.gz" : "tar.gz"
+  const backupIdentifier = `${appName}-export-${new Date().getTime()}.${extension}`
   ctx.attachment(backupIdentifier)
-  ctx.body = await sdk.backups.streamExportApp(appId, excludeRows)
+  ctx.body = await sdk.backups.streamExportApp({
+    appId,
+    excludeRows,
+    encryptPassword,
+  })
 
   await context.doInAppContext(appId, async () => {
     const appDb = context.getAppDB()
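
From the client's perspective, the only API change is one optional body field. A hypothetical call (the route and query shape are assumptions based on this handler, not taken from the diff):

```ts
// Hypothetical client call; the URL is an assumption for illustration.
const appId = "app_dev_123" // illustrative
const res = await fetch(`/api/backups/export?appId=${appId}`, {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({
    excludeRows: true,
    // Optional: when set, the download becomes <app>-export-<ts>.enc.tar.gz
    encryptPassword: "example-passphrase",
  }),
})
const backup = await res.blob() // the streamed tarball
```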
|
@@ -1,4 +1,4 @@
-import { db as dbCore, objectStore } from "@budibase/backend-core"
+import { db as dbCore, encryption, objectStore } from "@budibase/backend-core"
 import { budibaseTempDir } from "../../../utilities/budibaseDir"
 import { streamFile, createTempFolder } from "../../../utilities/fileSystem"
 import { ObjectStoreBuckets } from "../../../constants"

@@ -18,7 +18,8 @@ import { join } from "path"
 import env from "../../../environment"
 
 const uuid = require("uuid/v4")
-const tar = require("tar")
+import tar from "tar"
+
 const MemoryStream = require("memorystream")
 
 interface DBDumpOpts {

@@ -30,16 +31,18 @@ interface ExportOpts extends DBDumpOpts {
   tar?: boolean
   excludeRows?: boolean
   excludeLogs?: boolean
+  encryptPassword?: string
 }
 
 function tarFilesToTmp(tmpDir: string, files: string[]) {
-  const exportFile = join(budibaseTempDir(), `${uuid()}.tar.gz`)
+  const fileName = `${uuid()}.tar.gz`
+  const exportFile = join(budibaseTempDir(), fileName)
   tar.create(
     {
       sync: true,
       gzip: true,
       file: exportFile,
-      recursive: true,
+      noDirRecurse: false,
       cwd: tmpDir,
     },
     files

@@ -124,6 +127,7 @@ export async function exportApp(appId: string, config?: ExportOpts) {
       )
     }
   }
+
   const downloadedPath = join(tmpPath, appPath)
   if (fs.existsSync(downloadedPath)) {
     const allFiles = fs.readdirSync(downloadedPath)

@@ -141,12 +145,27 @@ export async function exportApp(appId: string, config?: ExportOpts) {
     filter: defineFilter(config?.excludeRows, config?.excludeLogs),
     exportPath: dbPath,
   })
 
+  if (config?.encryptPassword) {
+    for (let file of fs.readdirSync(tmpPath)) {
+      const path = join(tmpPath, file)
+
+      await encryption.encryptFile(
+        { dir: tmpPath, filename: file },
+        config.encryptPassword
+      )
+
+      fs.rmSync(path)
+    }
+  }
+
   // if tar requested, return where the tarball is
   if (config?.tar) {
     // now the tmpPath contains both the DB export and attachments, tar this
     const tarPath = tarFilesToTmp(tmpPath, fs.readdirSync(tmpPath))
     // cleanup the tmp export files as tarball returned
     fs.rmSync(tmpPath, { recursive: true, force: true })
 
     return tarPath
   }
   // tar not requested, turn the directory where export is
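
Note the ordering in this hunk: each file in the temp directory is encrypted, and its plaintext removed, before `tarFilesToTmp` runs. The result is an ordinary gzipped tarball whose entries simply carry the `.enc` suffix. A small sketch to inspect one (the archive and entry names are illustrative):

```ts
import tar from "tar"

// The archive itself opens normally; only its entries are opaque
// without the password.
await tar.t({
  file: "my-app-export-1690000000000.enc.tar.gz",
  onentry: entry => console.log(entry.path), // e.g. "db.txt.enc"
})
```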
|
@@ -161,11 +180,20 @@ export async function exportApp(appId: string, config?: ExportOpts) {
  * @param {boolean} excludeRows Flag to state whether the export should include data.
  * @returns {*} a readable stream of the backup which is written in real time
  */
-export async function streamExportApp(appId: string, excludeRows: boolean) {
+export async function streamExportApp({
+  appId,
+  excludeRows,
+  encryptPassword,
+}: {
+  appId: string
+  excludeRows: boolean
+  encryptPassword?: string
+}) {
   const tmpPath = await exportApp(appId, {
     excludeRows,
     excludeLogs: true,
     tar: true,
+    encryptPassword,
   })
   return streamFile(tmpPath)
 }

@@ -1,4 +1,4 @@
-import { db as dbCore, objectStore } from "@budibase/backend-core"
+import { db as dbCore, encryption, objectStore } from "@budibase/backend-core"
 import { Database, Row } from "@budibase/types"
 import { getAutomationParams, TABLE_ROW_PREFIX } from "../../../db/utils"
 import { budibaseTempDir } from "../../../utilities/budibaseDir"

@@ -20,6 +20,7 @@ type TemplateType = {
   file?: {
     type: string
     path: string
+    password?: string
   }
   key?: string
 }

@@ -123,6 +124,22 @@ export function untarFile(file: { path: string }) {
   return tmpPath
 }
 
+async function decryptFiles(path: string, password: string) {
+  try {
+    for (let file of fs.readdirSync(path)) {
+      const inputPath = join(path, file)
+      const outputPath = inputPath.replace(/\.enc$/, "")
+      await encryption.decryptFile(inputPath, outputPath, password)
+      fs.rmSync(inputPath)
+    }
+  } catch (err: any) {
+    if (err.message === "incorrect header check") {
+      throw new Error("File cannot be imported")
+    }
+    throw err
+  }
+}
+
 export function getGlobalDBFile(tmpPath: string) {
   return fs.readFileSync(join(tmpPath, GLOBAL_DB_EXPORT_FILE), "utf8")
 }
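
The `incorrect header check` branch deserves a note: AES-CTR is unauthenticated, so decrypting with the wrong password still "succeeds" and simply yields noise. The first thing to fail is gunzip validating the gzip magic bytes, and that zlib error is what gets remapped to the friendlier import message. A quick illustration of where the string comes from:

```ts
import zlib from "zlib"

// Non-gzip bytes (which is what a wrong password produces from AES-CTR)
// fail zlib's header validation:
zlib.gunzip(Buffer.from("definitely not gzip"), err => {
  console.log(err?.message) // "incorrect header check"
})
```
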
@@ -143,6 +160,9 @@ export async function importApp(
     template.file && fs.lstatSync(template.file.path).isDirectory()
   if (template.file && (isTar || isDirectory)) {
     const tmpPath = isTar ? untarFile(template.file) : template.file.path
+    if (isTar && template.file.password) {
+      await decryptFiles(tmpPath, template.file.password)
+    }
     const contents = fs.readdirSync(tmpPath)
     // have to handle object import
     if (contents.length) {

yarn.lock

@@ -6197,13 +6197,13 @@
   dependencies:
     "@types/node" "*"
 
-"@types/tar@6.1.3":
-  version "6.1.3"
-  resolved "https://registry.yarnpkg.com/@types/tar/-/tar-6.1.3.tgz#46a2ce7617950c4852dfd7e9cd41aa8161b9d750"
-  integrity sha512-YzDOr5kdAeqS8dcO6NTTHTMJ44MUCBDoLEIyPtwEn7PssKqUYL49R1iCVJPeiPzPlKi6DbH33eZkpeJ27e4vHg==
+"@types/tar@6.1.5":
+  version "6.1.5"
+  resolved "https://registry.yarnpkg.com/@types/tar/-/tar-6.1.5.tgz#90ccb3b6a35430e7427410d50eed564e85feaaff"
+  integrity sha512-qm2I/RlZij5RofuY7vohTpYNaYcrSQlN2MyjucQc7ZweDwaEWkdN/EeNh6e9zjK6uEm6PwjdMXkcj05BxZdX1Q==
   dependencies:
     "@types/node" "*"
-    minipass "^3.3.5"
+    minipass "^4.0.0"
 
 "@types/tern@*":
   version "0.23.4"

@@ -18034,7 +18034,7 @@ minipass-sized@^1.0.3:
   dependencies:
     minipass "^3.0.0"
 
-minipass@^3.0.0, minipass@^3.1.1, minipass@^3.1.6, minipass@^3.3.5:
+minipass@^3.0.0, minipass@^3.1.1, minipass@^3.1.6:
   version "3.3.6"
   resolved "https://registry.yarnpkg.com/minipass/-/minipass-3.3.6.tgz#7bba384db3a1520d18c9c0e5251c3444e95dd94a"
   integrity sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==

@@ -24160,6 +24160,18 @@ tar@6.1.11:
     mkdirp "^1.0.3"
     yallist "^4.0.0"
 
+tar@6.1.15:
+  version "6.1.15"
+  resolved "https://registry.yarnpkg.com/tar/-/tar-6.1.15.tgz#c9738b0b98845a3b344d334b8fa3041aaba53a69"
+  integrity sha512-/zKt9UyngnxIT/EAGYuxaMYgOIJiP81ab9ZfkILq4oNLPFX50qyYmu7jRj9qeXoxmJHjGlbH0+cm2uy1WCs10A==
+  dependencies:
+    chownr "^2.0.0"
+    fs-minipass "^2.0.0"
+    minipass "^5.0.0"
+    minizlib "^2.1.1"
+    mkdirp "^1.0.3"
+    yallist "^4.0.0"
+
 tar@^6.1.11, tar@^6.1.2:
   version "6.1.13"
   resolved "https://registry.yarnpkg.com/tar/-/tar-6.1.13.tgz#46e22529000f612180601a6fe0680e7da508847b"