Merge pull request #10874 from Budibase/budi-7010-encrypt-app-exports

BUDI-7010 - Encrypt app exports
Authored by Adria Navarro on 2023-06-14 14:58:23 +01:00, committed by GitHub. Commit b7e37906c7.
7 changed files with 205 additions and 22 deletions
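In short: app exports can now be encrypted with a user-supplied password. Each exported file is gzipped, then encrypted with AES-256-CTR under a key stretched from the password with PBKDF2 (10,000 iterations of SHA-512), and written out as a 16-byte salt, a 16-byte IV, and the ciphertext. A minimal standalone sketch of reading that layout back (the file name and password are hypothetical; the constants mirror the encryption module in the first diff below):

import crypto from "crypto"
import fs from "fs"
import zlib from "zlib"

// Constants as defined in the encryption module below
const ALGO = "aes-256-ctr"
const ITERATIONS = 10000
const STRETCH_LENGTH = 32
const SALT_LENGTH = 16
const IV_LENGTH = 16

function decryptExport(path: string, password: string): Buffer {
  // Buffering the whole file keeps the sketch short; the real code streams.
  const raw = fs.readFileSync(path)
  const salt = raw.subarray(0, SALT_LENGTH)
  const iv = raw.subarray(SALT_LENGTH, SALT_LENGTH + IV_LENGTH)
  const key = crypto.pbkdf2Sync(password, salt, ITERATIONS, STRETCH_LENGTH, "sha512")
  const decipher = crypto.createDecipheriv(ALGO, key, iv)
  const payload = raw.subarray(SALT_LENGTH + IV_LENGTH)
  // Decrypt first, then gunzip: the inverse of the gzip -> cipher write path.
  return zlib.gunzipSync(Buffer.concat([decipher.update(payload), decipher.final()]))
}

decryptExport("db.txt.enc", "correct horse battery staple") // hypothetical usage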

View File

@@ -1,12 +1,17 @@
 import crypto from "crypto"
+import fs from "fs"
+import zlib from "zlib"
 import env from "../environment"
+import { join } from "path"
 
 const ALGO = "aes-256-ctr"
 const SEPARATOR = "-"
 const ITERATIONS = 10000
-const RANDOM_BYTES = 16
 const STRETCH_LENGTH = 32
+const SALT_LENGTH = 16
+const IV_LENGTH = 16
 
 export enum SecretOption {
   API = "api",
   ENCRYPTION = "encryption",
@@ -31,15 +36,15 @@ export function getSecret(secretOption: SecretOption): string {
   return secret
 }
 
-function stretchString(string: string, salt: Buffer) {
-  return crypto.pbkdf2Sync(string, salt, ITERATIONS, STRETCH_LENGTH, "sha512")
+function stretchString(secret: string, salt: Buffer) {
+  return crypto.pbkdf2Sync(secret, salt, ITERATIONS, STRETCH_LENGTH, "sha512")
 }
 
 export function encrypt(
   input: string,
   secretOption: SecretOption = SecretOption.API
 ) {
-  const salt = crypto.randomBytes(RANDOM_BYTES)
+  const salt = crypto.randomBytes(SALT_LENGTH)
   const stretched = stretchString(getSecret(secretOption), salt)
   const cipher = crypto.createCipheriv(ALGO, stretched, salt)
   const base = cipher.update(input)
@@ -60,3 +65,115 @@ export function decrypt(
   const final = decipher.final()
   return Buffer.concat([base, final]).toString()
 }
+
+export async function encryptFile(
+  { dir, filename }: { dir: string; filename: string },
+  secret: string
+) {
+  const outputFileName = `${filename}.enc`
+
+  const filePath = join(dir, filename)
+  const inputFile = fs.createReadStream(filePath)
+  const outputFile = fs.createWriteStream(join(dir, outputFileName))
+
+  const salt = crypto.randomBytes(SALT_LENGTH)
+  const iv = crypto.randomBytes(IV_LENGTH)
+  const stretched = stretchString(secret, salt)
+  const cipher = crypto.createCipheriv(ALGO, stretched, iv)
+
+  outputFile.write(salt)
+  outputFile.write(iv)
+
+  inputFile.pipe(zlib.createGzip()).pipe(cipher).pipe(outputFile)
+
+  return new Promise<{ filename: string; dir: string }>(r => {
+    outputFile.on("finish", () => {
+      r({
+        filename: outputFileName,
+        dir,
+      })
+    })
+  })
+}
+
+async function getSaltAndIV(path: string) {
+  const fileStream = fs.createReadStream(path)
+
+  const salt = await readBytes(fileStream, SALT_LENGTH)
+  const iv = await readBytes(fileStream, IV_LENGTH)
+  fileStream.close()
+  return { salt, iv }
+}
+
+export async function decryptFile(
+  inputPath: string,
+  outputPath: string,
+  secret: string
+) {
+  const { salt, iv } = await getSaltAndIV(inputPath)
+  const inputFile = fs.createReadStream(inputPath, {
+    start: SALT_LENGTH + IV_LENGTH,
+  })
+
+  const outputFile = fs.createWriteStream(outputPath)
+
+  const stretched = stretchString(secret, salt)
+  const decipher = crypto.createDecipheriv(ALGO, stretched, iv)
+
+  const unzip = zlib.createGunzip()
+
+  inputFile.pipe(decipher).pipe(unzip).pipe(outputFile)
+
+  return new Promise<void>((res, rej) => {
+    outputFile.on("finish", () => {
+      outputFile.close()
+      res()
+    })
+
+    inputFile.on("error", e => {
+      outputFile.close()
+      rej(e)
+    })
+
+    decipher.on("error", e => {
+      outputFile.close()
+      rej(e)
+    })
+
+    unzip.on("error", e => {
+      outputFile.close()
+      rej(e)
+    })
+
+    outputFile.on("error", e => {
+      outputFile.close()
+      rej(e)
+    })
+  })
+}
+
+function readBytes(stream: fs.ReadStream, length: number) {
+  return new Promise<Buffer>((resolve, reject) => {
+    let bytesRead = 0
+    const data: Buffer[] = []
+
+    stream.on("readable", () => {
+      let chunk
+      while ((chunk = stream.read(length - bytesRead)) !== null) {
+        data.push(chunk)
+        bytesRead += chunk.length
+      }
+
+      resolve(Buffer.concat(data))
+    })
+
+    stream.on("end", () => {
+      reject(new Error("Insufficient data in the stream."))
+    })
+
+    stream.on("error", error => {
+      reject(error)
+    })
+  })
+}
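
For context, a round trip with the two new helpers looks roughly like this (directory, file name, and password are illustrative, not taken from the diff):

import { encryption } from "@budibase/backend-core"
import { join } from "path"

async function roundTrip() {
  // Writes /tmp/export/db.txt.enc next to the input and resolves with the new name.
  const { dir, filename } = await encryption.encryptFile(
    { dir: "/tmp/export", filename: "db.txt" },
    "correct horse battery staple"
  )
  // Reads the salt/IV header, then deciphers and gunzips back to plaintext.
  await encryption.decryptFile(
    join(dir, filename),
    join(dir, "db.txt"),
    "correct horse battery staple"
  )
}

Note that encryptFile resolves on the output stream's finish event but, unlike decryptFile, does not reject on stream errors; callers needing robust error handling would have to attach their own listeners.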

View File

@@ -49,7 +49,7 @@
     "pouchdb": "7.3.0",
     "pouchdb-replication-stream": "1.2.9",
     "randomstring": "1.1.5",
-    "tar": "6.1.11",
+    "tar": "6.1.15",
     "yaml": "^2.1.1"
   },
   "devDependencies": {

View File

@@ -117,7 +117,7 @@
     "socket.io": "4.6.1",
     "svelte": "3.49.0",
     "swagger-parser": "10.0.3",
-    "tar": "6.1.11",
+    "tar": "6.1.15",
     "to-json-schema": "0.2.5",
     "uuid": "3.3.2",
     "validate.js": "0.13.1",
@@ -150,7 +150,7 @@
     "@types/redis": "4.0.11",
     "@types/server-destroy": "1.0.1",
     "@types/supertest": "2.0.12",
-    "@types/tar": "6.1.3",
+    "@types/tar": "6.1.5",
     "@typescript-eslint/parser": "5.45.0",
     "apidoc": "0.50.4",
     "babel-jest": "29.5.0",

View File

@@ -5,11 +5,12 @@ import { Ctx } from "@budibase/types"
 
 interface ExportAppDumpRequest {
   excludeRows: boolean
+  encryptPassword?: string
 }
 
 export async function exportAppDump(ctx: Ctx<ExportAppDumpRequest>) {
   const { appId } = ctx.query as any
-  const { excludeRows } = ctx.request.body
+  const { excludeRows, encryptPassword } = ctx.request.body
 
   const [app] = await db.getAppsByIDs([appId])
   const appName = app.name
@@ -17,9 +18,14 @@ export async function exportAppDump(ctx: Ctx<ExportAppDumpRequest>) {
   // remove the 120 second limit for the request
   ctx.req.setTimeout(0)
 
-  const backupIdentifier = `${appName}-export-${new Date().getTime()}.tar.gz`
+  const extension = encryptPassword ? "enc.tar.gz" : "tar.gz"
+  const backupIdentifier = `${appName}-export-${new Date().getTime()}.${extension}`
   ctx.attachment(backupIdentifier)
-  ctx.body = await sdk.backups.streamExportApp(appId, excludeRows)
+  ctx.body = await sdk.backups.streamExportApp({
+    appId,
+    excludeRows,
+    encryptPassword,
+  })
 
   await context.doInAppContext(appId, async () => {
     const appDb = context.getAppDB()
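
For a client, opting into encryption is just one extra body field. A sketch (the route path is an assumption; only the query and body shape come from this diff):

// Hypothetical route; exportAppDump reads appId from the query string.
const appId = "app_123"
const res = await fetch(`/api/backups/export?appId=${appId}`, {
  method: "POST",
  headers: { "content-type": "application/json" },
  body: JSON.stringify({
    excludeRows: true,
    encryptPassword: "correct horse battery staple", // omit for a plain export
  }),
})
// With a password set, the attachment is <appName>-export-<timestamp>.enc.tar.gz.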

View File

@@ -1,4 +1,4 @@
-import { db as dbCore, objectStore } from "@budibase/backend-core"
+import { db as dbCore, encryption, objectStore } from "@budibase/backend-core"
 import { budibaseTempDir } from "../../../utilities/budibaseDir"
 import { streamFile, createTempFolder } from "../../../utilities/fileSystem"
 import { ObjectStoreBuckets } from "../../../constants"
@@ -18,7 +18,8 @@ import { join } from "path"
 import env from "../../../environment"
 
 const uuid = require("uuid/v4")
-const tar = require("tar")
+import tar from "tar"
 const MemoryStream = require("memorystream")
 
 interface DBDumpOpts {
@@ -30,16 +31,18 @@ interface ExportOpts extends DBDumpOpts {
   tar?: boolean
   excludeRows?: boolean
   excludeLogs?: boolean
+  encryptPassword?: string
 }
 
 function tarFilesToTmp(tmpDir: string, files: string[]) {
-  const exportFile = join(budibaseTempDir(), `${uuid()}.tar.gz`)
+  const fileName = `${uuid()}.tar.gz`
+  const exportFile = join(budibaseTempDir(), fileName)
   tar.create(
     {
       sync: true,
       gzip: true,
       file: exportFile,
-      recursive: true,
+      noDirRecurse: false,
       cwd: tmpDir,
     },
     files
@@ -124,6 +127,7 @@ export async function exportApp(appId: string, config?: ExportOpts) {
       )
     }
   }
+
   const downloadedPath = join(tmpPath, appPath)
   if (fs.existsSync(downloadedPath)) {
     const allFiles = fs.readdirSync(downloadedPath)
@@ -141,12 +145,27 @@ export async function exportApp(appId: string, config?: ExportOpts) {
     filter: defineFilter(config?.excludeRows, config?.excludeLogs),
     exportPath: dbPath,
   })
+
+  if (config?.encryptPassword) {
+    for (let file of fs.readdirSync(tmpPath)) {
+      const path = join(tmpPath, file)
+
+      await encryption.encryptFile(
+        { dir: tmpPath, filename: file },
+        config.encryptPassword
+      )
+
+      fs.rmSync(path)
+    }
+  }
+
   // if tar requested, return where the tarball is
   if (config?.tar) {
     // now the tmpPath contains both the DB export and attachments, tar this
     const tarPath = tarFilesToTmp(tmpPath, fs.readdirSync(tmpPath))
     // cleanup the tmp export files as tarball returned
     fs.rmSync(tmpPath, { recursive: true, force: true })
     return tarPath
   }
   // tar not requested, turn the directory where export is
@@ -161,11 +180,20 @@ export async function exportApp(appId: string, config?: ExportOpts) {
  * @param {boolean} excludeRows Flag to state whether the export should include data.
  * @returns {*} a readable stream of the backup which is written in real time
  */
-export async function streamExportApp(appId: string, excludeRows: boolean) {
+export async function streamExportApp({
+  appId,
+  excludeRows,
+  encryptPassword,
+}: {
+  appId: string
+  excludeRows: boolean
+  encryptPassword?: string
+}) {
   const tmpPath = await exportApp(appId, {
     excludeRows,
     excludeLogs: true,
     tar: true,
+    encryptPassword,
   })
   return streamFile(tmpPath)
 }

View File

@@ -1,4 +1,4 @@
-import { db as dbCore, objectStore } from "@budibase/backend-core"
+import { db as dbCore, encryption, objectStore } from "@budibase/backend-core"
 import { Database, Row } from "@budibase/types"
 import { getAutomationParams, TABLE_ROW_PREFIX } from "../../../db/utils"
 import { budibaseTempDir } from "../../../utilities/budibaseDir"
@@ -20,6 +20,7 @@ type TemplateType = {
   file?: {
     type: string
     path: string
+    password?: string
   }
   key?: string
 }
@@ -123,6 +124,22 @@ export function untarFile(file: { path: string }) {
   return tmpPath
 }
 
+async function decryptFiles(path: string, password: string) {
+  try {
+    for (let file of fs.readdirSync(path)) {
+      const inputPath = join(path, file)
+
+      const outputPath = inputPath.replace(/\.enc$/, "")
+      await encryption.decryptFile(inputPath, outputPath, password)
+      fs.rmSync(inputPath)
+    }
+  } catch (err: any) {
+    if (err.message === "incorrect header check") {
+      throw new Error("File cannot be imported")
+    }
+    throw err
+  }
+}
+
 export function getGlobalDBFile(tmpPath: string) {
   return fs.readFileSync(join(tmpPath, GLOBAL_DB_EXPORT_FILE), "utf8")
 }
@@ -143,6 +160,9 @@ export async function importApp(
     template.file && fs.lstatSync(template.file.path).isDirectory()
   if (template.file && (isTar || isDirectory)) {
     const tmpPath = isTar ? untarFile(template.file) : template.file.path
+    if (isTar && template.file.password) {
+      await decryptFiles(tmpPath, template.file.password)
+    }
     const contents = fs.readdirSync(tmpPath)
     // have to handle object import
     if (contents.length) {
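
One subtlety behind the error mapping above: AES-256-CTR provides no integrity check, so a wrong password does not fail at the cipher level; decryption "succeeds" and yields garbage, which gunzip then rejects with zlib's "incorrect header check". decryptFiles translates exactly that message into a user-facing error. Sketched below (decryptFiles is module-private; the call is written as if it were exported, with a hypothetical path):

try {
  await decryptFiles("/tmp/import-extract", "wrong-password")
} catch (err: any) {
  console.log(err.message) // "File cannot be imported"
}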

View File

@@ -6197,13 +6197,13 @@
   dependencies:
     "@types/node" "*"
 
-"@types/tar@6.1.3":
-  version "6.1.3"
-  resolved "https://registry.yarnpkg.com/@types/tar/-/tar-6.1.3.tgz#46a2ce7617950c4852dfd7e9cd41aa8161b9d750"
-  integrity sha512-YzDOr5kdAeqS8dcO6NTTHTMJ44MUCBDoLEIyPtwEn7PssKqUYL49R1iCVJPeiPzPlKi6DbH33eZkpeJ27e4vHg==
+"@types/tar@6.1.5":
+  version "6.1.5"
+  resolved "https://registry.yarnpkg.com/@types/tar/-/tar-6.1.5.tgz#90ccb3b6a35430e7427410d50eed564e85feaaff"
+  integrity sha512-qm2I/RlZij5RofuY7vohTpYNaYcrSQlN2MyjucQc7ZweDwaEWkdN/EeNh6e9zjK6uEm6PwjdMXkcj05BxZdX1Q==
   dependencies:
     "@types/node" "*"
-    minipass "^3.3.5"
+    minipass "^4.0.0"
 
 "@types/tern@*":
   version "0.23.4"
@@ -18034,7 +18034,7 @@ minipass-sized@^1.0.3:
   dependencies:
     minipass "^3.0.0"
 
-minipass@^3.0.0, minipass@^3.1.1, minipass@^3.1.6, minipass@^3.3.5:
+minipass@^3.0.0, minipass@^3.1.1, minipass@^3.1.6:
   version "3.3.6"
   resolved "https://registry.yarnpkg.com/minipass/-/minipass-3.3.6.tgz#7bba384db3a1520d18c9c0e5251c3444e95dd94a"
   integrity sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==
@@ -24160,6 +24160,18 @@ tar@6.1.11:
     mkdirp "^1.0.3"
     yallist "^4.0.0"
 
+tar@6.1.15:
+  version "6.1.15"
+  resolved "https://registry.yarnpkg.com/tar/-/tar-6.1.15.tgz#c9738b0b98845a3b344d334b8fa3041aaba53a69"
+  integrity sha512-/zKt9UyngnxIT/EAGYuxaMYgOIJiP81ab9ZfkILq4oNLPFX50qyYmu7jRj9qeXoxmJHjGlbH0+cm2uy1WCs10A==
+  dependencies:
+    chownr "^2.0.0"
+    fs-minipass "^2.0.0"
+    minipass "^5.0.0"
+    minizlib "^2.1.1"
+    mkdirp "^1.0.3"
+    yallist "^4.0.0"
+
 tar@^6.1.11, tar@^6.1.2:
   version "6.1.13"
   resolved "https://registry.yarnpkg.com/tar/-/tar-6.1.13.tgz#46e22529000f612180601a6fe0680e7da508847b"