Updating some of the deployment/upload mechanism, purely to remove file system usage. All of this will go away eventually, but some of it was used to handle file uploads (attachments) to the object store, so it needed to be converted.

mike12345567 2021-03-22 18:06:10 +00:00
parent 78eb00b39d
commit 36c20b2498
9 changed files with 65 additions and 53 deletions
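In short, call sites stop constructing their own AWS.S3 client and instead pass a bucket name down to shared helpers that build the client internally. A minimal before/after sketch (the s3Key and file values are illustrative only, not taken from the diff):

// before: each caller created its own S3 client
const AWS = require("aws-sdk")
const s3 = new AWS.S3({ params: { Bucket: "prod-budi-app-assets" } })
await prepareUpload({ s3Key: "assets/app_123/index.html", client: s3, file })

// after: callers hand over the bucket name; the fileSystem utilities
// own the client configuration (including the Minio endpoint)
await prepareUpload({ s3Key: "assets/app_123/index.html", bucket: "prod-budi-app-assets", file })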


@@ -1,7 +1,6 @@
 node_modules/
 myapps/
 .env
-dev.env
 /builder/*
 !/builder/assets/
 !/builder/pickr.min.js

packages/server/dev.env Normal file

@@ -0,0 +1 @@
+PORT=4001


@@ -137,6 +137,7 @@
   "devDependencies": {
     "@budibase/standard-components": "^0.8.9",
     "@jest/test-sequencer": "^24.8.0",
+    "docker-compose": "^0.23.6",
     "electron": "10.1.3",
     "electron-builder": "^22.9.1",
     "electron-builder-notarize": "^1.1.2",


@@ -66,12 +66,7 @@ exports.deploy = async function(deployment) {
   const appId = deployment.getAppId()
   const { bucket, accountId } = deployment.getVerification()
   const metadata = { accountId }
-  const s3Client = new AWS.S3({
-    params: {
-      Bucket: bucket,
-    },
-  })
-  await deployToObjectStore(appId, s3Client, metadata)
+  await deployToObjectStore(appId, bucket, metadata)
 }

 exports.replicateDb = async function(deployment) {


@@ -7,7 +7,6 @@ const {
 const {
   getWorkerUrl,
   getCouchUrl,
-  getMinioUrl,
   getSelfHostKey,
 } = require("../../../utilities/builder/hosting")
@@ -45,17 +44,9 @@ exports.postDeployment = async function() {
 exports.deploy = async function(deployment) {
   const appId = deployment.getAppId()
   const verification = deployment.getVerification()
-  const objClient = new AWS.S3({
-    endpoint: await getMinioUrl(),
-    s3ForcePathStyle: true, // needed with minio?
-    signatureVersion: "v4",
-    params: {
-      Bucket: verification.bucket,
-    },
-  })
   // no metadata, aws has account ID in metadata
   const metadata = {}
-  await deployToObjectStore(appId, objClient, metadata)
+  await deployToObjectStore(appId, verification.bucket, metadata)
 }

 exports.replicateDb = async function(deployment) {


@@ -1,17 +1,10 @@
-const fs = require("fs")
-const sanitize = require("sanitize-s3-objectkey")
 const { walkDir } = require("../../../utilities")
 const { join } = require("../../../utilities/centralPath")
 const { budibaseAppsDir } = require("../../../utilities/budibaseDir")
 const fetch = require("node-fetch")
 const PouchDB = require("../../../db")
 const CouchDB = require("pouchdb")
-const CONTENT_TYPE_MAP = {
-  html: "text/html",
-  css: "text/css",
-  js: "application/javascript",
-}
+const { upload } = require("../../../utilities/fileSystem")

 exports.fetchCredentials = async function(url, body) {
   const response = await fetch(url, {
@@ -34,30 +27,25 @@ exports.fetchCredentials = async function(url, body) {
   return json
 }

-exports.prepareUpload = async function({ s3Key, metadata, client, file }) {
-  const extension = [...file.name.split(".")].pop()
-  const fileBytes = fs.readFileSync(file.path)
-  const upload = await client
-    .upload({
-      // windows file paths need to be converted to forward slashes for s3
-      Key: sanitize(s3Key).replace(/\\/g, "/"),
-      Body: fileBytes,
-      ContentType: file.type || CONTENT_TYPE_MAP[extension.toLowerCase()],
-      Metadata: metadata,
-    })
-    .promise()
+exports.prepareUpload = async function({ s3Key, bucket, metadata, file }) {
+  const response = await upload({
+    bucket,
+    metadata,
+    filename: s3Key,
+    path: file.path,
+    type: file.type,
+  })

   return {
     size: file.size,
     name: file.name,
-    extension,
-    url: upload.Location,
-    key: upload.Key,
+    extension: [...file.name.split(".")].pop(),
+    url: response.Location,
+    key: response.Key,
   }
 }

-exports.deployToObjectStore = async function(appId, objectClient, metadata) {
+exports.deployToObjectStore = async function(appId, bucket, metadata) {
   const appAssetsPath = join(budibaseAppsDir(), appId, "public")
   let uploads = []
@@ -66,12 +54,12 @@ exports.deployToObjectStore = async function(appId, objectClient, metadata) {
   walkDir(appAssetsPath, function(filePath) {
     const filePathParts = filePath.split("/")
     const appAssetUpload = exports.prepareUpload({
+      bucket,
       file: {
         path: filePath,
         name: filePathParts.pop(),
       },
       s3Key: filePath.replace(appAssetsPath, `assets/${appId}`),
-      client: objectClient,
       metadata,
     })
     uploads.push(appAssetUpload)

@@ -92,7 +80,7 @@ exports.deployToObjectStore = async function(appId, objectClient, metadata) {
     const attachmentUpload = exports.prepareUpload({
       file,
       s3Key: `assets/${appId}/attachments/${file.processedFileName}`,
-      client: objectClient,
+      bucket,
       metadata,
     })
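
A minimal usage sketch of the reworked prepareUpload, for clarity (the app ID and file values below are made up for illustration; the bucket name appears later in this commit):

// inside an async function
const { prepareUpload } = require("./deploy/utils")
const result = await prepareUpload({
  bucket: "prod-budi-app-assets",
  s3Key: "assets/app_123/attachments/logo.png",
  metadata: {}, // optional object metadata
  file: { name: "logo.png", path: "/tmp/upload_abc", size: 1024, type: "image/png" },
})
// result => { size, name, extension, url, key }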


@@ -4,7 +4,6 @@ const send = require("koa-send")
 const { resolve, join } = require("../../../utilities/centralPath")
 const fetch = require("node-fetch")
 const uuid = require("uuid")
-const AWS = require("aws-sdk")
 const { prepareUpload } = require("../deploy/utils")
 const { processString } = require("@budibase/string-templates")
 const {

@@ -56,12 +55,6 @@ exports.uploadFile = async function(ctx) {
     ? Array.from(ctx.request.files.file)
     : [ctx.request.files.file]

-  const s3 = new AWS.S3({
-    params: {
-      Bucket: "prod-budi-app-assets",
-    },
-  })

   const uploads = files.map(async file => {
     const fileExtension = [...file.name.split(".")].pop()
     // filenames converted to UUIDs so they are unique

@@ -76,7 +69,7 @@ exports.uploadFile = async function(ctx) {
     return prepareUpload({
       file,
       s3Key: `assets/${ctx.user.appId}/attachments/${processedFileName}`,
-      s3,
+      bucket: "prod-budi-app-assets",
     })
   })


@@ -5,7 +5,7 @@ const { join } = require("path")
 const uuid = require("uuid/v4")
 const CouchDB = require("../../db")
 const { ObjectStoreBuckets } = require("../../constants")
-const { streamUpload, deleteFolder, downloadTarball } = require("./utilities")
+const { upload, streamUpload, deleteFolder, downloadTarball } = require("./utilities")
 const { downloadLibraries, newAppPublicPath } = require("./newApp")

 /**

@@ -104,3 +104,8 @@ exports.downloadTemplate = async (type, name) => {
 exports.readFileSync = (filepath, options = "utf8") => {
   return fs.readFileSync(filepath, options)
 }
+
+/**
+ * Full function definition provided in the utilities.
+ */
+exports.upload = upload
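
With the re-export in place, consumers can pull upload from the fileSystem module instead of reaching into the utilities file directly; a one-line sketch (the relative path will vary by caller):

const { upload } = require("../../utilities/fileSystem")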


@@ -9,9 +9,16 @@ const { join } = require("path")
 const { streamUpload } = require("./utilities")
 const fs = require("fs")
 const { budibaseTempDir } = require("../budibaseDir")
+const env = require("../../environment")

 const streamPipeline = promisify(stream.pipeline)

+const CONTENT_TYPE_MAP = {
+  html: "text/html",
+  css: "text/css",
+  js: "application/javascript",
+}
+
 /**
  * Gets a connection to the object store using the S3 SDK.
  * @param {string} bucket the name of the bucket which blobs will be uploaded/retrieved from.

@@ -20,6 +27,10 @@ const streamPipeline = promisify(stream.pipeline)
  */
 exports.ObjectStore = bucket => {
   return new AWS.S3({
+    // TODO: need to deal with endpoint properly
+    endpoint: env.MINIO_URL,
+    s3ForcePathStyle: true, // needed with minio?
+    signatureVersion: "v4",
     params: {
       Bucket: bucket,
     },
@@ -47,6 +58,34 @@ exports.makeSureBucketExists = async (client, bucketName) => {
   }
 }

+/**
+ * Uploads the contents of a file given the required parameters, useful when
+ * temp files are in use (for example a file uploaded as an attachment).
+ * @param {string} bucket The name of the bucket to be uploaded to.
+ * @param {string} filename The name/path of the file in the object store.
+ * @param {string} path The path to the file (ideally a temporary file).
+ * @param {string} type If the content type is known it can be specified.
+ * @param {object} metadata If there is metadata for the object it can be passed as well.
+ * @return {Promise<ManagedUpload.SendData>} The file has been uploaded to the object
+ * store successfully when the promise completes.
+ */
+exports.upload = async ({ bucket, filename, path, type, metadata }) => {
+  const extension = [...filename.split(".")].pop()
+  const fileBytes = fs.readFileSync(path)
+  const objectStore = exports.ObjectStore(bucket)
+  const config = {
+    // windows file paths need to be converted to forward slashes for s3
+    Key: sanitize(filename).replace(/\\/g, "/"),
+    Body: fileBytes,
+    ContentType: type || CONTENT_TYPE_MAP[extension.toLowerCase()],
+  }
+  if (metadata) {
+    config.Metadata = metadata
+  }
+  return objectStore.upload(config).promise()
+}
+
 exports.streamUpload = async (bucket, filename, stream) => {
   const objectStore = exports.ObjectStore(bucket)
   await exports.makeSureBucketExists(objectStore, bucket)
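
Putting the new utility together, an end-to-end sketch (the bucket, file, and metadata values are illustrative, and env.MINIO_URL must point at the object store for the endpoint above to resolve):

// inside an async function
const { upload } = require("../../utilities/fileSystem")
await upload({
  bucket: "prod-budi-app-assets",
  filename: "assets/app_123/attachments/report.pdf",
  path: "/tmp/report.pdf", // temp file on disk
  type: "application/pdf", // optional; falls back to CONTENT_TYPE_MAP by extension
  metadata: { accountId: "acc_123" }, // optional; only set when provided
})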