Updating some of the deployment/upload mechanism, purely to remove file system usage. All of this will go away eventually, but some of it was used to handle file uploads (attachments) to the object store, so it needed converting.
This commit is contained in:
parent 35e3999b05
commit 091782389a
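
A note on the shape of the change: call sites stop constructing their own AWS.S3 clients and instead pass a bucket name down to a single upload helper in utilities/fileSystem, which owns the S3/Minio connection. A minimal sketch of the new call shape, assuming the helper added in this commit; the key, file path and metadata values below are illustrative only:

const { upload } = require("./utilities/fileSystem")

async function uploadAttachment() {
  // The helper builds the S3/Minio client internally (endpoint taken from
  // env.MINIO_URL), so callers only supply the bucket and file details.
  const response = await upload({
    bucket: "prod-budi-app-assets", // bucket used by the uploadFile controller below
    filename: "assets/app_123/attachments/file.txt", // illustrative object key
    path: "/tmp/file.txt", // illustrative temp file path
    type: "text/plain", // optional; inferred from the extension if omitted
    metadata: { accountId: "abc123" }, // optional, attached as object metadata
  })
  // response is the aws-sdk ManagedUpload.SendData, e.g. response.Location
  return response
}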
@@ -1,7 +1,6 @@
 node_modules/
 myapps/
 .env
-dev.env
 /builder/*
 !/builder/assets/
 !/builder/pickr.min.js

@@ -0,0 +1 @@
+PORT=4001

@@ -137,6 +137,7 @@
   "devDependencies": {
     "@budibase/standard-components": "^0.8.9",
     "@jest/test-sequencer": "^24.8.0",
+    "docker-compose": "^0.23.6",
     "electron": "10.1.3",
     "electron-builder": "^22.9.1",
     "electron-builder-notarize": "^1.1.2",

@@ -66,12 +66,7 @@ exports.deploy = async function(deployment) {
   const appId = deployment.getAppId()
   const { bucket, accountId } = deployment.getVerification()
   const metadata = { accountId }
-  const s3Client = new AWS.S3({
-    params: {
-      Bucket: bucket,
-    },
-  })
-  await deployToObjectStore(appId, s3Client, metadata)
+  await deployToObjectStore(appId, bucket, metadata)
 }

 exports.replicateDb = async function(deployment) {

@@ -7,7 +7,6 @@ const {
 const {
   getWorkerUrl,
   getCouchUrl,
-  getMinioUrl,
   getSelfHostKey,
 } = require("../../../utilities/builder/hosting")

@@ -45,17 +44,9 @@ exports.postDeployment = async function() {
 exports.deploy = async function(deployment) {
   const appId = deployment.getAppId()
   const verification = deployment.getVerification()
-  const objClient = new AWS.S3({
-    endpoint: await getMinioUrl(),
-    s3ForcePathStyle: true, // needed with minio?
-    signatureVersion: "v4",
-    params: {
-      Bucket: verification.bucket,
-    },
-  })
   // no metadata, aws has account ID in metadata
   const metadata = {}
-  await deployToObjectStore(appId, objClient, metadata)
+  await deployToObjectStore(appId, verification.bucket, metadata)
 }

 exports.replicateDb = async function(deployment) {

@@ -1,17 +1,10 @@
-const fs = require("fs")
-const sanitize = require("sanitize-s3-objectkey")
 const { walkDir } = require("../../../utilities")
 const { join } = require("../../../utilities/centralPath")
 const { budibaseAppsDir } = require("../../../utilities/budibaseDir")
 const fetch = require("node-fetch")
 const PouchDB = require("../../../db")
 const CouchDB = require("pouchdb")
+const { upload } = require("../../../utilities/fileSystem")
-
-const CONTENT_TYPE_MAP = {
-  html: "text/html",
-  css: "text/css",
-  js: "application/javascript",
-}

 exports.fetchCredentials = async function(url, body) {
   const response = await fetch(url, {

@@ -34,30 +27,25 @@ exports.fetchCredentials = async function(url, body) {
   return json
 }

-exports.prepareUpload = async function({ s3Key, metadata, client, file }) {
-  const extension = [...file.name.split(".")].pop()
-  const fileBytes = fs.readFileSync(file.path)
-
-  const upload = await client
-    .upload({
-      // windows file paths need to be converted to forward slashes for s3
-      Key: sanitize(s3Key).replace(/\\/g, "/"),
-      Body: fileBytes,
-      ContentType: file.type || CONTENT_TYPE_MAP[extension.toLowerCase()],
-      Metadata: metadata,
-    })
-    .promise()
+exports.prepareUpload = async function({ s3Key, bucket, metadata, file }) {
+  const response = await upload({
+    bucket,
+    metadata,
+    filename: s3Key,
+    path: file.path,
+    type: file.type,
+  })

   return {
     size: file.size,
     name: file.name,
-    extension,
-    url: upload.Location,
-    key: upload.Key,
+    extension: [...file.name.split(".")].pop(),
+    url: response.Location,
+    key: response.Key,
   }
 }

-exports.deployToObjectStore = async function(appId, objectClient, metadata) {
+exports.deployToObjectStore = async function(appId, bucket, metadata) {
   const appAssetsPath = join(budibaseAppsDir(), appId, "public")

   let uploads = []

@@ -66,12 +54,12 @@ exports.deployToObjectStore = async function(appId, objectClient, metadata) {
   walkDir(appAssetsPath, function(filePath) {
     const filePathParts = filePath.split("/")
     const appAssetUpload = exports.prepareUpload({
+      bucket,
       file: {
         path: filePath,
         name: filePathParts.pop(),
       },
       s3Key: filePath.replace(appAssetsPath, `assets/${appId}`),
-      client: objectClient,
       metadata,
     })
     uploads.push(appAssetUpload)

@@ -92,7 +80,7 @@ exports.deployToObjectStore = async function(appId, objectClient, metadata) {
     const attachmentUpload = exports.prepareUpload({
       file,
       s3Key: `assets/${appId}/attachments/${file.processedFileName}`,
-      client: objectClient,
+      bucket,
       metadata,
     })

@@ -4,7 +4,6 @@ const send = require("koa-send")
 const { resolve, join } = require("../../../utilities/centralPath")
 const fetch = require("node-fetch")
 const uuid = require("uuid")
-const AWS = require("aws-sdk")
 const { prepareUpload } = require("../deploy/utils")
 const { processString } = require("@budibase/string-templates")
 const {

@@ -56,12 +55,6 @@ exports.uploadFile = async function(ctx) {
     ? Array.from(ctx.request.files.file)
     : [ctx.request.files.file]

-  const s3 = new AWS.S3({
-    params: {
-      Bucket: "prod-budi-app-assets",
-    },
-  })
-
   const uploads = files.map(async file => {
     const fileExtension = [...file.name.split(".")].pop()
     // filenames converted to UUIDs so they are unique

@@ -76,7 +69,7 @@ exports.uploadFile = async function(ctx) {
     return prepareUpload({
       file,
       s3Key: `assets/${ctx.user.appId}/attachments/${processedFileName}`,
-      s3,
+      bucket: "prod-budi-app-assets",
     })
   })

@@ -5,7 +5,7 @@ const { join } = require("path")
 const uuid = require("uuid/v4")
 const CouchDB = require("../../db")
 const { ObjectStoreBuckets } = require("../../constants")
-const { streamUpload, deleteFolder, downloadTarball } = require("./utilities")
+const { upload, streamUpload, deleteFolder, downloadTarball } = require("./utilities")
 const { downloadLibraries, newAppPublicPath } = require("./newApp")

 /**

@@ -104,3 +104,8 @@ exports.downloadTemplate = async (type, name) => {
 exports.readFileSync = (filepath, options = "utf8") => {
   return fs.readFileSync(filepath, options)
 }
+
+/**
+ * Full function definition provided in the utilities.
+ */
+exports.upload = upload

@@ -9,9 +9,16 @@ const { join } = require("path")
 const { streamUpload } = require("./utilities")
 const fs = require("fs")
 const { budibaseTempDir } = require("../budibaseDir")
+const env = require("../../environment")

 const streamPipeline = promisify(stream.pipeline)
+
+const CONTENT_TYPE_MAP = {
+  html: "text/html",
+  css: "text/css",
+  js: "application/javascript",
+}

 /**
  * Gets a connection to the object store using the S3 SDK.
  * @param {string} bucket the name of the bucket which blobs will be uploaded/retrieved from.

@@ -20,6 +27,10 @@ const streamPipeline = promisify(stream.pipeline)
  */
 exports.ObjectStore = bucket => {
   return new AWS.S3({
+    // TODO: need to deal with endpoint properly
+    endpoint: env.MINIO_URL,
+    s3ForcePathStyle: true, // needed with minio?
+    signatureVersion: "v4",
     params: {
       Bucket: bucket,
     },

@@ -47,6 +58,34 @@ exports.makeSureBucketExists = async (client, bucketName) => {
   }
 }

+/**
+ * Uploads the contents of a file given the required parameters, useful when
+ * temp files in use (for example file uploaded as an attachment).
+ * @param {string} bucket The name of the bucket to be uploaded to.
+ * @param {string} filename The name/path of the file in the object store.
+ * @param {string} path The path to the file (ideally a temporary file).
+ * @param {string} type If the content type is known can be specified.
+ * @param {object} metadata If there is metadata for the object it can be passed as well.
+ * @return {Promise<ManagedUpload.SendData>} The file has been uploaded to the object store successfully when
+ * promise completes.
+ */
+exports.upload = async ({ bucket, filename, path, type, metadata }) => {
+  const extension = [...filename.split(".")].pop()
+  const fileBytes = fs.readFileSync(path)
+
+  const objectStore = exports.ObjectStore(bucket)
+  const config = {
+    // windows file paths need to be converted to forward slashes for s3
+    Key: sanitize(filename).replace(/\\/g, "/"),
+    Body: fileBytes,
+    ContentType: type || CONTENT_TYPE_MAP[extension.toLowerCase()],
+  }
+  if (metadata) {
+    config.Metadata = metadata
+  }
+  return objectStore.upload(config).promise()
+}
+
 exports.streamUpload = async (bucket, filename, stream) => {
   const objectStore = exports.ObjectStore(bucket)
   await exports.makeSureBucketExists(objectStore, bucket)
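
For reference, the connection that the new upload helper relies on, written out standalone. A sketch of the same aws-sdk pattern assuming a local Minio; the endpoint and bucket name here are placeholders, not values from the commit:

const AWS = require("aws-sdk")

// Equivalent of exports.ObjectStore(bucket) above, pointed at a local Minio.
const objectStore = new AWS.S3({
  endpoint: "http://localhost:9000", // placeholder; the commit reads env.MINIO_URL
  s3ForcePathStyle: true, // Minio needs path-style bucket addressing
  signatureVersion: "v4",
  params: { Bucket: "app-assets-test" }, // placeholder bucket
})

// upload() ultimately calls objectStore.upload({ Key, Body, ContentType }).promise(),
// falling back to CONTENT_TYPE_MAP (html/css/js) when no type is supplied.
objectStore
  .upload({ Key: "index.html", Body: "<html></html>", ContentType: "text/html" })
  .promise()
  .then(res => console.log("uploaded to", res.Location))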