ensuring s3 upload works in prod
parent fd97c14a50
commit 31dd25331e
@@ -64,29 +64,23 @@ function walkDir(dirPath, callback) {
   }
 }
 
-async function prepareUploadForS3({
-  filePath,
-  s3Key,
-  metadata,
-  fileType,
-  s3,
-  ...file
-}) {
-  const contentType =
-    fileType || CONTENT_TYPE_MAP[[...filePath.split(".")].pop().toLowerCase()]
-  const fileBytes = fs.readFileSync(filePath)
+async function prepareUploadForS3({ s3Key, metadata, s3, file }) {
+  const extension = [...file.name.split(".")].pop()
+  const fileBytes = fs.readFileSync(file.path)
 
   const upload = await s3
     .upload({
       Key: s3Key,
       Body: fileBytes,
-      ContentType: contentType,
+      ContentType: file.type || CONTENT_TYPE_MAP[extension.toLowerCase()],
       Metadata: metadata,
     })
     .promise()
 
   return {
-    ...file,
+    size: file.size,
+    name: file.name,
+    extension,
     url: upload.Location,
     key: upload.Key,
   }
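Note: the new ContentType fallback relies on CONTENT_TYPE_MAP, which is defined elsewhere in this file and not shown in the diff. A minimal sketch of the shape that lookup assumes (the entries below are illustrative, not the actual map):

const CONTENT_TYPE_MAP = {
  html: "text/html",
  css: "text/css",
  js: "application/javascript",
  json: "application/json",
  svg: "image/svg+xml",
}

// A caller now only needs a file object carrying path and name; type is optional.
// For file = { path: "/tmp/bundle.min.js", name: "bundle.min.js" } the derived
// extension is "js", so ContentType resolves to "application/javascript" when
// file.type is not set.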
@@ -124,7 +118,10 @@ exports.uploadAppAssets = async function({
     // Upload HTML, CSS and JS for each page of the web app
     walkDir(`${appAssetsPath}/${page}`, function(filePath) {
       const appAssetUpload = prepareUploadForS3({
-        filePath,
+        file: {
+          path: filePath,
+          name: [...filePath.split("/")].pop(),
+        },
         s3Key: filePath.replace(appAssetsPath, `assets/${appId}`),
         s3,
         metadata: { accountId },
@@ -141,8 +138,7 @@ exports.uploadAppAssets = async function({
     if (file.uploaded) continue
 
     const attachmentUpload = prepareUploadForS3({
-      fileType: file.type,
-      filePath: file.path,
+      file,
       s3Key: `assets/${appId}/attachments/${file.processedFileName}`,
       s3,
       metadata: { accountId },
@@ -50,9 +50,7 @@ exports.uploadFile = async function(ctx) {
     const processedFileName = `${uuid.v4()}.${fileExtension}`
 
     return prepareUploadForS3({
-      ...file,
-      fileType: file.type,
-      filePath: file.path,
+      file,
       s3Key: `assets/${ctx.user.appId}/attachments/${processedFileName}`,
       s3,
     })
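All three call sites (the app asset walk, the app attachment loop, and uploadFile above) now pass a single file object instead of separate filePath/fileType arguments. A hedged sketch of the two shapes involved; neither literal is part of the commit, and the attachment values simply mirror the object logged in processLocalFileUploads further down:

// Walked app asset: no MIME type is available, so prepareUploadForS3 falls back
// to the CONTENT_TYPE_MAP lookup on the file extension. The path is hypothetical.
const assetFile = {
  path: "/tmp/app-assets/main/index.html",
  name: "index.html",
}

// Attachment or direct upload, matching the example logged later in this diff:
const attachmentFile = {
  name: "backspace-solid.svg",
  path: "/Users/martinmckeaveney/Downloads/backspace-solid.svg",
  size: 813,
  type: "image/svg+xml",
}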
@@ -70,6 +68,7 @@ exports.uploadFile = async function(ctx) {
 }
 
 async function processLocalFileUploads({ files, outputPath, instanceId }) {
+  console.log("files", files)
   // create attachments dir if it doesnt exist
   !fs.existsSync(outputPath) && fs.mkdirSync(outputPath, { recursive: true })
 
@@ -78,12 +77,20 @@ async function processLocalFileUploads({ files, outputPath, instanceId }) {
     // filenames converted to UUIDs so they are unique
     const processedFileName = `${uuid.v4()}.${fileExtension}`
 
+    console.log(file)
+    // {
+    //   name: 'backspace-solid.svg',
+    //   path: '/Users/martinmckeaveney/Downloads/backspace-solid.svg',
+    //   size: 813,
+    //   type: 'image/svg+xml'
+    // }
 
     return {
-      ...file,
+      name: file.name,
+      path: file.path,
+      size: file.size,
+      type: file.type,
       processedFileName,
-      extension: fileExtension,
-      // extension: fileExtension,
       outputPath: join(outputPath, processedFileName),
       url: join("/attachments", processedFileName),
     }
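For reference, a sketch of the per-file record processLocalFileUploads now builds, assuming the return shape in the hunk above and the example file from the logged comment; the UUID and the attachments directory are placeholder values:

// processedFileName is `${uuid.v4()}.${fileExtension}`, so the stored name is
// unique while the original name is preserved on the record.
const processed = {
  name: "backspace-solid.svg",
  path: "/Users/martinmckeaveney/Downloads/backspace-solid.svg",
  size: 813,
  type: "image/svg+xml",
  processedFileName: "1b9d6bcd-bbfd-4b2d-9b5d-ab8dfbbd4bed.svg",
  outputPath: "/path/to/attachments/1b9d6bcd-bbfd-4b2d-9b5d-ab8dfbbd4bed.svg",
  url: "/attachments/1b9d6bcd-bbfd-4b2d-9b5d-ab8dfbbd4bed.svg",
}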