abstract local file upload logic
parent 6b00fb2d8e
commit ccebe283cc
@@ -64,7 +64,14 @@ function walkDir(dirPath, callback) {
   }
 }
 
-async function prepareUploadForS3({ filePath, s3Key, metadata, fileType, s3 }) {
+async function prepareUploadForS3({
+  filePath,
+  s3Key,
+  metadata,
+  fileType,
+  s3,
+  ...file
+}) {
   const contentType =
     fileType || CONTENT_TYPE_MAP[[...filePath.split(".")].pop().toLowerCase()]
   const fileBytes = fs.readFileSync(filePath)

@@ -79,8 +86,7 @@ async function prepareUploadForS3({ filePath, s3Key, metadata, fileType, s3 }) {
     .promise()
 
   return {
-    // TODO: return all the passed in file info
-    ...upload,
+    ...file,
     url: upload.Location,
     key: upload.Key,
   }

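The signature change above leans on an object rest element: every property the caller passes beyond the named upload parameters is collected into `file` and spread back into the return value, so the original file metadata travels through the upload untouched. A minimal, standalone sketch of that pattern follows; the property names and the `upload` object here are illustrative stand-ins, not the real S3 response.

// Illustration only: `upload` stands in for what s3.upload(...).promise() resolves to
function buildResult({ filePath, s3Key, ...file }, upload) {
  return {
    ...file, // everything the caller passed that was not a named parameter
    url: upload.Location,
    key: upload.Key,
  }
}

// Extra fields such as `size` survive the round trip:
const result = buildResult(
  { filePath: "/tmp/logo.png", s3Key: "assets/app/logo.png", size: 1024 },
  { Location: "https://example-bucket.s3.amazonaws.com/logo.png", Key: "assets/app/logo.png" }
)
// result => { size: 1024, url: "https://example-bucket...", key: "assets/app/logo.png" }
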
@@ -137,7 +143,7 @@ exports.uploadAppAssets = async function({
     const attachmentUpload = prepareUploadForS3({
       fileType: file.type,
       filePath: file.path,
-      s3Key: `assets/${appId}/attachments/${file.name}`,
+      s3Key: `assets/${appId}/attachments/${file.processedFileName}`,
       s3,
       metadata: { accountId },
     })

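Because the S3 key is now built from `file.processedFileName`, `uploadAppAssets` assumes each attachment record has already been given its UUID-based name by the local processing step. A hedged sketch of the shape this code now expects, with placeholder values:

// Hypothetical attachment record as uploadAppAssets now expects it
const attachment = {
  name: "logo.png", // original name chosen by the user
  path: "/tmp/uploads/logo.png", // temporary path on disk
  type: "image/png",
  processedFileName: "d9b2d63d-a233-4123-847a-7c1b2c3d4e5f.png", // UUID-based name used in the S3 key
}
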
@@ -31,8 +31,6 @@ exports.uploadFile = async function(ctx) {
     ? Array.from(ctx.request.files.file)
     : [ctx.request.files.file]
 
-  console.log(files)
-
   let uploads = []
 
   const attachmentsPath = resolve(

@@ -45,37 +43,40 @@ exports.uploadFile = async function(ctx) {
     // remote upload
     const s3 = new AWS.S3({
       params: {
-        // TODO: Don't hardcode
-        Bucket: "",
+        Bucket: "prod-budi-app-assets",
       },
     })
 
-    // TODO: probably need to UUID this too, so that we don't override by name
-    uploads = files.map(file =>
-      prepareUploadForS3({
-        fileType: file.type,
-        filePath: file.path,
-        s3Key: `assets/${ctx.user.appId}/attachments/${file.name}`,
-        s3,
-      })
-    )
-  } else {
     uploads = files.map(file => {
       const fileExtension = [...file.name.split(".")].pop()
       const processedFileName = `${uuid.v4()}.${fileExtension}`
 
-      return fileProcessor.process({
-        format: file.format,
-        type: file.type,
-        name: file.name,
-        size: file.size,
-        path: file.path,
-        processedFileName,
-        extension: fileExtension,
-        outputPath: `${attachmentsPath}/${processedFileName}`,
-        url: `/attachments/${processedFileName}`,
+      return prepareUploadForS3({
+        ...file,
+        fileType: file.type,
+        filePath: file.path,
+        s3Key: `assets/${ctx.user.appId}/attachments/${processedFileName}`,
+        s3,
       })
     })
+  } else {
+    uploads = processLocalFileUploads(files, attachmentsPath)
+    // uploads = files.map(file => {
+    //   const fileExtension = [...file.name.split(".")].pop()
+    //   const processedFileName = `${uuid.v4()}.${fileExtension}`
+
+    //   return fileProcessor.process({
+    //     format: file.format,
+    //     type: file.type,
+    //     name: file.name,
+    //     size: file.size,
+    //     path: file.path,
+    //     processedFileName,
+    //     extension: fileExtension,
+    //     outputPath: `${attachmentsPath}/${processedFileName}`,
+    //     url: `/attachments/${processedFileName}`,
+    //   })
+    // })
   }
 
   const responses = await Promise.all(uploads)

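Taken together, the hunk above leaves `uploadFile` with a much flatter split: the remote branch gives each file a UUID-based key and hands everything to `prepareUploadForS3`, while the local branch is a single call into the new helper. A condensed sketch of the resulting flow follows; the `process.env.CLOUD` check and the surrounding setup are assumptions drawn from context, and `uuid`, `prepareUploadForS3`, and `processLocalFileUploads` are the controller's own module-level names.

// Condensed sketch of the branching uploadFile is left with after this change
async function uploadFileSketch({ ctx, files, s3, attachmentsPath }) {
  let uploads = []
  if (process.env.CLOUD) {
    // remote: UUID the name, then upload straight to S3
    uploads = files.map(file => {
      const fileExtension = [...file.name.split(".")].pop()
      const processedFileName = `${uuid.v4()}.${fileExtension}`
      return prepareUploadForS3({
        ...file,
        fileType: file.type,
        filePath: file.path,
        s3Key: `assets/${ctx.user.appId}/attachments/${processedFileName}`,
        s3,
      })
    })
  } else {
    // local: the old inline fileProcessor logic now lives in processLocalFileUploads
    uploads = processLocalFileUploads(files, attachmentsPath)
  }
  return Promise.all(uploads)
}
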
@@ -83,21 +84,13 @@ exports.uploadFile = async function(ctx) {
   ctx.body = responses
 }
 
-exports.processLocalFileUpload = async function(ctx) {
-  const { files } = ctx.request.body
-
-  const attachmentsPath = resolve(
-    budibaseAppsDir(),
-    ctx.user.appId,
-    "attachments"
-  )
-
+function processLocalFileUploads(files, attachmentsPath) {
   // create attachments dir if it doesnt exist
   !fs.existsSync(attachmentsPath) &&
     fs.mkdirSync(attachmentsPath, { recursive: true })
 
   const filesToProcess = files.map(file => {
-    const fileExtension = [...file.path.split(".")].pop()
+    const fileExtension = [...file.name.split(".")].pop()
     // filenames converted to UUIDs so they are unique
     const processedFileName = `${uuid.v4()}.${fileExtension}`
 

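Note that the extracted helper does not await anything itself: it returns whatever `fileProcessor.process` produces for each file, and both call sites settle the batch with `Promise.all`. A small self-contained sketch of that contract, with a stub standing in for `fileProcessor.process`:

// fakeProcess stands in for fileProcessor.process
const fakeProcess = file => Promise.resolve({ ...file, processed: true })

function processUploadsSketch(files) {
  return files.map(fakeProcess) // an array of promises, nothing awaited yet
}

async function caller() {
  const responses = await Promise.all(
    processUploadsSketch([{ name: "a.png" }, { name: "b.png" }])
  )
  console.log(responses.length) // 2
}

caller()

One side effect of passing `fileProcessor.process` directly to `map` is that it also receives the index and array arguments and loses its `this` binding, which is only safe if `process` ignores both.
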
@@ -110,10 +103,43 @@ exports.processLocalFileUpload = async function(ctx) {
     }
   })
 
-  const fileProcessOperations = filesToProcess.map(file =>
-    fileProcessor.process(file)
+  return filesToProcess.map(fileProcessor.process)
+}
+
+exports.performLocalFileProcessing = async function(ctx) {
+  const { files } = ctx.request.body
+
+  const processedFileOutputPath = resolve(
+    budibaseAppsDir(),
+    ctx.user.appId,
+    "attachments"
   )
 
+  const fileProcessOperations = processLocalFileUploads(
+    files,
+    processedFileOutputPath
+  )
+
+  // // create attachments dir if it doesnt exist
+  // !fs.existsSync(attachmentsPath) &&
+  //   fs.mkdirSync(attachmentsPath, { recursive: true })
+
+  // const filesToProcess = files.map(file => {
+  //   const fileExtension = [...file.path.split(".")].pop()
+  //   // filenames converted to UUIDs so they are unique
+  //   const processedFileName = `${uuid.v4()}.${fileExtension}`
+
+  //   return {
+  //     ...file,
+  //     processedFileName,
+  //     extension: fileExtension,
+  //     outputPath: join(attachmentsPath, processedFileName),
+  //     url: join("/attachments", processedFileName),
+  //   }
+  // })
+
+  // const fileProcessOperations = filesToProcess.map(fileProcessor.process)
+
   try {
     const processedFiles = await Promise.all(fileProcessOperations)

@@ -26,7 +26,7 @@ router
   .post(
     "/api/attachments/process",
     authorized(BUILDER),
-    controller.processLocalFileUpload
+    controller.performLocalFileProcessing
   )
   .post("/api/attachments/upload", controller.uploadFile)
   .get("/componentlibrary", controller.serveComponentLibrary)

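With the route now pointing at `controller.performLocalFileProcessing`, a builder-side caller hits the same path as before. A hypothetical call is sketched below; the JSON body and the per-file fields (`name`, `path`) are inferred from what the handler and `processLocalFileUploads` read, not spelled out in this diff.

// Hypothetical builder-side call to the processing endpoint
async function processAttachments(files) {
  const response = await fetch("/api/attachments/process", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ files }),
  })
  return response.json()
}

// e.g. processAttachments([{ name: "logo.png", path: "/tmp/uploads/logo.png" }])
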
@@ -3,7 +3,6 @@
   import { Heading, Body, Button } from "@budibase/bbui"
   import { FILE_TYPES } from "./fileTypes"
   import api from "../api"
-  // import api from "builderStore/api"
 
   const BYTES_IN_KB = 1000
   const BYTES_IN_MB = 1000000

@@ -31,6 +30,14 @@
       data.append("file", fileList[i])
     }
 
+    if (Array.from(fileList).some(file => file.size >= fileSizeLimit)) {
+      alert(
+        `Files cannot exceed ${fileSizeLimit /
+          BYTES_IN_MB}MB. Please try again with smaller files.`
+      )
+      return
+    }
+
     const response = await fetch("/api/attachments/upload", {
       method: "POST",
       body: data,

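The new guard rejects the whole selection if any single file is at or over `fileSizeLimit`. That constant is defined elsewhere in the component, so the value below is only an assumed placeholder; the check itself mirrors the added lines.

const BYTES_IN_MB = 1000000
const fileSizeLimit = 20 * BYTES_IN_MB // assumed value; the real constant lives elsewhere in the component

function filesTooLarge(fileList) {
  // FileList is only array-like, hence Array.from before .some
  return Array.from(fileList).some(file => file.size >= fileSizeLimit)
}

// filesTooLarge([{ size: 5000000 }])  => false
// filesTooLarge([{ size: 25000000 }]) => true
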
@@ -118,12 +125,7 @@
     {/if}
   </ul>
   <i class="ri-folder-upload-line" />
-  <input
-    id="file-upload"
-    name="uploads"
-    type="file"
-    multiple
-    on:change={handleFile} />
+  <input id="file-upload" type="file" multiple on:change={handleFile} />
   <label for="file-upload">Upload</label>
 </div>
 

@@ -183,7 +185,7 @@
     display: flex;
     align-items: center;
     bottom: var(--spacing-s);
-    border-radius: 10px;
+    border-radius: 5px;
     transition: 0.2s transform;
   }
 

@@ -264,6 +266,7 @@
 
   .filename {
     overflow: hidden;
+    margin-left: 5px;
     text-overflow: ellipsis;
   }
 