Update S3 upload, merge develop, and fix preview URLs
This commit is contained in:
parent
3f2b6db2b2
commit
b3dba52d62
|
@ -146,7 +146,9 @@
|
|||
<img alt="preview" src={selectedUrl} />
|
||||
{:else}
|
||||
<div class="placeholder">
|
||||
<div class="extension">{selectedImage.extension}</div>
|
||||
<div class="extension">
|
||||
{selectedImage.name || "Unknown file"}
|
||||
</div>
|
||||
<div>Preview not supported</div>
|
||||
</div>
|
||||
{/if}
|
||||
|
@ -357,18 +359,21 @@
|
|||
white-space: nowrap;
|
||||
width: 0;
|
||||
margin-right: 10px;
|
||||
user-select: all;
|
||||
}
|
||||
.placeholder {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
justify-content: center;
|
||||
align-items: center;
|
||||
text-align: center;
|
||||
}
|
||||
.extension {
|
||||
color: var(--spectrum-global-color-gray-600);
|
||||
text-transform: uppercase;
|
||||
font-weight: 600;
|
||||
margin-bottom: 5px;
|
||||
user-select: all;
|
||||
}
|
||||
|
||||
.nav {
|
||||
|
|
|
@ -11,7 +11,25 @@ export const uploadAttachment = async (data, tableId = "") => {
|
|||
})
|
||||
}
|
||||
|
||||
export const uploadToS3 = async (signedUrl, data) => {
|
||||
/**
 * Generates a signed URL to upload a file to an external datasource.
 * Resolves to null when no datasource ID is supplied, otherwise to the
 * signed URL returned by the server (undefined if none was returned).
 */
export const getSignedDatasourceURL = async (datasourceId, bucket, key) => {
  // Without a datasource there is nothing to sign.
  if (!datasourceId) {
    return null
  }
  const endpoint = `/api/attachments/${datasourceId}/url`
  const response = await API.post({ url: endpoint, body: { bucket, key } })
  return response?.signedUrl
}
|
||||
|
||||
/**
|
||||
* Uploads a file to an external datasource.
|
||||
*/
|
||||
export const externalUpload = async (datasourceId, bucket, key, data) => {
|
||||
const signedUrl = await getSignedDatasourceURL(datasourceId, bucket, key)
|
||||
await API.put({
|
||||
url: signedUrl,
|
||||
body: data,
|
||||
|
|
|
@ -1,8 +1,7 @@
|
|||
<script>
|
||||
import Field from "./Field.svelte"
|
||||
import { CoreDropzone } from "@budibase/bbui"
|
||||
import { CoreDropzone, ProgressCircle } from "@budibase/bbui"
|
||||
import { getContext, onMount, onDestroy } from "svelte"
|
||||
import { getSignedS3URL } from "../../../api/index.js"
|
||||
|
||||
export let datasourceId
|
||||
export let bucket
|
||||
|
@ -17,12 +16,13 @@
|
|||
|
||||
const { API, notificationStore, uploadStore } = getContext("sdk")
|
||||
const component = getContext("component")
|
||||
const formContext = getContext("form")
|
||||
|
||||
// 5GB cap per item sent via S3 REST API
|
||||
const MaxFileSize = 1000000000 * 5
|
||||
|
||||
let file
|
||||
// Actual file data to upload
|
||||
let data
|
||||
let loading = false
|
||||
|
||||
const handleFileTooLarge = () => {
|
||||
notificationStore.actions.warning(
|
||||
|
@ -30,20 +30,48 @@
|
|||
)
|
||||
}
|
||||
|
||||
// Process the file input and return a serializable structure expected by
// the dropzone component to display the file. Resolves with an array
// containing a single file descriptor, or an empty array when no file was
// selected.
const processFiles = async fileList => {
  return new Promise(resolve => {
    if (!fileList?.length) {
      // Fix: previously returned from the executor without resolving,
      // leaving the promise pending forever on an empty selection.
      resolve([])
      return
    }

    // Stash the raw file for the subsequent upload call.
    data = fileList[0]

    // Don't read in non-image files — just describe them for the dropzone.
    if (!data.type?.startsWith("image")) {
      resolve([
        {
          name: data.name,
          type: data.type,
        },
      ])
      // Fix: return here so a FileReader isn't started on a file we never
      // intend to preview (the second resolve was a silent no-op, but the
      // read itself was wasted work).
      return
    }

    // Read image files and expose the result as a data URL preview.
    const reader = new FileReader()
    reader.addEventListener(
      "load",
      () => {
        resolve([
          {
            url: reader.result,
            name: data.name,
            type: data.type,
          },
        ])
      },
      false
    )
    reader.readAsDataURL(data)
  })
}
|
||||
|
||||
// Uploads the selected file data to the external datasource, tracking
// progress via the loading flag so the UI can disable the dropzone while
// the upload is in flight.
const upload = async () => {
  // Fix: dropped the leftover legacy path (API.getSignedS3URL +
  // API.uploadToS3 on the stale `file` variable) which duplicated the
  // upload performed by externalUpload below.
  loading = true
  try {
    await API.externalUpload(datasourceId, bucket, key, data)
  } finally {
    // Always clear the flag, even when the upload throws, so the
    // component doesn't get stuck in a permanent loading state.
    loading = false
  }
}
|
||||
|
||||
onMount(() => {
|
||||
|
@ -68,7 +96,7 @@
|
|||
{#if fieldState}
|
||||
<CoreDropzone
|
||||
value={fieldState.value}
|
||||
disabled={fieldState.disabled}
|
||||
disabled={loading || fieldState.disabled}
|
||||
error={fieldState.error}
|
||||
on:change={e => {
|
||||
fieldApi.setValue(e.detail)
|
||||
|
@ -79,4 +107,26 @@
|
|||
fileSizeLimit={MaxFileSize}
|
||||
/>
|
||||
{/if}
|
||||
{#if loading}
|
||||
<div class="overlay" />
|
||||
<div class="loading">
|
||||
<ProgressCircle />
|
||||
</div>
|
||||
{/if}
|
||||
</Field>
|
||||
|
||||
<style>
|
||||
.overlay,
|
||||
.loading {
|
||||
position: absolute;
|
||||
top: 0;
|
||||
height: 100%;
|
||||
width: 100%;
|
||||
display: grid;
|
||||
place-items: center;
|
||||
}
|
||||
.overlay {
|
||||
background-color: var(--spectrum-global-color-gray-50);
|
||||
opacity: 0.5;
|
||||
}
|
||||
</style>
|
||||
|
|
|
@ -17,6 +17,8 @@ const { clientLibraryPath } = require("../../../utilities")
|
|||
const { upload } = require("../../../utilities/fileSystem")
|
||||
const { attachmentsRelativeURL } = require("../../../utilities")
|
||||
const { DocumentTypes } = require("../../../db/utils")
|
||||
const AWS = require("aws-sdk")
|
||||
const AWS_REGION = env.AWS_REGION ? env.AWS_REGION : "eu-west-1"
|
||||
|
||||
async function prepareUpload({ s3Key, bucket, metadata, file }) {
|
||||
const response = await upload({
|
||||
|
@ -104,3 +106,39 @@ exports.serveClientLibrary = async function (ctx) {
|
|||
root: join(NODE_MODULES_PATH, "@budibase", "client", "dist"),
|
||||
})
|
||||
}
|
||||
|
||||
exports.getSignedUploadURL = async function (ctx) {
|
||||
// Ensure datasource is valid
|
||||
const { datasourceId } = ctx.params
|
||||
const database = new CouchDB(ctx.appId)
|
||||
const datasource = await database.get(datasourceId)
|
||||
if (!datasource) {
|
||||
ctx.throw(400, "The specified datasource could not be found")
|
||||
return
|
||||
}
|
||||
|
||||
// Determine type of datasource and generate signed URL
|
||||
let signedUrl
|
||||
if (datasource.source === "S3") {
|
||||
const { bucket, key } = ctx.request.body || {}
|
||||
if (!bucket || !key) {
|
||||
ctx.throw(400, "datasourceId, bucket and key must be specified")
|
||||
return
|
||||
}
|
||||
try {
|
||||
const s3 = new AWS.S3({
|
||||
region: AWS_REGION,
|
||||
accessKeyId: datasource?.config?.accessKeyId,
|
||||
secretAccessKey: datasource?.config?.secretAccessKey,
|
||||
apiVersion: "2006-03-01",
|
||||
signatureVersion: "v4",
|
||||
})
|
||||
const params = { Bucket: bucket, Key: key }
|
||||
signedUrl = s3.getSignedUrl("putObject", params)
|
||||
} catch (error) {
|
||||
ctx.throw(400, error)
|
||||
}
|
||||
}
|
||||
|
||||
ctx.body = { signedUrl }
|
||||
}
|
||||
|
|
|
@ -93,10 +93,5 @@ router
|
|||
authorized(BUILDER),
|
||||
datasourceController.destroy
|
||||
)
|
||||
.post(
|
||||
"/api/datasources/s3/getSignedS3URL",
|
||||
authorized(PermissionTypes.TABLE, PermissionLevels.READ),
|
||||
datasourceController.getSignedS3URL
|
||||
)
|
||||
|
||||
module.exports = router
|
||||
|
|
|
@ -9,6 +9,7 @@ const {
|
|||
} = require("@budibase/backend-core/permissions")
|
||||
const env = require("../../environment")
|
||||
const { paramResource } = require("../../middleware/resourceId")
|
||||
const datasourceController = require("../controllers/datasource")
|
||||
|
||||
const router = Router()
|
||||
|
||||
|
@ -46,5 +47,10 @@ router
|
|||
)
|
||||
// TODO: this likely needs to be secured in some way
|
||||
.get("/:appId/:path*", controller.serveApp)
|
||||
.post(
|
||||
"/api/attachments/:datasourceId/url",
|
||||
authorized(PermissionTypes.TABLE, PermissionLevels.READ),
|
||||
controller.getSignedUploadURL
|
||||
)
|
||||
|
||||
module.exports = router
|
||||
|
|
Loading…
Reference in New Issue