Update S3 upload with develop and fix preview URLs

This commit is contained in:
Andrew Kingston 2022-01-13 17:18:24 +00:00
parent 3f2b6db2b2
commit b3dba52d62
6 changed files with 134 additions and 22 deletions

View File

@ -146,7 +146,9 @@
<img alt="preview" src={selectedUrl} /> <img alt="preview" src={selectedUrl} />
{:else} {:else}
<div class="placeholder"> <div class="placeholder">
<div class="extension">{selectedImage.extension}</div> <div class="extension">
{selectedImage.name || "Unknown file"}
</div>
<div>Preview not supported</div> <div>Preview not supported</div>
</div> </div>
{/if} {/if}
@ -357,18 +359,21 @@
white-space: nowrap; white-space: nowrap;
width: 0; width: 0;
margin-right: 10px; margin-right: 10px;
user-select: all;
} }
.placeholder { .placeholder {
display: flex; display: flex;
flex-direction: column; flex-direction: column;
justify-content: center; justify-content: center;
align-items: center; align-items: center;
text-align: center;
} }
.extension { .extension {
color: var(--spectrum-global-color-gray-600); color: var(--spectrum-global-color-gray-600);
text-transform: uppercase; text-transform: uppercase;
font-weight: 600; font-weight: 600;
margin-bottom: 5px; margin-bottom: 5px;
user-select: all;
} }
.nav { .nav {

View File

@ -11,7 +11,25 @@ export const uploadAttachment = async (data, tableId = "") => {
}) })
} }
export const uploadToS3 = async (signedUrl, data) => { /**
* Generates a signed URL to upload a file to an external datasource.
*/
export const getSignedDatasourceURL = async (datasourceId, bucket, key) => {
  // Without a datasource there is nothing to sign against
  if (!datasourceId) {
    return null
  }
  // Ask the server to sign an upload URL for this bucket/key pair
  const response = await API.post({
    url: `/api/attachments/${datasourceId}/url`,
    body: { bucket, key },
  })
  const { signedUrl } = response ?? {}
  return signedUrl
}
/**
* Uploads a file to an external datasource.
*/
export const externalUpload = async (datasourceId, bucket, key, data) => {
const signedUrl = await getSignedDatasourceURL(datasourceId, bucket, key)
await API.put({ await API.put({
url: signedUrl, url: signedUrl,
body: data, body: data,

View File

@ -1,8 +1,7 @@
<script> <script>
import Field from "./Field.svelte" import Field from "./Field.svelte"
import { CoreDropzone } from "@budibase/bbui" import { CoreDropzone, ProgressCircle } from "@budibase/bbui"
import { getContext, onMount, onDestroy } from "svelte" import { getContext, onMount, onDestroy } from "svelte"
import { getSignedS3URL } from "../../../api/index.js"
export let datasourceId export let datasourceId
export let bucket export let bucket
@ -17,12 +16,13 @@
const { API, notificationStore, uploadStore } = getContext("sdk") const { API, notificationStore, uploadStore } = getContext("sdk")
const component = getContext("component") const component = getContext("component")
const formContext = getContext("form")
// 5GB cap per item sent via S3 REST API // 5GB cap per item sent via S3 REST API
const MaxFileSize = 1000000000 * 5 const MaxFileSize = 1000000000 * 5
let file // Actual file data to upload
let data
let loading = false
const handleFileTooLarge = () => { const handleFileTooLarge = () => {
notificationStore.actions.warning( notificationStore.actions.warning(
@ -30,20 +30,48 @@
) )
} }
// Process the file input and return a serializable structure expected by
// the dropzone component to display the file
const processFiles = async fileList => { const processFiles = async fileList => {
// let data = new FormData() return await new Promise(resolve => {
// for (let i = 0; i < fileList.length; i++) { if (!fileList?.length) {
// data.append("file", fileList[i]) return []
// } }
// return await API.uploadAttachment(data, formContext?.dataSource?.tableId)
file = fileList[0] // Don't read in non-image files
console.log("processing", fileList) data = fileList[0]
return [] if (!data.type?.startsWith("image")) {
resolve([
{
name: data.name,
type: data.type,
},
])
}
// Read image files and display as preview
const reader = new FileReader()
reader.addEventListener(
"load",
() => {
resolve([
{
url: reader.result,
name: data.name,
type: data.type,
},
])
},
false
)
reader.readAsDataURL(fileList[0])
})
} }
const upload = async () => { const upload = async () => {
const url = await API.getSignedS3URL(datasourceId, bucket, key) loading = true
await API.uploadToS3(url, file) await API.externalUpload(datasourceId, bucket, key, data)
loading = false
} }
onMount(() => { onMount(() => {
@ -68,7 +96,7 @@
{#if fieldState} {#if fieldState}
<CoreDropzone <CoreDropzone
value={fieldState.value} value={fieldState.value}
disabled={fieldState.disabled} disabled={loading || fieldState.disabled}
error={fieldState.error} error={fieldState.error}
on:change={e => { on:change={e => {
fieldApi.setValue(e.detail) fieldApi.setValue(e.detail)
@ -79,4 +107,26 @@
fileSizeLimit={MaxFileSize} fileSizeLimit={MaxFileSize}
/> />
{/if} {/if}
{#if loading}
<div class="overlay" />
<div class="loading">
<ProgressCircle />
</div>
{/if}
</Field> </Field>
<style>
.overlay,
.loading {
position: absolute;
top: 0;
height: 100%;
width: 100%;
display: grid;
place-items: center;
}
.overlay {
background-color: var(--spectrum-global-color-gray-50);
opacity: 0.5;
}
</style>

View File

@ -17,6 +17,8 @@ const { clientLibraryPath } = require("../../../utilities")
const { upload } = require("../../../utilities/fileSystem") const { upload } = require("../../../utilities/fileSystem")
const { attachmentsRelativeURL } = require("../../../utilities") const { attachmentsRelativeURL } = require("../../../utilities")
const { DocumentTypes } = require("../../../db/utils") const { DocumentTypes } = require("../../../db/utils")
const AWS = require("aws-sdk")
const AWS_REGION = env.AWS_REGION ? env.AWS_REGION : "eu-west-1"
async function prepareUpload({ s3Key, bucket, metadata, file }) { async function prepareUpload({ s3Key, bucket, metadata, file }) {
const response = await upload({ const response = await upload({
@ -104,3 +106,39 @@ exports.serveClientLibrary = async function (ctx) {
root: join(NODE_MODULES_PATH, "@budibase", "client", "dist"), root: join(NODE_MODULES_PATH, "@budibase", "client", "dist"),
}) })
} }
exports.getSignedUploadURL = async function (ctx) {
// Ensure datasource is valid
const { datasourceId } = ctx.params
const database = new CouchDB(ctx.appId)
const datasource = await database.get(datasourceId)
if (!datasource) {
ctx.throw(400, "The specified datasource could not be found")
return
}
// Determine type of datasource and generate signed URL
let signedUrl
if (datasource.source === "S3") {
const { bucket, key } = ctx.request.body || {}
if (!bucket || !key) {
ctx.throw(400, "datasourceId, bucket and key must be specified")
return
}
try {
const s3 = new AWS.S3({
region: AWS_REGION,
accessKeyId: datasource?.config?.accessKeyId,
secretAccessKey: datasource?.config?.secretAccessKey,
apiVersion: "2006-03-01",
signatureVersion: "v4",
})
const params = { Bucket: bucket, Key: key }
signedUrl = s3.getSignedUrl("putObject", params)
} catch (error) {
ctx.throw(400, error)
}
}
ctx.body = { signedUrl }
}

View File

@ -93,10 +93,5 @@ router
authorized(BUILDER), authorized(BUILDER),
datasourceController.destroy datasourceController.destroy
) )
.post(
"/api/datasources/s3/getSignedS3URL",
authorized(PermissionTypes.TABLE, PermissionLevels.READ),
datasourceController.getSignedS3URL
)
module.exports = router module.exports = router

View File

@ -9,6 +9,7 @@ const {
} = require("@budibase/backend-core/permissions") } = require("@budibase/backend-core/permissions")
const env = require("../../environment") const env = require("../../environment")
const { paramResource } = require("../../middleware/resourceId") const { paramResource } = require("../../middleware/resourceId")
const datasourceController = require("../controllers/datasource")
const router = Router() const router = Router()
@ -46,5 +47,10 @@ router
) )
// TODO: this likely needs to be secured in some way // TODO: this likely needs to be secured in some way
.get("/:appId/:path*", controller.serveApp) .get("/:appId/:path*", controller.serveApp)
.post(
"/api/attachments/:datasourceId/url",
authorized(PermissionTypes.TABLE, PermissionLevels.READ),
controller.getSignedUploadURL
)
module.exports = router module.exports = router