Add S3 presigned request generation and file upload capabilities
This commit is contained in:
parent 4f7499cafe
commit 159c951262
@@ -3335,13 +3335,18 @@
     {
       "type": "dataSource/s3",
       "label": "S3 Datasource",
-      "key": "datasource"
+      "key": "datasourceId"
     },
     {
       "type": "text",
       "label": "Bucket",
       "key": "bucket"
     },
     {
       "type": "text",
       "label": "File Name",
       "key": "key"
     },
     {
       "type": "boolean",
       "label": "Disabled",

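For orientation, a hedged sketch of how a component configured with the settings above might look in an app definition. Only the setting keys (datasourceId, bucket, key, disabled) come from the manifest hunk; the component name, id, and wrapping fields are placeholders.

// Hypothetical configured instance of the S3 upload component.
const exampleComponent = {
  _component: "s3upload",              // assumed component type name
  _id: "component-abc123",             // hypothetical component id
  datasourceId: "datasource_s3_demo",  // "S3 Datasource" setting
  bucket: "my-app-uploads",            // "Bucket" setting
  key: "uploads/avatar.png",           // "File Name" setting
  disabled: false,                     // "Disabled" setting
}
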
@@ -36,7 +36,11 @@ const makeApiCall = async ({ method, url, body, json = true }) => {
   })
   switch (response.status) {
     case 200:
-      return response.json()
+      try {
+        return await response.json()
+      } catch (error) {
+        return null
+      }
     case 401:
       notificationStore.actions.error("Invalid credentials")
       return handleError(`Invalid credentials`)

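The try/catch around response.json() matters here presumably because some successful responses have no JSON body at all, such as the empty 200 that S3 returns for a presigned PUT. A minimal sketch of the same safe-parse pattern in isolation (helper name is hypothetical):

// Parse a JSON body if present, otherwise fall back to null.
// "response" is any Fetch API Response object.
const parseBodyOrNull = async response => {
  try {
    return await response.json()
  } catch (error) {
    // Empty or non-JSON bodies (e.g. a 200 from a presigned S3 PUT) land here.
    return null
  }
}
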
@@ -82,14 +86,15 @@ const makeCachedApiCall = async params => {
  * Constructs an API call function for a particular HTTP method.
  */
 const requestApiCall = method => async params => {
-  const { url, cache = false } = params
-  const fixedUrl = `/${url}`.replace("//", "/")
+  const { external = false, url, cache = false } = params
+  const fixedUrl = external ? url : `/${url}`.replace("//", "/")
   const enrichedParams = { ...params, method, url: fixedUrl }
   return await (cache ? makeCachedApiCall : makeApiCall)(enrichedParams)
 }
 
 export default {
   post: requestApiCall("POST"),
   put: requestApiCall("PUT"),
   get: requestApiCall("GET"),
   patch: requestApiCall("PATCH"),
   del: requestApiCall("DELETE"),

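A sketch of how the new external flag changes URL handling, assuming the default export above is imported as API; the URLs are placeholders:

const demo = async file => {
  // Internal: the relative path is normalised to a single leading slash,
  // so this hits the Budibase server at "/api/self".
  const user = await API.get({ url: "api/self" })

  // External: the URL is passed through untouched, which is exactly what a
  // presigned S3 URL requires.
  await API.put({
    url: "https://example-bucket.s3.amazonaws.com/some/key?X-Amz-Signature=...",
    external: true,
    json: false,
    body: file,
  })
  return user
}
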
@@ -10,3 +10,12 @@ export const uploadAttachment = async (data, tableId = "") => {
     json: false,
   })
 }
+
+export const uploadToS3 = async (signedUrl, data) => {
+  await API.put({
+    url: signedUrl,
+    body: data,
+    json: false,
+    external: true,
+  })
+}

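A hedged usage sketch for uploadToS3, assuming a File taken from a file input; the presigned URL would come from getSignedS3URL, which is added further down in this commit:

const uploadChosenFile = async signedUrl => {
  // Hypothetical file input; any File or Blob works as the body.
  const input = document.querySelector("input[type=file]")
  const file = input?.files?.[0]
  if (file) {
    await uploadToS3(signedUrl, file)
  }
}
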
@@ -8,6 +8,7 @@ import {
   convertJSONSchemaToTableSchema,
   getJSONArrayDatasourceSchema,
 } from "builder/src/builderStore/jsonUtils"
+import API from "./api.js"
 
 /**
  * Fetches all rows for a particular Budibase data source.

@@ -131,3 +132,14 @@ export const fetchDatasourceSchema = async dataSource => {
   })
   return { ...schema, ...jsonAdditions }
 }
+
+export const getSignedS3URL = async (datasourceId, bucket, key) => {
+  if (!datasourceId) {
+    return null
+  }
+  const res = await API.post({
+    url: `/api/datasources/s3/getSignedS3URL`,
+    body: { datasourceId, bucket, key },
+  })
+  return res?.signedUrl
+}

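Putting the two client helpers together, a rough end-to-end sketch; the datasource id, bucket name, and key are placeholders and error handling is omitted:

const uploadViaPresignedUrl = async file => {
  // Ask the server for a presigned PUT URL, then upload directly to S3.
  const signedUrl = await getSignedS3URL(
    "datasource_s3_demo", // placeholder datasource id
    "my-app-uploads",     // placeholder bucket
    file.name             // object key; here simply the file's name
  )
  if (signedUrl) {
    await uploadToS3(signedUrl, file)
  }
}
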
@@ -2,9 +2,11 @@
   import Field from "./Field.svelte"
   import { CoreDropzone } from "@budibase/bbui"
   import { getContext, onMount, onDestroy } from "svelte"
+  import { getSignedS3URL } from "../../../api/index.js"
 
-  export let dataSource
+  export let datasourceId
+  export let bucket
+  export let key
   export let field
   export let label
   export let disabled = false

@@ -40,7 +42,8 @@
   }
 
   const upload = async () => {
-    console.log("UPLOADING!!!")
+    const url = await API.getSignedS3URL(datasourceId, bucket, key)
+    await API.uploadToS3(url, file)
   }
 
   onMount(() => {

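The onMount body is truncated here; presumably it registers the component's upload function so the "S3 Upload" button action can trigger it later. A sketch under that assumption; the registerFileUpload/unregisterFileUpload action names, the componentId source, and the component's access to uploadStore are all assumptions, not shown in this diff:

const componentId = "component-abc123" // placeholder for this component's id

onMount(() => {
  // Hypothetical wiring: make upload() reachable by component id.
  uploadStore.actions.registerFileUpload(componentId, upload)
})

onDestroy(() => {
  uploadStore.actions.unregisterFileUpload(componentId)
})
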
@@ -6,6 +6,7 @@ import {
   authStore,
   stateStore,
+  uploadStore,
   notificationStore,
 } from "stores"
 import { saveRow, deleteRow, executeQuery, triggerAutomation } from "api"
 import { ActionTypes } from "constants"

@@ -164,6 +165,7 @@ const s3UploadHandler = async action => {
     return
   }
   await uploadStore.actions.processFileUpload(componentId)
+  notificationStore.actions.success("File uploaded successfully")
 }
 
 const handlerMap = {

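For context, a hedged sketch of the kind of button-action payload that would reach s3UploadHandler; the handler key and the exact action shape are assumptions, only the use of a componentId parameter is taken from the handler above:

// Hypothetical action definition as a button's on-click handler might store it.
const exampleAction = {
  "##eventHandlerType": "S3 Upload",   // assumed handler key in handlerMap
  parameters: {
    componentId: "component-abc123",   // id of the S3 Upload component to trigger
  },
}
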
@@ -10,6 +10,9 @@ const {
 const { BuildSchemaErrors, InvalidColumns } = require("../../constants")
 const { integrations } = require("../../integrations")
 const { getDatasourceAndQuery } = require("./row/utils")
+const AWS = require("aws-sdk")
+const env = require("../../environment")
+const AWS_REGION = env.AWS_REGION ? env.AWS_REGION : "eu-west-1"
 
 exports.fetch = async function (ctx) {
   const database = new CouchDB(ctx.appId)

@@ -152,6 +155,35 @@ exports.query = async function (ctx) {
   }
 }
 
+exports.getSignedS3URL = async function (ctx) {
+  const { datasourceId, bucket, key } = ctx.request.body || {}
+  if (!datasourceId || !bucket || !key) {
+    ctx.throw(400, "datasourceId, bucket and key must be specified")
+    return
+  }
+  const database = new CouchDB(ctx.appId)
+  const datasource = await database.get(datasourceId)
+  if (!datasource) {
+    ctx.throw(400, "The specified datasource could not be found")
+    return
+  }
+  let signedUrl
+  try {
+    const s3 = new AWS.S3({
+      region: AWS_REGION,
+      accessKeyId: datasource?.config?.accessKeyId,
+      secretAccessKey: datasource?.config?.secretAccessKey,
+      apiVersion: "2006-03-01",
+      signatureVersion: "v4",
+    })
+    const params = { Bucket: bucket, Key: key }
+    signedUrl = s3.getSignedUrl("putObject", params)
+  } catch (error) {
+    ctx.throw(400, error)
+  }
+  ctx.body = { signedUrl }
+}
+
 function getErrorTables(errors, errorType) {
   return Object.entries(errors)
     .filter(entry => entry[1] === errorType)

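Worth noting about the controller above: calling s3.getSignedUrl("putObject", params) without an Expires value uses the aws-sdk v2 default expiry of 900 seconds (15 minutes). A standalone sketch of the same presigned PUT generation with an explicit expiry; credentials, bucket, and key are placeholders:

const AWS = require("aws-sdk")

const s3 = new AWS.S3({
  region: "eu-west-1",
  accessKeyId: "AKIA...",          // placeholder
  secretAccessKey: "placeholder",  // placeholder
  apiVersion: "2006-03-01",
  signatureVersion: "v4",
})

// Synchronous in aws-sdk v2: returns the presigned URL as a string.
const signedUrl = s3.getSignedUrl("putObject", {
  Bucket: "my-app-uploads",
  Key: "uploads/avatar.png",
  Expires: 300, // seconds; the SDK default is 900
})
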
@@ -93,5 +93,10 @@ router
     authorized(BUILDER),
     datasourceController.destroy
   )
+  .post(
+    "/api/datasources/s3/getSignedS3URL",
+    authorized(PermissionTypes.TABLE, PermissionLevels.READ),
+    datasourceController.getSignedS3URL
+  )
 
 module.exports = router

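Finally, a hedged sketch of calling the new route directly from an already-authenticated client context; the ids, bucket, and key are placeholders:

const uploadThroughNewRoute = async fileBlob => {
  // Request a presigned URL from the new endpoint, then PUT the file to it.
  const res = await fetch("/api/datasources/s3/getSignedS3URL", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({
      datasourceId: "datasource_s3_demo", // placeholder
      bucket: "my-app-uploads",           // placeholder
      key: "uploads/avatar.png",          // placeholder
    }),
  })
  const { signedUrl } = await res.json()
  await fetch(signedUrl, { method: "PUT", body: fileBlob })
}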