Merge pull request #15550 from Budibase/fix/s3-upload-and-read

Fix S3 datasource upload and read
Michael Drury 2025-02-13 18:39:48 +00:00 committed by GitHub
commit e3594bb09d
6 changed files with 35 additions and 15 deletions

View File

@@ -247,3 +247,7 @@ export function hasCircularStructure(json: any) {
   }
   return false
 }
+
+export function urlHasProtocol(url: string): boolean {
+  return !!url.match(/^.+:\/\/.+$/)
+}
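The new helper only tests for a `://` separator, so any scheme counts as a protocol. A quick sketch of its behavior (import path and inputs are illustrative, not from the PR):

// Illustrative only — the import path is assumed.
import { urlHasProtocol } from "./utils"

urlHasProtocol("http://localhost:9000")  // true — has a scheme
urlHasProtocol("custom://host")          // true — any scheme matches
urlHasProtocol("minio.example.com:9000") // false — no "://", so callers prepend https://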

View File

@@ -159,6 +159,7 @@
     "@types/tar": "6.1.5",
     "@types/tmp": "0.2.6",
     "@types/uuid": "8.3.4",
+    "@smithy/types": "4.0.0",
     "chance": "^1.1.12",
     "copyfiles": "2.4.1",
     "docker-compose": "0.23.17",

View File

@@ -339,10 +339,13 @@ export const getSignedUploadURL = async function (
     ctx.throw(400, "bucket and key values are required")
   }
   try {
+    let endpoint = datasource?.config?.endpoint
+    if (endpoint && !utils.urlHasProtocol(endpoint)) {
+      endpoint = `https://${endpoint}`
+    }
     const s3 = new S3({
       region: awsRegion,
-      endpoint: datasource?.config?.endpoint || undefined,
+      endpoint: endpoint,
       credentials: {
         accessKeyId: datasource?.config?.accessKeyId as string,
         secretAccessKey: datasource?.config?.secretAccessKey as string,
@@ -350,8 +353,8 @@
     })
     const params = { Bucket: bucket, Key: key }
     signedUrl = await getSignedUrl(s3, new PutObjectCommand(params))
-    if (datasource?.config?.endpoint) {
-      publicUrl = `${datasource.config.endpoint}/${bucket}/${key}`
+    if (endpoint) {
+      publicUrl = `${endpoint}/${bucket}/${key}`
     } else {
       publicUrl = `https://${bucket}.s3.${awsRegion}.amazonaws.com/${key}`
     }
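Stored S3-compatible endpoints (e.g. MinIO) may lack a scheme, and the v3 AWS SDK expects a fully-qualified endpoint URL, so the controller now normalizes the endpoint once and reuses it for both the client and the public URL. A minimal sketch of that flow; the region, credentials, and endpoint values are placeholders, not values from the PR:

// A sketch of the normalize-then-presign flow, under the assumptions above.
import { S3, PutObjectCommand } from "@aws-sdk/client-s3"
import { getSignedUrl } from "@aws-sdk/s3-request-presigner"

function withProtocol(url: string): string {
  // same check as utils.urlHasProtocol: prepend https:// when no scheme is present
  return /^.+:\/\/.+$/.test(url) ? url : `https://${url}`
}

async function presignUpload(rawEndpoint: string, bucket: string, key: string) {
  const endpoint = withProtocol(rawEndpoint) // "minio.local:9000" -> "https://minio.local:9000"
  const s3 = new S3({
    region: "us-east-1", // placeholder
    endpoint,
    credentials: { accessKeyId: "KEY", secretAccessKey: "SECRET" }, // placeholders
  })
  const signedUrl = await getSignedUrl(s3, new PutObjectCommand({ Bucket: bucket, Key: key }))
  // the public URL is built from the same normalized endpoint
  return { signedUrl, publicUrl: `${endpoint}/${bucket}/${key}` }
}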

View File

@@ -7,9 +7,10 @@ import {
   ConnectionInfo,
 } from "@budibase/types"
-import { S3 } from "@aws-sdk/client-s3"
+import { S3, S3ClientConfig } from "@aws-sdk/client-s3"
 import csv from "csvtojson"
 import stream from "stream"
+import { NodeJsClient } from "@smithy/types"
 
 interface S3Config {
   region: string
@@ -157,18 +158,25 @@ const SCHEMA: Integration = {
 }
 
 class S3Integration implements IntegrationBase {
-  private readonly config: S3Config
-  private client
+  private readonly config: S3ClientConfig
+  private client: NodeJsClient<S3>
 
   constructor(config: S3Config) {
-    this.config = config
-    if (this.config.endpoint) {
-      this.config.s3ForcePathStyle = true
+    this.config = {
+      forcePathStyle: config.s3ForcePathStyle || true,
+      credentials: {
+        accessKeyId: config.accessKeyId,
+        secretAccessKey: config.secretAccessKey,
+      },
+      region: config.region,
+    }
+    if (config.endpoint) {
+      this.config.forcePathStyle = true
     } else {
       delete this.config.endpoint
     }
-    this.client = new S3(this.config)
+    this.client = new S3(this.config) as NodeJsClient<S3>
   }
 
   async testConnection() {
@@ -176,7 +184,9 @@ class S3Integration implements IntegrationBase {
       connected: false,
     }
     try {
-      await this.client.listBuckets()
+      await this.client.listBuckets({
+        MaxBuckets: 1,
+      })
       response.connected = true
     } catch (e: any) {
       response.error = e.message as string
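The `NodeJsClient<S3>` helper from `@smithy/types` narrows the client's response types to Node.js streams, which is what lets the integration's read path treat an object `Body` as a pipeable stream. A hedged sketch of that effect; the `readObject` helper, region, bucket, and key are illustrative:

// Sketch only. Without the NodeJsClient cast, GetObject's Body is typed as a
// browser/Node stream union, so piping it into a Node stream does not type-check.
import { S3 } from "@aws-sdk/client-s3"
import { NodeJsClient } from "@smithy/types"
import stream from "stream"

const client = new S3({ region: "us-east-1" }) as NodeJsClient<S3>

async function readObject(bucket: string, key: string) {
  const response = await client.getObject({ Bucket: bucket, Key: key })
  // Body?.pipe(...) is well-typed here because of the NodeJsClient cast
  return response.Body?.pipe(new stream.PassThrough())
}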

View File

@@ -120,7 +120,7 @@ export function areRESTVariablesValid(datasource: Datasource) {
 export function checkDatasourceTypes(schema: Integration, config: any) {
   for (let key of Object.keys(config)) {
-    if (!schema.datasource[key]) {
+    if (!schema.datasource?.[key]) {
       continue
     }
     const type = schema.datasource[key].type
@@ -149,7 +149,9 @@ async function enrichDatasourceWithValues(
   ) as Datasource
   processed.entities = entities
   const definition = await getDefinition(processed.source)
-  processed.config = checkDatasourceTypes(definition!, processed.config)
+  if (definition) {
+    processed.config = checkDatasourceTypes(definition, processed.config)
+  }
   return {
     datasource: processed,
     envVars: env as Record<string, string>,
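Together with making `Integration.datasource` optional (final hunk below), the optional-chaining guard turns what used to be a crash on integrations without datasource config into a skip. A minimal sketch with simplified types; names mirror the PR but the shapes are trimmed for illustration:

// Trimmed-down sketch of the guard's effect.
type DatasourceConfig = Record<string, { type: string }>
interface IntegrationLike {
  datasource?: DatasourceConfig
}

function checkTypes(schema: IntegrationLike, config: Record<string, any>) {
  for (const key of Object.keys(config)) {
    if (!schema.datasource?.[key]) {
      continue // previously: TypeError when schema.datasource was undefined
    }
    // ...type coercion elided...
  }
  return config
}

checkTypes({}, { bucket: "test-bucket" }) // now a no-op instead of throwing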

View File

@@ -157,7 +157,7 @@ export interface Integration {
   friendlyName: string
   type?: string
   iconUrl?: string
-  datasource: DatasourceConfig
+  datasource?: DatasourceConfig
   query: {
     [key: string]: QueryDefinition
   }