Merge branch 'master' into table-width-setting
commit 657da4b1c7
@@ -9,3 +9,4 @@ Contributors
 * Michael Drury - [@mike12345567](https://github.com/mike12345567)
 * Peter Clement - [@PClmnt](https://github.com/PClmnt)
 * Rory Powell - [@Rory-Powell](https://github.com/Rory-Powell)
+* Michaël St-Georges [@CSLTech](https://github.com/CSLTech)
@@ -5,7 +5,7 @@
   export let value = null
 
   $: dataSources = $datasources.list
-    .filter(ds => ds.source === "S3" && !ds.config?.endpoint)
+    .filter(ds => ds.source === "S3")
     .map(ds => ({
      label: ds.name,
      value: ds._id,
@@ -17,8 +17,10 @@ module FetchMock {
       raw: () => {
         return { "content-type": ["application/json"] }
       },
-      get: () => {
+      get: (name: string) => {
+        if (name.toLowerCase() === "content-type") {
+          return ["application/json"]
+        }
       },
     },
     json: async () => {
@@ -292,11 +292,6 @@ export const getSignedUploadURL = async function (ctx: Ctx) {
     ctx.throw(400, "The specified datasource could not be found")
   }
 
-  // Ensure we aren't using a custom endpoint
-  if (datasource?.config?.endpoint) {
-    ctx.throw(400, "S3 datasources with custom endpoints are not supported")
-  }
-
   // Determine type of datasource and generate signed URL
   let signedUrl
   let publicUrl
@@ -309,6 +304,7 @@ export const getSignedUploadURL = async function (ctx: Ctx) {
   try {
     const s3 = new AWS.S3({
       region: awsRegion,
+      endpoint: datasource?.config?.endpoint as string,
       accessKeyId: datasource?.config?.accessKeyId as string,
       secretAccessKey: datasource?.config?.secretAccessKey as string,
       apiVersion: "2006-03-01",
@@ -316,7 +312,11 @@ export const getSignedUploadURL = async function (ctx: Ctx) {
     })
     const params = { Bucket: bucket, Key: key }
     signedUrl = s3.getSignedUrl("putObject", params)
+    if (datasource?.config?.endpoint) {
+      publicUrl = `${datasource.config.endpoint}/${bucket}/${key}`
+    } else {
     publicUrl = `https://${bucket}.s3.${awsRegion}.amazonaws.com/${key}`
+    }
   } catch (error: any) {
     ctx.throw(400, error)
   }
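For context on the hunk above: a minimal standalone sketch of the same presigned-upload flow, written against the aws-sdk v2 API already used in this handler. The region value and the bucket/key parameters are placeholders, and credential handling is omitted (the SDK falls back to its default provider chain); this illustrates the custom-endpoint/public-URL split, it is not the commit's code.

import AWS from "aws-sdk"

// Sketch: sign a PUT for `key` in `bucket`, optionally against a custom
// S3-compatible endpoint, and derive the URL the uploaded object will live at.
function signUpload(bucket: string, key: string, endpoint?: string) {
  const region = "eu-west-1" // placeholder
  const s3 = new AWS.S3({
    region,
    endpoint, // undefined means the default AWS endpoint is used
    apiVersion: "2006-03-01",
  })
  const signedUrl = s3.getSignedUrl("putObject", { Bucket: bucket, Key: key })
  const publicUrl = endpoint
    ? `${endpoint}/${bucket}/${key}`
    : `https://${bucket}.s3.${region}.amazonaws.com/${key}`
  return { signedUrl, publicUrl }
}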
@@ -16,6 +16,7 @@ import get from "lodash/get"
 import * as https from "https"
 import qs from "querystring"
 import fetch from "node-fetch"
+import type { Response } from "node-fetch"
 import { formatBytes } from "../utilities"
 import { performance } from "perf_hooks"
 import FormData from "form-data"
@@ -25,6 +26,7 @@ import { handleFileResponse, handleXml } from "./utils"
 import { parse } from "content-disposition"
 import path from "path"
 import { Builder as XmlBuilder } from "xml2js"
+import { getAttachmentHeaders } from "./utils/restUtils"
 
 enum BodyType {
   NONE = "none",
@@ -130,14 +132,15 @@ class RestIntegration implements IntegrationBase {
     this.config = config
   }
 
-  async parseResponse(response: any, pagination: PaginationConfig | null) {
+  async parseResponse(response: Response, pagination: PaginationConfig | null) {
     let data: any[] | string | undefined,
       raw: string | undefined,
-      headers: Record<string, string> = {},
+      headers: Record<string, string[] | string> = {},
       filename: string | undefined
 
-    const contentType = response.headers.get("content-type") || ""
-    const contentDisposition = response.headers.get("content-disposition") || ""
+    const { contentType, contentDisposition } = getAttachmentHeaders(
+      response.headers
+    )
     if (
       contentDisposition.includes("filename") ||
       contentDisposition.includes("attachment") ||
@@ -172,7 +175,7 @@ class RestIntegration implements IntegrationBase {
       throw `Failed to parse response body: ${err}`
     }
 
-    let contentLength: string = response.headers.get("content-length")
+    let contentLength = response.headers.get("content-length")
    if (!contentLength && raw) {
      contentLength = Buffer.byteLength(raw, "utf8").toString()
    }
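A brief aside on why the two type changes above line up, assuming the node-fetch v2 package imported in this file: Headers.get() returns string | null, so the explicit string annotation on contentLength was inaccurate, while Headers.raw() returns an array of values per header name, which is what the widened Record<string, string[] | string> accommodates. A minimal sketch, not part of the commit:

import fetch from "node-fetch"

// Sketch: the two shapes node-fetch exposes for response headers.
async function inspectHeaders(url: string) {
  const single: string | null = (await fetch(url)).headers.get("content-length")
  const raw: Record<string, string[]> = (await fetch(url)).headers.raw()
  return { single, raw }
}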
@@ -4,7 +4,11 @@ jest.mock("node-fetch", () => {
     raw: () => {
       return { "content-type": ["application/json"] }
     },
-    get: () => ["application/json"],
+    get: (name: string) => {
+      if (name.toLowerCase() === "content-type") {
+        return ["application/json"]
+      }
+    },
   },
   json: jest.fn(() => ({
     my_next_cursor: 123,
@@ -211,7 +215,16 @@ describe("REST Integration", () => {
       json: json ? async () => json : undefined,
       text: text ? async () => text : undefined,
       headers: {
-        get: (key: any) => (key === "content-length" ? 100 : header),
+        get: (key: string) => {
+          switch (key.toLowerCase()) {
+            case "content-length":
+              return 100
+            case "content-type":
+              return header
+            default:
+              return ""
+          }
+        },
         raw: () => ({ "content-type": header }),
       },
     }
@@ -0,0 +1,38 @@
+import { getAttachmentHeaders } from "../utils/restUtils"
+import type { Headers } from "node-fetch"
+
+function headers(dispositionValue: string) {
+  return {
+    get: (name: string) => {
+      if (name.toLowerCase() === "content-disposition") {
+        return dispositionValue
+      } else {
+        return "application/pdf"
+      }
+    },
+    set: () => {},
+  } as unknown as Headers
+}
+
+describe("getAttachmentHeaders", () => {
+  it("should be able to correctly handle a broken content-disposition", () => {
+    const { contentDisposition } = getAttachmentHeaders(
+      headers(`filename="report.pdf"`)
+    )
+    expect(contentDisposition).toBe(`attachment; filename="report.pdf"`)
+  })
+
+  it("should be able to correctly handle a filename that could cause problems", () => {
+    const { contentDisposition } = getAttachmentHeaders(
+      headers(`filename="report;.pdf"`)
+    )
+    expect(contentDisposition).toBe(`attachment; filename="report;.pdf"`)
+  })
+
+  it("should not touch a valid content-disposition", () => {
+    const { contentDisposition } = getAttachmentHeaders(
+      headers(`inline; filename="report.pdf"`)
+    )
+    expect(contentDisposition).toBe(`inline; filename="report.pdf"`)
+  })
+})
@@ -0,0 +1,28 @@
+import type { Headers } from "node-fetch"
+
+export function getAttachmentHeaders(headers: Headers) {
+  const contentType = headers.get("content-type") || ""
+  let contentDisposition = headers.get("content-disposition") || ""
+
+  // the API does not follow the requirements of https://www.ietf.org/rfc/rfc2183.txt
+  // all content-disposition headers should be format disposition-type; parameters
+  // but some APIs do not provide a type, causing the parse below to fail - add one to fix this
+  if (contentDisposition) {
+    const quotesRegex = /"(?:[^"\\]|\\.)*"|;/g
+    let match: RegExpMatchArray | null = null,
+      found = false
+    while ((match = quotesRegex.exec(contentDisposition)) !== null) {
+      if (match[0] === ";") {
+        found = true
+      }
+    }
+    if (!found) {
+      return {
+        contentDisposition: `attachment; ${contentDisposition}`,
+        contentType,
+      }
+    }
+  }
+
+  return { contentDisposition, contentType }
+}
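To illustrate the reasoning behind the quote-aware regex in the new helper above (a sketch using a placeholder filename, not part of the commit): a plain semicolon check would misread a ";" inside a quoted filename as the separator between disposition-type and parameters, whereas the regex consumes quoted strings first and only counts semicolons found outside them.

// Sketch: `filename="report;.pdf"` has no disposition-type, but a naive check thinks it does.
const bare = `filename="report;.pdf"`
const naive = bare.includes(";") // true - false positive, the ";" sits inside the quotes
const quotesRegex = /"(?:[^"\\]|\\.)*"|;/g
const hasSeparator = [...bare.matchAll(quotesRegex)].some(m => m[0] === ";") // false
// With no separator found, getAttachmentHeaders prepends "attachment; " so the
// downstream content-disposition parser receives a well-formed header value.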