Merge pull request #13835 from Budibase/fix/rest-download-issue
Fixing some REST file download issues
Commit f109c1be64
@@ -14,6 +14,7 @@ import { v4 } from "uuid"
 import { APP_PREFIX, APP_DEV_PREFIX } from "../db"
 import fsp from "fs/promises"
 import { HeadObjectOutput } from "aws-sdk/clients/s3"
+import { ReadableStream } from "stream/web"

 const streamPipeline = promisify(stream.pipeline)
 // use this as a temporary store of buckets that are being created
@@ -41,10 +42,7 @@ type UploadParams = BaseUploadParams & {
   path?: string | PathLike
 }

-export type StreamTypes =
-  | ReadStream
-  | NodeJS.ReadableStream
-  | ReadableStream<Uint8Array>
+export type StreamTypes = ReadStream | NodeJS.ReadableStream

 export type StreamUploadParams = BaseUploadParams & {
   stream?: StreamTypes
@@ -222,6 +220,9 @@ export async function streamUpload({
   extra,
   ttl,
 }: StreamUploadParams) {
+  if (!stream) {
+    throw new Error("Stream to upload is invalid/undefined")
+  }
   const extension = filename.split(".").pop()
   const objectStore = ObjectStore(bucketName)
   const bucketCreated = await createBucketIfNotExists(objectStore, bucketName)
@@ -251,14 +252,27 @@ export async function streamUpload({
       : CONTENT_TYPE_MAP.txt
   }

+  const bucket = sanitizeBucket(bucketName),
+    objKey = sanitizeKey(filename)
   const params = {
-    Bucket: sanitizeBucket(bucketName),
-    Key: sanitizeKey(filename),
+    Bucket: bucket,
+    Key: objKey,
     Body: stream,
     ContentType: contentType,
     ...extra,
   }
-  return objectStore.upload(params).promise()
+
+  const details = await objectStore.upload(params).promise()
+  const headDetails = await objectStore
+    .headObject({
+      Bucket: bucket,
+      Key: objKey,
+    })
+    .promise()
+  return {
+    ...details,
+    ContentLength: headDetails.ContentLength,
+  }
 }

 /**
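Why streamUpload now does more work: AWS SDK v2's upload().promise() resolves with the managed-upload details (Location, ETag, Bucket, Key) but not the stored object's size, so the function follows up with a headObject call and merges ContentLength into its return value. A minimal sketch of that pattern, assuming aws-sdk v2 and an existing bucket (the function name uploadWithSize is illustrative, not Budibase's API):

import AWS from "aws-sdk"

async function uploadWithSize(
  s3: AWS.S3,
  bucket: string,
  key: string,
  body: NodeJS.ReadableStream
) {
  // upload().promise() reports where the object landed, but not how big it is
  const details = await s3
    .upload({ Bucket: bucket, Key: key, Body: body })
    .promise()
  // a HEAD request on the freshly written object fills in ContentLength
  const head = await s3.headObject({ Bucket: bucket, Key: key }).promise()
  return { ...details, ContentLength: head.ContentLength }
}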
@@ -68,7 +68,6 @@
     "aws-sdk": "2.1030.0",
     "bcrypt": "5.1.0",
     "bcryptjs": "2.4.3",
-    "bl": "^6.0.12",
     "bull": "4.10.1",
     "chokidar": "3.5.3",
     "content-disposition": "^0.5.4",
@@ -116,7 +115,8 @@
     "uuid": "^8.3.2",
     "validate.js": "0.13.1",
     "worker-farm": "1.7.0",
-    "xml2js": "0.5.0"
+    "xml2js": "0.5.0",
+    "tmp": "0.2.3"
   },
   "devDependencies": {
     "@babel/preset-env": "7.16.11",
@@ -137,6 +137,7 @@
     "@types/supertest": "2.0.14",
     "@types/tar": "6.1.5",
     "@types/uuid": "8.3.4",
+    "@types/tmp": "0.2.6",
     "copyfiles": "2.4.1",
     "docker-compose": "0.23.17",
     "jest": "29.7.0",
@@ -149,13 +149,12 @@ class RestIntegration implements IntegrationBase {
       { downloadImages: this.config.downloadImages }
     )
     let contentLength = response.headers.get("content-length")
-    if (!contentLength && raw) {
-      contentLength = Buffer.byteLength(raw, "utf8").toString()
-    }
+    let isSuccess = response.status >= 200 && response.status < 300
     if (
-      contentDisposition.includes("filename") ||
-      contentDisposition.includes("attachment") ||
-      contentDisposition.includes("form-data")
+      (contentDisposition.includes("filename") ||
+        contentDisposition.includes("attachment") ||
+        contentDisposition.includes("form-data")) &&
+      isSuccess
     ) {
       filename =
         path.basename(parse(contentDisposition).parameters?.filename) || ""
@@ -168,6 +167,9 @@ class RestIntegration implements IntegrationBase {
       return handleFileResponse(response, filename, this.startTimeMs)
     } else {
       responseTxt = response.text ? await response.text() : ""
+      if (!contentLength && responseTxt) {
+        contentLength = Buffer.byteLength(responseTxt, "utf8").toString()
+      }
       const hasContent =
         (contentLength && parseInt(contentLength) > 0) ||
         responseTxt.length > 0
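Two behavioural fixes in the REST integration hunks above: a response is only treated as a file download when it is both flagged by Content-Disposition and actually successful (2xx), so error bodies keep flowing through the normal text path; and the content-length fallback now derives the size from the response text after it has been read, rather than from the raw value. A sketch of the success gate, assuming a node-fetch style response (the helper name is illustrative):

function shouldDownloadAsFile(
  response: { status: number },
  contentDisposition: string
): boolean {
  // only 2xx responses qualify as downloadable files
  const isSuccess = response.status >= 200 && response.status < 300
  return (
    (contentDisposition.includes("filename") ||
      contentDisposition.includes("attachment") ||
      contentDisposition.includes("form-data")) &&
    isSuccess
  )
}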
@@ -657,6 +657,7 @@ describe("REST Integration", () => {
     mockReadable.push(null)
     ;(fetch as unknown as jest.Mock).mockImplementationOnce(() =>
       Promise.resolve({
+        status: 200,
        headers: {
          raw: () => ({
            "content-type": [contentType],
@@ -700,6 +701,7 @@ describe("REST Integration", () => {
     mockReadable.push(null)
     ;(fetch as unknown as jest.Mock).mockImplementationOnce(() =>
       Promise.resolve({
+        status: 200,
        headers: {
          raw: () => ({
            "content-type": [contentType],
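Both test hunks add status: 200 to the mocked fetch responses: with the new isSuccess gate, a mock without a status would read as undefined and the file-download path would never run. An illustrative sketch of the minimal shape the mocks now need (field values here are examples, not the tests' exact fixtures):

const mockResponse = {
  status: 200, // required now that the integration checks for a 2xx response
  headers: {
    raw: () => ({ "content-type": ["image/png"] }),
  },
}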
@@ -9,10 +9,12 @@ import { context, objectStore, sql } from "@budibase/backend-core"
 import { v4 } from "uuid"
 import { parseStringPromise as xmlParser } from "xml2js"
 import { formatBytes } from "../../utilities"
-import bl from "bl"
 import env from "../../environment"
 import { InvalidColumns } from "../../constants"
 import { helpers, utils } from "@budibase/shared-core"
+import { pipeline } from "stream/promises"
+import tmp from "tmp"
+import fs from "fs"

 type PrimitiveTypes =
   | FieldType.STRING
@@ -360,35 +362,44 @@ export async function handleFileResponse(
   const key = `${context.getProdAppId()}/${processedFileName}`
   const bucket = objectStore.ObjectStoreBuckets.TEMP

-  const stream = response.body.pipe(bl((error, data) => data))
-
-  if (response.body) {
-    const contentLength = response.headers.get("content-length")
-    if (contentLength) {
-      size = parseInt(contentLength, 10)
-    }
-
-    await objectStore.streamUpload({
-      bucket,
-      filename: key,
-      stream,
-      ttl: 1,
-      type: response.headers["content-type"],
-    })
-  }
-  presignedUrl = objectStore.getPresignedUrl(bucket, key)
-  return {
-    data: {
-      size,
-      name: processedFileName,
-      url: presignedUrl,
-      extension: fileExtension,
-      key: key,
-    },
-    info: {
-      code: response.status,
-      size: formatBytes(size.toString()),
-      time: `${Math.round(performance.now() - startTime)}ms`,
-    },
-  }
+  // put the response stream to disk temporarily as a buffer
+  const tmpObj = tmp.fileSync()
+  try {
+    await pipeline(response.body, fs.createWriteStream(tmpObj.name))
+    if (response.body) {
+      const contentLength = response.headers.get("content-length")
+      if (contentLength) {
+        size = parseInt(contentLength, 10)
+      }
+
+      const details = await objectStore.streamUpload({
+        bucket,
+        filename: key,
+        stream: fs.createReadStream(tmpObj.name),
+        ttl: 1,
+        type: response.headers["content-type"],
+      })
+      if (!size && details.ContentLength) {
+        size = details.ContentLength
+      }
+    }
+    presignedUrl = objectStore.getPresignedUrl(bucket, key)
+    return {
+      data: {
+        size,
+        name: processedFileName,
+        url: presignedUrl,
+        extension: fileExtension,
+        key: key,
+      },
+      info: {
+        code: response.status,
+        size: formatBytes(size.toString()),
+        time: `${Math.round(performance.now() - startTime)}ms`,
+      },
+    }
+  } finally {
+    // cleanup tmp
+    tmpObj.removeCallback()
+  }
 }
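The handleFileResponse rewrite is the heart of the PR, and the package.json change mirrors it: the bl dependency, which collected the whole response body into an in-memory buffer list, is dropped in favour of tmp. The response stream is now piped to a temporary file, re-read from disk for the object-store upload, and the temp file is always removed in finally; when the server sent no content-length header, the ContentLength from the new streamUpload return value is used instead. A standalone sketch of the spill-to-disk pattern, assuming any Node readable body (uploadFn stands in for objectStore.streamUpload):

import fs from "fs"
import tmp from "tmp"
import { pipeline } from "stream/promises"

async function spillToDiskAndUpload(
  body: NodeJS.ReadableStream,
  uploadFn: (stream: fs.ReadStream) => Promise<unknown>
) {
  const tmpObj = tmp.fileSync() // create a unique temp file synchronously
  try {
    // stream the body to disk without holding it all in memory;
    // pipeline rejects on error and destroys both streams
    await pipeline(body, fs.createWriteStream(tmpObj.name))
    // hand the upload a fresh stream read back from the temp file
    return await uploadFn(fs.createReadStream(tmpObj.name))
  } finally {
    tmpObj.removeCallback() // delete the temp file whatever happens
  }
}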
@@ -196,12 +196,22 @@ class QueryRunner {
     return { rows, keys, info, extra, pagination }
   }

-  async runAnotherQuery(queryId: string, parameters: any) {
+  async runAnotherQuery(
+    queryId: string,
+    currentParameters: Record<string, any>
+  ) {
     const db = context.getAppDB()
     const query = await db.get<Query>(queryId)
     const datasource = await sdk.datasources.get(query.datasourceId, {
       enriched: true,
     })
+    // enrich parameters with dynamic queries defaults
+    const defaultParams = query.parameters || []
+    for (let param of defaultParams) {
+      if (!currentParameters[param.name]) {
+        currentParameters[param.name] = param.default
+      }
+    }
     return new QueryRunner(
       {
         schema: query.schema,
@@ -210,7 +220,7 @@ class QueryRunner {
         transformer: query.transformer,
         nullDefaultSupport: query.nullDefaultSupport,
         ctx: this.ctx,
-        parameters,
+        parameters: currentParameters,
         datasource,
         queryId,
       },
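The QueryRunner change renames the argument and, before spawning the nested runner, backfills any parameter the caller did not supply from the defaults declared on the stored query, so dynamic queries invoked from other queries run with a complete parameter set. A sketch of that merge with minimal local types (in the real code the defaults come from query.parameters on the stored Query document):

type QueryParameter = { name: string; default: string }

function enrichParameters(
  defaults: QueryParameter[],
  current: Record<string, any>
): Record<string, any> {
  for (const param of defaults) {
    if (!current[param.name]) {
      // the caller did not provide this parameter, fall back to its default
      current[param.name] = param.default
    }
  }
  return current
}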
@@ -245,7 +245,7 @@ export type AutomationAttachment = {

 export type AutomationAttachmentContent = {
   filename: string
-  content: ReadStream | NodeJS.ReadableStream | ReadableStream<Uint8Array>
+  content: ReadStream | NodeJS.ReadableStream
 }

 export type BucketedContent = AutomationAttachmentContent & {
yarn.lock (20 lines changed)
@@ -6348,6 +6348,11 @@
   dependencies:
     "@types/estree" "*"

+"@types/tmp@0.2.6":
+  version "0.2.6"
+  resolved "https://registry.yarnpkg.com/@types/tmp/-/tmp-0.2.6.tgz#d785ee90c52d7cc020e249c948c36f7b32d1e217"
+  integrity sha512-chhaNf2oKHlRkDGt+tiKE2Z5aJ6qalm7Z9rlLdBwmOiAAf09YQvvoLXjWK4HWPF1xU/fqvMgfNfpVoBscA/tKA==
+
 "@types/tough-cookie@*", "@types/tough-cookie@^4.0.2":
   version "4.0.2"
   resolved "https://registry.yarnpkg.com/@types/tough-cookie/-/tough-cookie-4.0.2.tgz#6286b4c7228d58ab7866d19716f3696e03a09397"
@@ -7700,7 +7705,7 @@ bl@^4.0.3, bl@^4.1.0:
     inherits "^2.0.4"
     readable-stream "^3.4.0"

-bl@^6.0.12, bl@^6.0.3:
+bl@^6.0.3:
   version "6.0.12"
   resolved "https://registry.yarnpkg.com/bl/-/bl-6.0.12.tgz#77c35b96e13aeff028496c798b75389ddee9c7f8"
   integrity sha512-EnEYHilP93oaOa2MnmNEjAcovPS3JlQZOyzGXi3EyEpPhm9qWvdDp7BmAVEVusGzp8LlwQK56Av+OkDoRjzE0w==
@@ -16065,10 +16070,10 @@ mute-stream@~1.0.0:
   resolved "https://registry.yarnpkg.com/mute-stream/-/mute-stream-1.0.0.tgz#e31bd9fe62f0aed23520aa4324ea6671531e013e"
   integrity sha512-avsJQhyd+680gKXyG/sQc0nXaC6rBkPOfyHYcFb9+hdkqQkR9bdnkJ0AMZhke0oesPqIO+mFFJ+IdBc7mst4IA==

-mysql2@3.9.7:
-  version "3.9.7"
-  resolved "https://registry.yarnpkg.com/mysql2/-/mysql2-3.9.7.tgz#843755daf65b5ef08afe545fe14b8fb62824741a"
-  integrity sha512-KnJT8vYRcNAZv73uf9zpXqNbvBG7DJrs+1nACsjZP1HMJ1TgXEy8wnNilXAn/5i57JizXKtrUtwDB7HxT9DDpw==
+mysql2@3.9.8:
+  version "3.9.8"
+  resolved "https://registry.yarnpkg.com/mysql2/-/mysql2-3.9.8.tgz#fe8a0f975f2c495ed76ca988ddc5505801dc49ce"
+  integrity sha512-+5JKNjPuks1FNMoy9TYpl77f+5frbTklz7eb3XDwbpsERRLEeXiW2PDEkakYF50UuKU2qwfGnyXpKYvukv8mGA==
   dependencies:
     denque "^2.1.0"
     generate-function "^2.3.1"
@@ -21283,6 +21288,11 @@ tlhunter-sorted-set@^0.1.0:
   resolved "https://registry.yarnpkg.com/tlhunter-sorted-set/-/tlhunter-sorted-set-0.1.0.tgz#1c3eae28c0fa4dff97e9501d2e3c204b86406f4b"
   integrity sha512-eGYW4bjf1DtrHzUYxYfAcSytpOkA44zsr7G2n3PV7yOUR23vmkGe3LL4R+1jL9OsXtbsFOwe8XtbCrabeaEFnw==

+tmp@0.2.3:
+  version "0.2.3"
+  resolved "https://registry.yarnpkg.com/tmp/-/tmp-0.2.3.tgz#eb783cc22bc1e8bebd0671476d46ea4eb32a79ae"
+  integrity sha512-nZD7m9iCPC5g0pYmcaxogYKggSfLsdxl8of3Q/oIbqCqLLIO9IAF0GWjX1z9NZRHPiXv8Wex4yDCaZsgEw0Y8w==
+
 tmp@^0.0.33:
   version "0.0.33"
   resolved "https://registry.yarnpkg.com/tmp/-/tmp-0.0.33.tgz#6d34335889768d21b2bcda0aa277ced3b1bfadf9"