Merge branch 'master' of github.com:Budibase/budibase into feature/audit-log-sqs
commit 821708e79a
@@ -74,8 +74,8 @@
     "build:docker:single:multiarch": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/single/Dockerfile -t budibase:latest .",
     "build:docker:single": "./scripts/build-single-image.sh",
     "build:docker:dependencies": "docker build -f hosting/dependencies/Dockerfile -t budibase/dependencies:latest ./hosting",
-    "publish:docker:couch": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/couchdb/Dockerfile -t budibase/couchdb:latest -t budibase/couchdb:v3.2.1 --push ./hosting/couchdb",
-    "publish:docker:couch-sqs": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/couchdb/Dockerfile.v2 -t budibase/couchdb:v3.2.1-sqs --push ./hosting/couchdb",
+    "publish:docker:couch": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/couchdb/Dockerfile -t budibase/couchdb:latest -t budibase/couchdb:v3.3.3 --push ./hosting/couchdb",
+    "publish:docker:couch-sqs": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/couchdb/Dockerfile.v2 -t budibase/couchdb:v3.3.3-sqs --push ./hosting/couchdb",
     "publish:docker:dependencies": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/dependencies/Dockerfile -t budibase/dependencies:latest -t budibase/dependencies:v3.2.1 --push ./hosting",
     "release:helm": "node scripts/releaseHelmChart",
     "env:multi:enable": "lerna run --stream env:multi:enable",
@@ -309,7 +309,7 @@
 {#if links?.length}
   <DataSourceCategory
     dividerState={true}
-    heading="Links"
+    heading="Relationships"
     dataSet={links}
     {value}
     onSelect={handleSelected}
@@ -64,13 +64,11 @@ describe("rest", () => {
     cached = await getCachedVariable(basedOnQuery._id!, "foo")
     expect(cached).toBeNull()

-    nock("http://one.example.com")
-      .get("/")
-      .reply(200, [{ name: "one" }])
+    const body1 = [{ name: "one" }]
+    const body2 = [{ name: "two" }]
+    nock("http://one.example.com").get("/").reply(200, body1)
     nock("http://two.example.com").get("/?test=one").reply(500)
-    nock("http://two.example.com")
-      .get("/?test=one")
-      .reply(200, [{ name: "two" }])
+    nock("http://two.example.com").get("/?test=one").reply(200, body2)

     const res = await config.api.query.preview({
       datasourceId: datasource._id!,
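Note on the hunk above: the mocked response bodies are pulled out into `body1`/`body2` constants and the chained `nock` calls are collapsed onto single lines. Because nock interceptors are consumed once per registration, mocking `http://two.example.com` twice queues a 500 followed by a 200 for the same path. A minimal, self-contained sketch of the pattern (the `demo` function is hypothetical and not part of the test suite):

```ts
import nock from "nock"
import fetch from "node-fetch"

async function demo() {
  const body1 = [{ name: "one" }]
  // one-shot interceptor: replies to exactly one matching GET /
  nock("http://one.example.com").get("/").reply(200, body1)
  // queue two replies for the same route: first a 500, then a 200
  nock("http://two.example.com").get("/?test=one").reply(500)
  nock("http://two.example.com").get("/?test=one").reply(200, body1)

  const res = await fetch("http://one.example.com/")
  console.log(res.status, await res.json()) // 200 [ { name: 'one' } ]
}
```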
@@ -148,6 +148,10 @@ class RestIntegration implements IntegrationBase {
       response.headers,
       { downloadImages: this.config.downloadImages }
     )
+    let contentLength = response.headers.get("content-length")
+    if (!contentLength && raw) {
+      contentLength = Buffer.byteLength(raw, "utf8").toString()
+    }
     if (
       contentDisposition.includes("filename") ||
       contentDisposition.includes("attachment") ||
@@ -156,36 +160,46 @@ class RestIntegration implements IntegrationBase {
       filename =
         path.basename(parse(contentDisposition).parameters?.filename) || ""
     }

+    let triedParsing: boolean = false,
+      responseTxt: string | undefined
     try {
       if (filename) {
         return handleFileResponse(response, filename, this.startTimeMs)
       } else {
+        responseTxt = response.text ? await response.text() : ""
+        const hasContent =
+          (contentLength && parseInt(contentLength) > 0) ||
+          responseTxt.length > 0
         if (response.status === 204) {
           data = []
           raw = ""
-        } else if (contentType.includes("application/json")) {
-          data = await response.json()
-          raw = JSON.stringify(data)
+        } else if (hasContent && contentType.includes("application/json")) {
+          triedParsing = true
+          data = JSON.parse(responseTxt)
+          raw = responseTxt
         } else if (
-          contentType.includes("text/xml") ||
+          (hasContent && contentType.includes("text/xml")) ||
           contentType.includes("application/xml")
         ) {
-          let xmlResponse = await handleXml(response)
+          triedParsing = true
+          let xmlResponse = await handleXml(responseTxt)
           data = xmlResponse.data
           raw = xmlResponse.rawXml
         } else {
-          data = await response.text()
+          data = responseTxt
           raw = data as string
         }
       }
     } catch (err) {
-      throw `Failed to parse response body: ${err}`
+      if (triedParsing) {
+        data = responseTxt
+        raw = data as string
+      } else {
+        throw new Error(`Failed to parse response body: ${err}`)
+      }
     }

-    let contentLength = response.headers.get("content-length")
-    if (!contentLength && raw) {
-      contentLength = Buffer.byteLength(raw, "utf8").toString()
-    }
     const size = formatBytes(contentLength || "0")
     const time = `${Math.round(performance.now() - this.startTimeMs)}ms`
     headers = response.headers.raw()
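In the two hunks above, `parseResponse` now reads the body once into `responseTxt`, derives `contentLength` earlier (falling back to the byte length of the raw body), gates JSON/XML parsing on `hasContent`, and only rethrows a parse error when no parse was actually attempted; if parsing was tried and failed, the raw text is returned instead. A simplified, self-contained sketch of that fallback logic (the `parseBody` helper is hypothetical; the real method also handles file downloads, XML and 204 responses):

```ts
// Sketch of the parse-with-fallback pattern introduced above.
type ParsedBody = { data: unknown; raw: string }

function parseBody(
  contentType: string,
  contentLength: string | null,
  responseTxt: string
): ParsedBody {
  // treat the body as present if either the header or the text says so
  const hasContent =
    (contentLength && parseInt(contentLength) > 0) || responseTxt.length > 0

  let triedParsing = false
  try {
    if (hasContent && contentType.includes("application/json")) {
      triedParsing = true
      return { data: JSON.parse(responseTxt), raw: responseTxt }
    }
    // anything else is passed through as plain text
    return { data: responseTxt, raw: responseTxt }
  } catch (err) {
    if (triedParsing) {
      // parsing was attempted but failed: surface the raw body instead
      return { data: responseTxt, raw: responseTxt }
    }
    throw new Error(`Failed to parse response body: ${err}`)
  }
}

// e.g. parseBody("application/json", "2", "{]") returns the raw "{]" string
```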
@@ -1,19 +1,27 @@
 jest.mock("node-fetch", () => {
+  const obj = {
+    my_next_cursor: 123,
+  }
+  const str = JSON.stringify(obj)
   return jest.fn(() => ({
     headers: {
       raw: () => {
-        return { "content-type": ["application/json"] }
+        return {
+          "content-type": ["application/json"],
+          "content-length": str.length,
+        }
       },
       get: (name: string) => {
-        if (name.toLowerCase() === "content-type") {
+        const lcName = name.toLowerCase()
+        if (lcName === "content-type") {
           return ["application/json"]
+        } else if (lcName === "content-length") {
+          return str.length
         }
       },
     },
-    json: jest.fn(() => ({
-      my_next_cursor: 123,
-    })),
-    text: jest.fn(),
+    json: jest.fn(() => obj),
+    text: jest.fn(() => str),
   }))
 })

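The mock above now derives `json()` and `text()` from the same `obj` and reports a matching `content-length`, so the new text-based parsing path sees a consistent body. A hypothetical test consuming the mocked module might look like this (illustrative only, not part of the diff):

```ts
import fetch from "node-fetch"

it("mocked fetch returns matching json/text bodies", async () => {
  const res: any = await (fetch as any)("http://example.com")
  const body = await res.json() // { my_next_cursor: 123 }
  // text() is the serialized form of the same object
  expect(await res.text()).toEqual(JSON.stringify(body))
  // content-length matches the serialized body length
  expect(res.headers.get("content-length")).toEqual(
    JSON.stringify(body).length
  )
})
```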
@@ -231,7 +239,8 @@ describe("REST Integration", () => {
   }

   it("should be able to parse JSON response", async () => {
-    const input = buildInput({ a: 1 }, null, "application/json")
+    const obj = { a: 1 }
+    const input = buildInput(obj, JSON.stringify(obj), "application/json")
     const output = await config.integration.parseResponse(input)
     expect(output.data).toEqual({ a: 1 })
     expect(output.info.code).toEqual(200)
@@ -261,7 +270,7 @@ describe("REST Integration", () => {
   test.each([...contentTypes, undefined])(
     "should not throw an error on 204 no content",
     async contentType => {
-      const input = buildInput(undefined, null, contentType, 204)
+      const input = buildInput(undefined, "", contentType, 204)
       const output = await config.integration.parseResponse(input)
       expect(output.data).toEqual([])
       expect(output.extra.raw).toEqual("")
@@ -330,10 +330,8 @@ export function checkExternalTables(
   return errors
 }

-export async function handleXml(response: any) {
-  let data,
-    rawXml = await response.text()
-  data =
+export async function handleXml(rawXml: string) {
+  let data =
     (await xmlParser(rawXml, {
       explicitArray: false,
       trim: true,
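`handleXml` now receives the already-read body string rather than the response object, so the body is only consumed once (in `parseResponse`). A sketch of the new shape, assuming `xmlParser` wraps xml2js's `parseStringPromise` (an assumption; only the option names are taken from the diff):

```ts
import { parseStringPromise } from "xml2js"

// hypothetical stand-in for the project's xmlParser helper
export async function handleXmlSketch(rawXml: string) {
  const data = await parseStringPromise(rawXml, {
    explicitArray: false, // single child elements become objects, not arrays
    trim: true, // strip surrounding whitespace from text nodes
  })
  return { data, rawXml }
}

// usage: const { data, rawXml } = await handleXmlSketch(responseTxt)
```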
@@ -22,5 +22,29 @@
   "devDependencies": {
     "rimraf": "3.0.2",
     "typescript": "5.2.2"
+  },
+  "nx": {
+    "targets": {
+      "build": {
+        "dependsOn": [
+          {
+            "projects": [
+              "@budibase/types"
+            ],
+            "target": "build"
+          }
+        ]
+      },
+      "dev": {
+        "dependsOn": [
+          {
+            "projects": [
+              "@budibase/types"
+            ],
+            "target": "build"
+          }
+        ]
+      }
+    }
   }
 }
@@ -3,7 +3,7 @@
   "version": "0.0.0",
   "description": "Budibase types",
   "main": "dist/index.js",
-  "types": "src/index.ts",
+  "types": "dist/index.d.ts",
   "author": "Budibase",
   "license": "GPL-3.0",
   "scripts": {