Merge branch 'master' of github.com:budibase/budibase into plugin-typing
commit a842be2296
@@ -30,7 +30,7 @@ env:

 jobs:
   lint:
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     steps:
       - name: Checkout repo
         uses: actions/checkout@v4
@@ -47,7 +47,7 @@ jobs:
       - run: yarn lint

   build:
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     steps:
       - name: Checkout repo
         uses: actions/checkout@v4
@@ -76,7 +76,7 @@ jobs:
           fi

   helm-lint:
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     steps:
       - name: Checkout repo
         uses: actions/checkout@v4
@@ -88,7 +88,7 @@ jobs:
       - run: cd charts/budibase && helm lint .

   test-libraries:
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     steps:
       - name: Checkout repo
         uses: actions/checkout@v4
@@ -122,7 +122,7 @@ jobs:
           fi

   test-worker:
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     steps:
       - name: Checkout repo
         uses: actions/checkout@v4
@@ -151,11 +151,22 @@ jobs:
           yarn test --verbose --reporters=default --reporters=github-actions

   test-server:
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     strategy:
       matrix:
         datasource:
-          [mssql, mysql, postgres, postgres_legacy, mongodb, mariadb, oracle, sqs, none]
+          [
+            mssql,
+            mysql,
+            postgres,
+            postgres_legacy,
+            mongodb,
+            mariadb,
+            oracle,
+            sqs,
+            elasticsearch,
+            none,
+          ]
     steps:
       - name: Checkout repo
         uses: actions/checkout@v4
@@ -192,6 +203,8 @@ jobs:
             docker pull budibase/oracle-database:23.2-slim-faststart
           elif [ "${{ matrix.datasource }}" == "postgres_legacy" ]; then
            docker pull postgres:9.5.25
+          elif [ "${{ matrix.datasource }}" == "elasticsearch" ]; then
+            docker pull elasticsearch@${{ steps.dotenv.outputs.ELASTICSEARCH_SHA }}
           fi
           docker pull minio/minio &
           docker pull redis &
@@ -240,7 +253,7 @@ jobs:
           yarn test --filter $FILTER --verbose --reporters=default --reporters=github-actions

   check-pro-submodule:
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     if: inputs.run_as_oss != true && (github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase')
     steps:
       - name: Checkout repo and submodules
@@ -299,7 +312,7 @@ jobs:
           fi

   check-lockfile:
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     if: inputs.run_as_oss != true && (github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase')
     steps:
       - name: Checkout repo

@@ -92,7 +92,7 @@ COPY hosting/single/ssh/sshd_config /etc/
 COPY hosting/single/ssh/ssh_setup.sh /tmp

 # setup letsencrypt certificate
-RUN apt-get install -y certbot python3-certbot-nginx
+RUN apt-get update && apt-get install -y certbot python3-certbot-nginx
 COPY hosting/letsencrypt /app/letsencrypt
 RUN chmod +x /app/letsencrypt/certificate-request.sh /app/letsencrypt/certificate-renew.sh

@@ -1,6 +1,6 @@
 {
   "$schema": "node_modules/lerna/schemas/lerna-schema.json",
-  "version": "3.4.17",
+  "version": "3.4.20",
   "npmClient": "yarn",
   "concurrency": 20,
   "command": {

@@ -90,7 +90,7 @@
   $: requestEval(runtimeExpression, context, snippets)
   $: bindingHelpers = new BindingHelpers(getCaretPosition, insertAtPos)

-  $: bindingOptions = bindingsToCompletions(bindings, editorMode)
+  $: bindingOptions = bindingsToCompletions(enrichedBindings, editorMode)
   $: helperOptions = allowHelpers ? getHelperCompletions(editorMode) : []
   $: snippetsOptions =
     usingJS && useSnippets && snippets?.length ? snippets : []

@@ -4492,6 +4492,12 @@
         }
       ]
     },
+    {
+      "type": "text",
+      "label": "Zoom level",
+      "key": "defaultZoom",
+      "defaultValue": "1"
+    },
     {
       "type": "event",
       "label": "On change",

@@ -28,7 +28,7 @@
     "apexcharts": "^3.48.0",
     "dayjs": "^1.10.8",
     "downloadjs": "1.4.7",
-    "html5-qrcode": "^2.2.1",
+    "html5-qrcode": "^2.3.8",
     "leaflet": "^1.7.1",
     "sanitize-html": "^2.13.0",
     "screenfull": "^6.0.1",

@@ -20,6 +20,7 @@
   export let beepFrequency = 2637
   export let customFrequency = 1046
   export let preferredCamera = "environment"
+  export let defaultZoom = 1
   export let validator

   const dispatch = createEventDispatcher()
@@ -58,6 +59,14 @@
       html5QrCode
         .start(cameraSetting, cameraConfig, onScanSuccess)
         .then(() => {
+          if (defaultZoom > 1) {
+            const cameraOptions =
+              html5QrCode.getRunningTrackCameraCapabilities()
+            const zoom = cameraOptions.zoomFeature()
+            if (zoom.isSupported()) {
+              zoom.apply(defaultZoom)
+            }
+          }
           resolve({ initialised: true })
         })
         .catch(err => {

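The zoom hook-in above relies on html5-qrcode's runtime camera-capabilities API, which is only queryable once the scanner has started. A minimal standalone sketch of the same pattern, assuming the library behaves as used in this commit (the element id, fps, and callback are illustrative placeholders, not values from the diff):

import { Html5Qrcode } from "html5-qrcode"

async function startScannerWithZoom(defaultZoom: number) {
  // "reader" is a placeholder DOM element id for this sketch.
  const scanner = new Html5Qrcode("reader")
  await scanner.start(
    { facingMode: "environment" },
    { fps: 25 },
    decodedText => console.log("scanned:", decodedText),
    undefined
  )
  // Capabilities exist only on the running track, so zoom is applied
  // after start() resolves, mirroring the .then() block in the component.
  if (defaultZoom > 1) {
    const zoom = scanner.getRunningTrackCameraCapabilities().zoomFeature()
    if (zoom.isSupported()) {
      await zoom.apply(defaultZoom)
    }
  }
}
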
@@ -17,6 +17,7 @@
   export let beepFrequency
   export let customFrequency
   export let preferredCamera
+  export let defaultZoom
   export let helpText = null

   let fieldState
@@ -56,6 +57,7 @@
       {beepFrequency}
       {customFrequency}
       {preferredCamera}
+      {defaultZoom}
       validator={fieldState.validator}
     />
   {/if}

@@ -1,24 +0,0 @@
-const elastic: any = {}
-
-elastic.Client = function () {
-  this.index = jest.fn().mockResolvedValue({ body: [] })
-  this.search = jest.fn().mockResolvedValue({
-    body: {
-      hits: {
-        hits: [
-          {
-            _source: {
-              name: "test",
-            },
-          },
-        ],
-      },
-    },
-  })
-  this.update = jest.fn().mockResolvedValue({ body: [] })
-  this.delete = jest.fn().mockResolvedValue({ body: [] })
-
-  this.close = jest.fn()
-}
-
-module.exports = elastic

@@ -1,5 +1,6 @@
-MSSQL_SHA=sha256:3b913841850a4d57fcfcb798be06acc88ea0f2acc5418bc0c140a43e91c4a545
+MSSQL_SHA=sha256:d252932ef839c24c61c1139cc98f69c85ca774fa7c6bfaaa0015b7eb02b9dc87
 MYSQL_SHA=sha256:9de9d54fecee6253130e65154b930978b1fcc336bcc86dfd06e89b72a2588ebe
 POSTGRES_SHA=sha256:bd0d8e485d1aca439d39e5ea99b931160bd28d862e74c786f7508e9d0053090e
 MONGODB_SHA=sha256:afa36bca12295b5f9dae68a493c706113922bdab520e901bd5d6c9d7247a1d8d
 MARIADB_SHA=sha256:e59ba8783bf7bc02a4779f103bb0d8751ac0e10f9471089709608377eded7aa8
+ELASTICSEARCH_SHA=sha256:9a6443f55243f6acbfeb4a112d15eb3b9aac74bf25e0e39fa19b3ddd3a6879d0

@@ -165,7 +165,8 @@ describe("/datasources", () => {
   })

 const descriptions = datasourceDescribe({
-  exclude: [DatabaseName.MONGODB, DatabaseName.SQS],
+  plus: true,
+  exclude: [DatabaseName.SQS],
 })

 if (descriptions.length) {
@@ -590,7 +591,8 @@ if (descriptions.length) {
 }

 const datasources = datasourceDescribe({
-  exclude: [DatabaseName.MONGODB, DatabaseName.SQS, DatabaseName.ORACLE],
+  plus: true,
+  exclude: [DatabaseName.SQS, DatabaseName.ORACLE],
 })

 if (datasources.length) {

@@ -9,7 +9,8 @@ import { Knex } from "knex"
 import { generator } from "@budibase/backend-core/tests"

 const descriptions = datasourceDescribe({
-  exclude: [DatabaseName.MONGODB, DatabaseName.SQS],
+  plus: true,
+  exclude: [DatabaseName.SQS],
 })

 if (descriptions.length) {

@@ -1,9 +1,6 @@
 import * as setup from "./utilities"

-import {
-  DatabaseName,
-  datasourceDescribe,
-} from "../../../integrations/tests/utils"
+import { datasourceDescribe } from "../../../integrations/tests/utils"

 import tk from "timekeeper"
 import emitter from "../../../../src/events"
@@ -80,7 +77,7 @@ function encodeJS(binding: string) {
   return `{{ js "${Buffer.from(binding).toString("base64")}"}}`
 }

-const descriptions = datasourceDescribe({ exclude: [DatabaseName.MONGODB] })
+const descriptions = datasourceDescribe({ plus: true })

 if (descriptions.length) {
   describe.each(descriptions)(

@@ -1,8 +1,5 @@
 import { tableForDatasource } from "../../../tests/utilities/structures"
-import {
-  DatabaseName,
-  datasourceDescribe,
-} from "../../../integrations/tests/utils"
+import { datasourceDescribe } from "../../../integrations/tests/utils"
 import {
   context,
   db as dbCore,
@@ -60,7 +57,7 @@ jest.mock("@budibase/pro", () => ({
   },
 }))

-const descriptions = datasourceDescribe({ exclude: [DatabaseName.MONGODB] })
+const descriptions = datasourceDescribe({ plus: true })

 if (descriptions.length) {
   describe.each(descriptions)(

@@ -1,11 +1,3 @@
-// Directly mock the AWS SDK
-jest.mock("@aws-sdk/s3-request-presigner", () => ({
-  getSignedUrl: jest.fn(() => {
-    return `http://example.com`
-  }),
-}))
-jest.mock("@aws-sdk/client-s3")
-
 import { Datasource, SourceName } from "@budibase/types"
 import { setEnv } from "../../../environment"
 import { getRequest, getConfig, afterAll as _afterAll } from "./utilities"
@@ -92,7 +84,17 @@ describe("/static", () => {
         .set(config.defaultHeaders())
         .expect("Content-Type", /json/)
         .expect(200)
-      expect(res.body.signedUrl).toEqual("http://example.com")
+      expect(res.body.signedUrl).toStartWith(
+        "https://foo.s3.eu-west-1.amazonaws.com/bar?"
+      )
+      expect(res.body.signedUrl).toContain("X-Amz-Algorithm=AWS4-HMAC-SHA256")
+      expect(res.body.signedUrl).toContain("X-Amz-Credential=bb")
+      expect(res.body.signedUrl).toContain("X-Amz-Date=")
+      expect(res.body.signedUrl).toContain("X-Amz-Signature=")
+      expect(res.body.signedUrl).toContain("X-Amz-Expires=900")
+      expect(res.body.signedUrl).toContain("X-Amz-SignedHeaders=host")
+
       expect(res.body.publicUrl).toEqual(
         `https://${bucket}.s3.eu-west-1.amazonaws.com/${key}`
       )

@@ -28,17 +28,14 @@ import * as setup from "./utilities"
 import * as uuid from "uuid"

 import { generator } from "@budibase/backend-core/tests"
-import {
-  DatabaseName,
-  datasourceDescribe,
-} from "../../../integrations/tests/utils"
+import { datasourceDescribe } from "../../../integrations/tests/utils"
 import { tableForDatasource } from "../../../tests/utilities/structures"
 import timekeeper from "timekeeper"

 const { basicTable } = setup.structures
 const ISO_REGEX_PATTERN = /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$/

-const descriptions = datasourceDescribe({ exclude: [DatabaseName.MONGODB] })
+const descriptions = datasourceDescribe({ plus: true })

 if (descriptions.length) {
   describe.each(descriptions)(

@@ -37,17 +37,14 @@ import {
   ViewV2Type,
 } from "@budibase/types"
 import { generator, mocks } from "@budibase/backend-core/tests"
-import {
-  DatabaseName,
-  datasourceDescribe,
-} from "../../../integrations/tests/utils"
+import { datasourceDescribe } from "../../../integrations/tests/utils"
 import merge from "lodash/merge"
 import { quotas } from "@budibase/pro"
 import { context, db, events, roles, setEnv } from "@budibase/backend-core"
 import { mockChatGPTResponse } from "../../../tests/utilities/mocks/openai"
 import nock from "nock"

-const descriptions = datasourceDescribe({ exclude: [DatabaseName.MONGODB] })
+const descriptions = datasourceDescribe({ plus: true })

 if (descriptions.length) {
   describe.each(descriptions)(

@@ -9,7 +9,8 @@ import { generator } from "@budibase/backend-core/tests"
 import { createAutomationBuilder } from "../utilities/AutomationTestBuilder"

 const descriptions = datasourceDescribe({
-  exclude: [DatabaseName.MONGODB, DatabaseName.SQS],
+  plus: true,
+  exclude: [DatabaseName.SQS],
 })

 if (descriptions.length) {

@@ -1,4 +1,3 @@
-import { SendEmailResponse } from "@budibase/types"
 import TestConfiguration from "../../../tests/utilities/TestConfiguration"
 import * as workerRequests from "../../../utilities/workerRequests"

@@ -6,18 +5,17 @@ jest.mock("../../../utilities/workerRequests", () => ({
   sendSmtpEmail: jest.fn(),
 }))

-function generateResponse(to: string, from: string): SendEmailResponse {
+function generateResponse(to: string, from: string) {
   return {
-    message: `Email sent to ${to}.`,
+    success: true,
+    response: {
      accepted: [to],
      envelope: {
        from: from,
        to: [to],
      },
-    messageId: "messageId",
-    pending: [],
-    rejected: [],
-    response: "response",
+      message: `Email sent to ${to}.`,
+    },
   }
 }

@@ -10,7 +10,7 @@ import {
 import { Client, ClientOptions } from "@elastic/elasticsearch"
 import { HOST_ADDRESS } from "./utils"

-interface ElasticsearchConfig {
+export interface ElasticsearchConfig {
   url: string
   ssl?: boolean
   ca?: string
@@ -99,9 +99,9 @@ const SCHEMA: Integration = {
   },
 }

-class ElasticSearchIntegration implements IntegrationBase {
+export class ElasticSearchIntegration implements IntegrationBase {
   private config: ElasticsearchConfig
-  private client
+  private client: Client

   constructor(config: ElasticsearchConfig) {
     this.config = config
@@ -132,20 +132,23 @@ class ElasticSearchIntegration implements IntegrationBase {
     }
   }

-  async create(query: { index: string; json: object }) {
-    const { index, json } = query
+  async create(query: {
+    index: string
+    json: object
+    extra?: Record<string, string>
+  }) {
+    const { index, json, extra } = query

     try {
       const result = await this.client.index({
         index,
         body: json,
+        ...extra,
       })
       return result.body
     } catch (err) {
       console.error("Error writing to elasticsearch", err)
       throw err
-    } finally {
-      await this.client.close()
     }
   }

@@ -160,41 +163,46 @@ class ElasticSearchIntegration implements IntegrationBase {
     } catch (err) {
       console.error("Error querying elasticsearch", err)
       throw err
-    } finally {
-      await this.client.close()
     }
   }

-  async update(query: { id: string; index: string; json: object }) {
-    const { id, index, json } = query
+  async update(query: {
+    id: string
+    index: string
+    json: object
+    extra?: Record<string, string>
+  }) {
+    const { id, index, json, extra } = query
     try {
       const result = await this.client.update({
         id,
         index,
         body: json,
+        ...extra,
       })
       return result.body
     } catch (err) {
       console.error("Error querying elasticsearch", err)
       throw err
-    } finally {
-      await this.client.close()
     }
   }

-  async delete(query: { id: string; index: string }) {
-    const { id, index } = query
+  async delete(query: {
+    id: string
+    index: string
+    extra?: Record<string, string>
+  }) {
+    const { id, index, extra } = query
     try {
       const result = await this.client.delete({
         id,
         index,
+        ...extra,
       })
       return result.body
     } catch (err) {
       console.error("Error deleting from elasticsearch", err)
       throw err
-    } finally {
-      await this.client.close()
     }
   }
 }

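The practical effect of the new optional extra field is that callers can pass Elasticsearch request options straight through to the client call, e.g. refresh: "true" so a write is visible to an immediate read. A hedged usage sketch, assuming the read method accepts the same { index, json } shape the tests use (the index name and URL are placeholders):

import { ElasticSearchIntegration } from "../elasticsearch"

async function example() {
  // Placeholder config; in the tests below, the URL comes from a testcontainer.
  const integration = new ElasticSearchIntegration({
    url: "http://127.0.0.1:9200",
  })
  // `...extra` is spread into the client.index() call, so refresh takes effect.
  await integration.create({
    index: "users",
    json: { name: "Hello" },
    extra: { refresh: "true" },
  })
  const records = await integration.read({
    index: "users",
    json: { query: { match_all: {} } },
  })
  console.log(records) // expected: [{ name: "Hello" }]
}
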
@@ -1,83 +1,81 @@
-import { default as ElasticSearchIntegration } from "../elasticsearch"
-
-jest.mock("@elastic/elasticsearch")
-
-class TestConfiguration {
-  integration: any
-
-  constructor(config: any = {}) {
-    this.integration = new ElasticSearchIntegration.integration(config)
-  }
-}
-
-describe("Elasticsearch Integration", () => {
-  let config: any
-  let indexName = "Users"
-
-  beforeEach(() => {
-    config = new TestConfiguration()
-  })
-
-  it("calls the create method with the correct params", async () => {
-    const body = {
-      name: "Hello",
-    }
-    await config.integration.create({
-      index: indexName,
-      json: body,
-    })
-    expect(config.integration.client.index).toHaveBeenCalledWith({
-      index: indexName,
-      body,
-    })
-  })
-
-  it("calls the read method with the correct params", async () => {
-    const body = {
-      query: {
-        term: {
-          name: "kimchy",
-        },
-      },
-    }
-    const response = await config.integration.read({
-      index: indexName,
-      json: body,
-    })
-    expect(config.integration.client.search).toHaveBeenCalledWith({
-      index: indexName,
-      body,
-    })
-    expect(response).toEqual(expect.any(Array))
-  })
-
-  it("calls the update method with the correct params", async () => {
-    const body = {
-      name: "updated",
-    }
-    const response = await config.integration.update({
-      id: "1234",
-      index: indexName,
-      json: body,
-    })
-
-    expect(config.integration.client.update).toHaveBeenCalledWith({
-      id: "1234",
-      index: indexName,
-      body,
-    })
-    expect(response).toEqual(expect.any(Array))
-  })
-
-  it("calls the delete method with the correct params", async () => {
-    const body = {
-      id: "1234",
-    }
-    const response = await config.integration.delete(body)
-
-    expect(config.integration.client.delete).toHaveBeenCalledWith(body)
-    expect(response).toEqual(expect.any(Array))
-  })
-})
+import { Datasource } from "@budibase/types"
+import { ElasticsearchConfig, ElasticSearchIntegration } from "../elasticsearch"
+import { generator } from "@budibase/backend-core/tests"
+import { DatabaseName, datasourceDescribe } from "./utils"
+
+const describes = datasourceDescribe({ only: [DatabaseName.ELASTICSEARCH] })
+
+if (describes.length) {
+  describe.each(describes)("Elasticsearch Integration", ({ dsProvider }) => {
+    let datasource: Datasource
+    let integration: ElasticSearchIntegration
+
+    let index: string
+
+    beforeAll(async () => {
+      const ds = await dsProvider()
+      datasource = ds.datasource!
+    })
+
+    beforeEach(() => {
+      index = generator.guid()
+      integration = new ElasticSearchIntegration(
+        datasource.config! as ElasticsearchConfig
+      )
+    })
+
+    it("can create a record", async () => {
+      await integration.create({
+        index,
+        json: { name: "Hello" },
+        extra: { refresh: "true" },
+      })
+      const records = await integration.read({
+        index,
+        json: { query: { match_all: {} } },
+      })
+      expect(records).toEqual([{ name: "Hello" }])
+    })
+
+    it("can update a record", async () => {
+      const create = await integration.create({
+        index,
+        json: { name: "Hello" },
+        extra: { refresh: "true" },
+      })
+
+      await integration.update({
+        id: create._id,
+        index,
+        json: { doc: { name: "World" } },
+        extra: { refresh: "true" },
+      })
+
+      const records = await integration.read({
+        index,
+        json: { query: { match_all: {} } },
+      })
+      expect(records).toEqual([{ name: "World" }])
+    })
+
+    it("can delete a record", async () => {
+      const create = await integration.create({
+        index,
+        json: { name: "Hello" },
+        extra: { refresh: "true" },
+      })
+
+      await integration.delete({
+        id: create._id,
+        index,
+        extra: { refresh: "true" },
+      })
+
+      const records = await integration.read({
+        index,
+        json: { query: { match_all: {} } },
+      })
+      expect(records).toEqual([])
+    })
+  })
+}

@@ -0,0 +1,54 @@
+import { Datasource, SourceName } from "@budibase/types"
+import { GenericContainer, Wait } from "testcontainers"
+import { testContainerUtils } from "@budibase/backend-core/tests"
+import { startContainer } from "."
+import { ELASTICSEARCH_IMAGE } from "./images"
+import { ElasticsearchConfig } from "../../elasticsearch"
+
+let ports: Promise<testContainerUtils.Port[]>
+
+export async function getDatasource(): Promise<Datasource> {
+  if (!ports) {
+    ports = startContainer(
+      new GenericContainer(ELASTICSEARCH_IMAGE)
+        .withExposedPorts(9200)
+        .withEnvironment({
+          // We need to set the discovery type to single-node to avoid the
+          // cluster waiting for other nodes to join before starting up.
+          "discovery.type": "single-node",
+          // We disable security to avoid having to do any auth against the
+          // container, and to disable SSL. With SSL enabled it uses a self
+          // signed certificate that we'd have to ignore anyway.
+          "xpack.security.enabled": "false",
+        })
+        .withWaitStrategy(
+          Wait.forHttp(
+            // Single node clusters never reach status green, so we wait for
+            // yellow instead.
+            "/_cluster/health?wait_for_status=yellow&timeout=10s",
+            9200
+          ).withStartupTimeout(60000)
+        )
+        // We gave the container a tmpfs data directory. Without this, I found
+        // that the default data directory was very small and the container
+        // easily filled it up. This caused the cluster to go into a red status
+        // and stop responding to requests.
+        .withTmpFs({ "/usr/share/elasticsearch/data": "rw" })
+    )
+  }
+
+  const port = (await ports).find(x => x.container === 9200)?.host
+  if (!port) {
+    throw new Error("Elasticsearch port not found")
+  }
+
+  const config: ElasticsearchConfig = {
+    url: `http://127.0.0.1:${port}`,
+  }
+
+  return {
+    type: "datasource",
+    source: SourceName.ELASTICSEARCH,
+    config,
+  }
+}

@@ -12,3 +12,4 @@ export const POSTGRES_IMAGE = `postgres@${process.env.POSTGRES_SHA}`
 export const POSTGRES_LEGACY_IMAGE = `postgres:9.5.25`
 export const MONGODB_IMAGE = `mongo@${process.env.MONGODB_SHA}`
 export const MARIADB_IMAGE = `mariadb@${process.env.MARIADB_SHA}`
+export const ELASTICSEARCH_IMAGE = `elasticsearch@${process.env.ELASTICSEARCH_SHA}`

@@ -6,6 +6,7 @@ import * as mysql from "./mysql"
 import * as mssql from "./mssql"
 import * as mariadb from "./mariadb"
 import * as oracle from "./oracle"
+import * as elasticsearch from "./elasticsearch"
 import { testContainerUtils } from "@budibase/backend-core/tests"
 import { Knex } from "knex"
 import TestConfiguration from "../../../tests/utilities/TestConfiguration"
@@ -23,22 +24,32 @@ export enum DatabaseName {
   MARIADB = "mariadb",
   ORACLE = "oracle",
   SQS = "sqs",
+  ELASTICSEARCH = "elasticsearch",
 }

+const DATASOURCE_PLUS = [
+  DatabaseName.POSTGRES,
+  DatabaseName.POSTGRES_LEGACY,
+  DatabaseName.MYSQL,
+  DatabaseName.SQL_SERVER,
+  DatabaseName.MARIADB,
+  DatabaseName.ORACLE,
+  DatabaseName.SQS,
+]
+
 const providers: Record<DatabaseName, DatasourceProvider> = {
+  // datasource_plus entries
   [DatabaseName.POSTGRES]: postgres.getDatasource,
   [DatabaseName.POSTGRES_LEGACY]: postgres.getLegacyDatasource,
-  [DatabaseName.MONGODB]: mongodb.getDatasource,
   [DatabaseName.MYSQL]: mysql.getDatasource,
   [DatabaseName.SQL_SERVER]: mssql.getDatasource,
   [DatabaseName.MARIADB]: mariadb.getDatasource,
   [DatabaseName.ORACLE]: oracle.getDatasource,
   [DatabaseName.SQS]: async () => undefined,
-}

-export interface DatasourceDescribeOpts {
-  only?: DatabaseName[]
-  exclude?: DatabaseName[]
+  // rest
+  [DatabaseName.ELASTICSEARCH]: elasticsearch.getDatasource,
+  [DatabaseName.MONGODB]: mongodb.getDatasource,
 }

 export interface DatasourceDescribeReturnPromise {
@@ -103,6 +114,20 @@ function createDummyTest() {
   })
 }

+interface OnlyOpts {
+  only: DatabaseName[]
+}
+
+interface PlusOpts {
+  plus: true
+  exclude?: DatabaseName[]
+}
+
+export type DatasourceDescribeOpts = OnlyOpts | PlusOpts
+
+// If you ever want to rename this function, be mindful that you will also need
+// to modify src/tests/filters/index.js to make sure that we're correctly
+// filtering datasource/non-datasource tests in CI.
 export function datasourceDescribe(opts: DatasourceDescribeOpts) {
   // tests that call this need a lot longer timeouts
   jest.setTimeout(120000)
@@ -111,17 +136,15 @@ export function datasourceDescribe(opts: DatasourceDescribeOpts) {
     createDummyTest()
   }

-  const { only, exclude } = opts
-
-  if (only && exclude) {
-    throw new Error("you can only supply one of 'only' or 'exclude'")
-  }
-
-  let databases = Object.values(DatabaseName)
-  if (only) {
-    databases = only
-  } else if (exclude) {
-    databases = databases.filter(db => !exclude.includes(db))
+  let databases: DatabaseName[] = []
+  if ("only" in opts) {
+    databases = opts.only
+  } else if ("plus" in opts) {
+    databases = Object.values(DatabaseName)
+      .filter(db => DATASOURCE_PLUS.includes(db))
+      .filter(db => !opts.exclude?.includes(db))
+  } else {
+    throw new Error("invalid options")
   }

   if (process.env.DATASOURCE) {
@@ -156,6 +179,7 @@ export function datasourceDescribe(opts: DatasourceDescribeOpts) {
     isMSSQL: dbName === DatabaseName.SQL_SERVER,
     isOracle: dbName === DatabaseName.ORACLE,
     isMariaDB: dbName === DatabaseName.MARIADB,
+    isElasticsearch: dbName === DatabaseName.ELASTICSEARCH,
   }))
 }

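With DatasourceDescribeOpts now a union, each call site picks exactly one mode: only with an explicit list, or plus: true for every datasource_plus source with an optional exclude. A minimal sketch of the two call shapes as used across the updated tests (the import path and test body are illustrative, not taken from a specific file in this commit):

import { DatabaseName, datasourceDescribe } from "../../../integrations/tests/utils"

// Either run only against an explicit list of datasources...
const esDescribes = datasourceDescribe({ only: [DatabaseName.ELASTICSEARCH] })

// ...or against all datasource_plus sources, optionally excluding some.
const sqlDescribes = datasourceDescribe({ plus: true, exclude: [DatabaseName.SQS] })

// datasourceDescribe returns an empty array when DATASOURCE filtering rules
// everything out (a dummy test is registered internally in that case), so
// call sites guard with a length check before describing.
if (sqlDescribes.length) {
  describe.each(sqlDescribes)("search", ({ dsProvider }) => {
    it("provisions a datasource", async () => {
      const ds = await dsProvider()
      expect(ds).toBeDefined()
    })
  })
}
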
@@ -23,7 +23,7 @@ export async function getDatasource(): Promise<Datasource> {
         })
         .withWaitStrategy(
           Wait.forSuccessfulCommand(
-            "/opt/mssql-tools/bin/sqlcmd -S localhost -U sa -P Password_123 -q 'SELECT 1'"
+            "/opt/mssql-tools18/bin/sqlcmd -C -S localhost -U sa -P Password_123 -q 'SELECT 1'"
           ).withStartupTimeout(20000)
         )
     )
@@ -44,7 +44,8 @@ export async function getDatasource(): Promise<Datasource> {
       user: "sa",
       password: "Password_123",
       options: {
-        encrypt: false,
+        encrypt: true,
+        trustServerCertificate: true,
       },
     },
   }

@@ -10,16 +10,13 @@ import {
 import { search } from "../../../../../sdk/app/rows/search"
 import { generator } from "@budibase/backend-core/tests"

-import {
-  DatabaseName,
-  datasourceDescribe,
-} from "../../../../../integrations/tests/utils"
+import { datasourceDescribe } from "../../../../../integrations/tests/utils"
 import { tableForDatasource } from "../../../../../tests/utilities/structures"

 // These test cases are only for things that cannot be tested through the API
 // (e.g. limiting searches to returning specific fields). If it's possible to
 // test through the API, it should be done there instead.
-const descriptions = datasourceDescribe({ exclude: [DatabaseName.MONGODB] })
+const descriptions = datasourceDescribe({ plus: true })

 if (descriptions.length) {
   describe.each(descriptions)(

@@ -8,15 +8,7 @@ import {
   logging,
   env as coreEnv,
 } from "@budibase/backend-core"
-import {
-  Ctx,
-  User,
-  EmailInvite,
-  EmailAttachment,
-  SendEmailResponse,
-  SendEmailRequest,
-  EmailTemplatePurpose,
-} from "@budibase/types"
+import { Ctx, User, EmailInvite, EmailAttachment } from "@budibase/types"

 interface Request {
   ctx?: Ctx
@@ -118,23 +110,25 @@ export async function sendSmtpEmail({
   invite?: EmailInvite
 }) {
   // tenant ID will be set in header
-  const request: SendEmailRequest = {
-    email: to,
-    from,
-    contents,
-    subject,
-    cc,
-    bcc,
-    purpose: EmailTemplatePurpose.CUSTOM,
-    automation,
-    invite,
-    attachments,
-  }
   const response = await fetch(
     checkSlashesInUrl(env.WORKER_URL + `/api/global/email/send`),
-    createRequest({ method: "POST", body: request })
+    createRequest({
+      method: "POST",
+      body: {
+        email: to,
+        from,
+        contents,
+        subject,
+        cc,
+        bcc,
+        purpose: "custom",
+        automation,
+        invite,
+        attachments,
+      },
+    })
   )
-  return (await checkResponse(response, "send email")) as SendEmailResponse
+  return checkResponse(response, "send email")
 }

 export async function removeAppFromUserRoles(ctx: Ctx, appId: string) {

@@ -17,7 +17,6 @@
     "@budibase/nano": "10.1.5",
     "@types/json-schema": "^7.0.15",
     "@types/koa": "2.13.4",
-    "@types/nodemailer": "^6.4.17",
     "@types/redlock": "4.0.7",
     "koa-useragent": "^4.1.0",
     "rimraf": "3.0.2",

@@ -1,5 +1,4 @@
 import { EmailAttachment, EmailInvite } from "../../../documents"
-import SMTPTransport from "nodemailer/lib/smtp-transport"

 export enum EmailTemplatePurpose {
   CORE = "core",
@@ -13,17 +12,17 @@ export enum EmailTemplatePurpose {
 export interface SendEmailRequest {
   workspaceId?: string
   email: string
-  userId?: string
+  userId: string
   purpose: EmailTemplatePurpose
   contents?: string
   from?: string
   subject: string
-  cc?: string
-  bcc?: string
+  cc?: boolean
+  bcc?: boolean
   automation?: boolean
   invite?: EmailInvite
   attachments?: EmailAttachment[]
 }
-export interface SendEmailResponse extends SMTPTransport.SentMessageInfo {
+export interface SendEmailResponse extends Record<string, any> {
   message: string
 }

@@ -1,10 +1,10 @@
 import { Document } from "../../document"
 import { User } from "../../global"
+import { ReadStream } from "fs"
 import { Row } from "../row"
 import { Table } from "../table"
 import { AutomationStep, AutomationTrigger } from "./schema"
 import { ContextEmitter } from "../../../sdk"
-import { Readable } from "stream"

 export enum AutomationIOType {
   OBJECT = "object",
@@ -108,8 +108,8 @@ export interface SendEmailOpts {
   subject: string
   // info Pass in a structure of information to be stored alongside the invitation.
   info?: any
-  cc?: string
-  bcc?: string
+  cc?: boolean
+  bcc?: boolean
   automation?: boolean
   invite?: EmailInvite
   attachments?: EmailAttachment[]
@@ -269,7 +269,7 @@ export type AutomationAttachment = {

 export type AutomationAttachmentContent = {
   filename: string
-  content: Readable
+  content: ReadStream | NodeJS.ReadableStream
 }

 export type BucketedContent = AutomationAttachmentContent & {

@@ -86,7 +86,6 @@
     "@types/koa__router": "12.0.4",
     "@types/lodash": "4.14.200",
     "@types/node-fetch": "2.6.4",
-    "@types/nodemailer": "^6.4.17",
     "@types/server-destroy": "1.0.1",
     "@types/supertest": "2.0.14",
     "@types/uuid": "8.3.4",

@@ -24,13 +24,10 @@ export async function sendEmail(
     invite,
     attachments,
   } = ctx.request.body
-  let user: User | undefined = undefined
+  let user: any
   if (userId) {
     const db = tenancy.getGlobalDB()
-    user = await db.tryGet<User>(userId)
-  }
-  if (!user) {
-    ctx.throw(404, "User not found.")
+    user = await db.get<User>(userId)
   }
   const response = await sendEmailFn(email, purpose, {
     workspaceId,

@@ -13,8 +13,7 @@ import { configs, cache, objectStore } from "@budibase/backend-core"
 import ical from "ical-generator"
 import _ from "lodash"

-import nodemailer from "nodemailer"
-import SMTPTransport from "nodemailer/lib/smtp-transport"
+const nodemailer = require("nodemailer")

 const TEST_MODE = env.ENABLE_EMAIL_TEST_MODE && env.isDev()
 const TYPE = TemplateType.EMAIL
@@ -27,7 +26,7 @@ const FULL_EMAIL_PURPOSES = [
 ]

 function createSMTPTransport(config?: SMTPInnerConfig) {
-  let options: SMTPTransport.Options
+  let options: any
   let secure = config?.secure
   // default it if not specified
   if (secure == null) {
@@ -162,7 +161,7 @@ export async function sendEmail(
   const code = await getLinkCode(purpose, email, opts.user, opts?.info)
   let context = await getSettingsTemplateContext(purpose, code)

-  let message: Parameters<typeof transport.sendMail>[0] = {
+  let message: any = {
     from: opts?.from || config?.from,
     html: await buildEmail(purpose, email, context, {
       user: opts?.user,

yarn.lock
@@ -2785,9 +2785,9 @@
     through2 "^2.0.0"

 "@budibase/pro@npm:@budibase/pro@latest":
-  version "3.4.16"
-  resolved "https://registry.yarnpkg.com/@budibase/pro/-/pro-3.4.16.tgz#c482a400e27b7e89ca73092c4c81bdeac1d24581"
-  integrity sha512-8ECnqOh9jQ10KlQEwmKPFcoVGE+2gGgSybj+vbshwDp1zAW76doyMR2DMNjEatNpWVnpoMnTkDWtE9aqQ5v0vQ==
+  version "3.4.12"
+  resolved "https://registry.yarnpkg.com/@budibase/pro/-/pro-3.4.12.tgz#60e630944de4e2de970a04179d8f0f57d48ce75e"
+  integrity sha512-msUBmcWxRDg+ugjZvd27XudERQqtQRdiARsO8MaDVTcp5ejIXgshEIVVshHOCj3hcbRblw9pXvBIMI53iTMUsA==
   dependencies:
     "@anthropic-ai/sdk" "^0.27.3"
     "@budibase/backend-core" "*"
@@ -6775,13 +6775,6 @@
   dependencies:
     undici-types "~6.19.2"

-"@types/nodemailer@^6.4.17":
-  version "6.4.17"
-  resolved "https://registry.yarnpkg.com/@types/nodemailer/-/nodemailer-6.4.17.tgz#5c82a42aee16a3dd6ea31446a1bd6a447f1ac1a4"
-  integrity sha512-I9CCaIp6DTldEg7vyUTZi8+9Vo0hi1/T8gv3C89yk1rSAAzoKQ8H8ki/jBYJSFoH/BisgLP8tkZMlQ91CIquww==
-  dependencies:
-    "@types/node" "*"
-
 "@types/normalize-package-data@^2.4.0":
   version "2.4.1"
   resolved "https://registry.yarnpkg.com/@types/normalize-package-data/-/normalize-package-data-2.4.1.tgz#d3357479a0fdfdd5907fe67e17e0a85c906e1301"
@@ -12789,10 +12782,10 @@ html-tag@^2.0.0:
     is-self-closing "^1.0.1"
     kind-of "^6.0.0"

-html5-qrcode@^2.2.1:
-  version "2.3.7"
-  resolved "https://registry.yarnpkg.com/html5-qrcode/-/html5-qrcode-2.3.7.tgz#09ed2ca7473a47bd551088c15fcfcb7cb409a5be"
-  integrity sha512-Jmlok9Ynm49hgVXkdupWryf8o430proIFoQsRl1LmTg4Rq461W72omylR9yw9tsEMtswMEw3wacUM5y0agOBQA==
+html5-qrcode@^2.3.8:
+  version "2.3.8"
+  resolved "https://registry.yarnpkg.com/html5-qrcode/-/html5-qrcode-2.3.8.tgz#0b0cdf7a9926cfd4be530e13a51db47592adfa0d"
+  integrity sha512-jsr4vafJhwoLVEDW3n1KvPnCCXWaQfRng0/EEYk1vNcQGcG/htAdhJX0be8YyqMoSz7+hZvOZSTAepsabiuhiQ==

 htmlparser2@^8.0.0:
   version "8.0.1"