Merge remote-tracking branch 'refs/remotes/origin/feat/pc-ts-conversions' into feat/pc-ts-conversions

Peter Clement 2025-03-03 09:00:24 +00:00
commit c77e4c4f09
56 changed files with 1486 additions and 769 deletions

View File

@ -30,7 +30,7 @@ env:
jobs:
lint:
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
steps:
- name: Checkout repo
uses: actions/checkout@v4
@ -47,7 +47,7 @@ jobs:
- run: yarn lint
build:
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
steps:
- name: Checkout repo
uses: actions/checkout@v4
@ -76,7 +76,7 @@ jobs:
fi
helm-lint:
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
steps:
- name: Checkout repo
uses: actions/checkout@v4
@ -88,7 +88,7 @@ jobs:
- run: cd charts/budibase && helm lint .
test-libraries:
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
steps:
- name: Checkout repo
uses: actions/checkout@v4
@ -122,7 +122,7 @@ jobs:
fi
test-worker:
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
steps:
- name: Checkout repo
uses: actions/checkout@v4
@ -151,11 +151,22 @@ jobs:
yarn test --verbose --reporters=default --reporters=github-actions
test-server:
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
strategy:
matrix:
datasource:
[mssql, mysql, postgres, postgres_legacy, mongodb, mariadb, oracle, sqs, none]
[
mssql,
mysql,
postgres,
postgres_legacy,
mongodb,
mariadb,
oracle,
sqs,
elasticsearch,
none,
]
steps:
- name: Checkout repo
uses: actions/checkout@v4
@ -192,6 +203,8 @@ jobs:
docker pull budibase/oracle-database:23.2-slim-faststart
elif [ "${{ matrix.datasource }}" == "postgres_legacy" ]; then
docker pull postgres:9.5.25
elif [ "${{ matrix.datasource }}" == "elasticsearch" ]; then
docker pull elasticsearch@${{ steps.dotenv.outputs.ELASTICSEARCH_SHA }}
fi
docker pull minio/minio &
docker pull redis &
@ -240,7 +253,7 @@ jobs:
yarn test --filter $FILTER --verbose --reporters=default --reporters=github-actions
check-pro-submodule:
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
if: inputs.run_as_oss != true && (github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase')
steps:
- name: Checkout repo and submodules
@ -299,7 +312,7 @@ jobs:
fi
check-lockfile:
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
if: inputs.run_as_oss != true && (github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase')
steps:
- name: Checkout repo

View File

@ -108,7 +108,7 @@ You can install them following any of the steps described below:
- Installation steps: https://asdf-vm.com/guide/getting-started.html
- asdf plugin add nodejs
- asdf plugin add python
- npm install -g yarn
- asdf plugin add yarn
### Using NVM and pyenv

View File

@ -92,7 +92,7 @@ COPY hosting/single/ssh/sshd_config /etc/
COPY hosting/single/ssh/ssh_setup.sh /tmp
# setup letsencrypt certificate
RUN apt-get install -y certbot python3-certbot-nginx
RUN apt-get update && apt-get install -y certbot python3-certbot-nginx
COPY hosting/letsencrypt /app/letsencrypt
RUN chmod +x /app/letsencrypt/certificate-request.sh /app/letsencrypt/certificate-renew.sh

View File

@ -1,6 +1,6 @@
{
"$schema": "node_modules/lerna/schemas/lerna-schema.json",
"version": "3.4.16",
"version": "3.4.21",
"npmClient": "yarn",
"concurrency": 20,
"command": {

View File

@ -90,7 +90,7 @@
$: requestEval(runtimeExpression, context, snippets)
$: bindingHelpers = new BindingHelpers(getCaretPosition, insertAtPos)
$: bindingOptions = bindingsToCompletions(bindings, editorMode)
$: bindingOptions = bindingsToCompletions(enrichedBindings, editorMode)
$: helperOptions = allowHelpers ? getHelperCompletions(editorMode) : []
$: snippetsOptions =
usingJS && useSnippets && snippets?.length ? snippets : []

View File

@ -81,11 +81,11 @@ export const screenComponentErrorList = derived(
const errors: UIComponentError[] = []
function checkComponentErrors(component: Component, ancestors: string[]) {
errors.push(...getMissingAncestors(component, definitions, ancestors))
errors.push(
...getInvalidDatasources(screen, component, datasources, definitions)
)
errors.push(...getMissingRequiredSettings(component, definitions))
errors.push(...getMissingAncestors(component, definitions, ancestors))
for (const child of component._children || []) {
checkComponentErrors(child, [...ancestors, component._component])
@ -239,7 +239,10 @@ function getMissingAncestors(
ancestors: string[]
): UIComponentError[] {
const definition = definitions[component._component]
if (ancestors.some(a => !a.startsWith(BudibasePrefix))) {
// We don't have a way to know what components are used within a plugin component
return []
}
if (!definition?.requiredAncestors?.length) {
return []
}

View File

@ -4492,6 +4492,12 @@
}
]
},
{
"type": "text",
"label": "Zoom level",
"key": "defaultZoom",
"defaultValue": "1"
},
{
"type": "event",
"label": "On change",

View File

@ -28,7 +28,7 @@
"apexcharts": "^3.48.0",
"dayjs": "^1.10.8",
"downloadjs": "1.4.7",
"html5-qrcode": "^2.2.1",
"html5-qrcode": "^2.3.8",
"leaflet": "^1.7.1",
"sanitize-html": "^2.13.0",
"screenfull": "^6.0.1",

View File

@ -20,6 +20,7 @@
export let beepFrequency = 2637
export let customFrequency = 1046
export let preferredCamera = "environment"
export let defaultZoom = 1
export let validator
const dispatch = createEventDispatcher()
@ -58,6 +59,14 @@
html5QrCode
.start(cameraSetting, cameraConfig, onScanSuccess)
.then(() => {
if (defaultZoom > 1) {
const cameraOptions =
html5QrCode.getRunningTrackCameraCapabilities()
const zoom = cameraOptions.zoomFeature()
if (zoom.isSupported()) {
zoom.apply(defaultZoom)
}
}
resolve({ initialised: true })
})
.catch(err => {

View File

@ -17,6 +17,7 @@
export let beepFrequency
export let customFrequency
export let preferredCamera
export let defaultZoom
export let helpText = null
let fieldState
@ -56,6 +57,7 @@
{beepFrequency}
{customFrequency}
{preferredCamera}
{defaultZoom}
validator={fieldState.validator}
/>
{/if}

View File

@ -1,142 +0,0 @@
import ClientApp from "./components/ClientApp.svelte"
import UpdatingApp from "./components/UpdatingApp.svelte"
import {
builderStore,
appStore,
blockStore,
componentStore,
environmentStore,
dndStore,
eventStore,
hoverStore,
stateStore,
routeStore,
} from "./stores"
import loadSpectrumIcons from "@budibase/bbui/spectrum-icons-vite.js"
import { get } from "svelte/store"
import { initWebsocket } from "./websocket.js"
// Provide svelte and svelte/internal as globals for custom components
import * as svelte from "svelte"
import * as internal from "svelte/internal"
window.svelte_internal = internal
window.svelte = svelte
// Initialise spectrum icons
loadSpectrumIcons()
let app
const loadBudibase = async () => {
// Update builder store with any builder flags
builderStore.set({
...get(builderStore),
inBuilder: !!window["##BUDIBASE_IN_BUILDER##"],
layout: window["##BUDIBASE_PREVIEW_LAYOUT##"],
screen: window["##BUDIBASE_PREVIEW_SCREEN##"],
selectedComponentId: window["##BUDIBASE_SELECTED_COMPONENT_ID##"],
previewId: window["##BUDIBASE_PREVIEW_ID##"],
theme: window["##BUDIBASE_PREVIEW_THEME##"],
customTheme: window["##BUDIBASE_PREVIEW_CUSTOM_THEME##"],
previewDevice: window["##BUDIBASE_PREVIEW_DEVICE##"],
navigation: window["##BUDIBASE_PREVIEW_NAVIGATION##"],
hiddenComponentIds: window["##BUDIBASE_HIDDEN_COMPONENT_IDS##"],
usedPlugins: window["##BUDIBASE_USED_PLUGINS##"],
location: window["##BUDIBASE_LOCATION##"],
snippets: window["##BUDIBASE_SNIPPETS##"],
componentErrors: window["##BUDIBASE_COMPONENT_ERRORS##"],
})
// Set app ID - this window flag is set by both the preview and the real
// server rendered app HTML
appStore.actions.setAppId(window["##BUDIBASE_APP_ID##"])
// Set the flag used to determine if the app is being loaded via an iframe
appStore.actions.setAppEmbedded(
window["##BUDIBASE_APP_EMBEDDED##"] === "true"
)
if (window.MIGRATING_APP) {
new UpdatingApp({
target: window.document.body,
})
return
}
// Fetch environment info
if (!get(environmentStore)?.loaded) {
await environmentStore.actions.fetchEnvironment()
}
// Register handler for runtime events from the builder
window.handleBuilderRuntimeEvent = (type, data) => {
if (!window["##BUDIBASE_IN_BUILDER##"]) {
return
}
if (type === "event-completed") {
eventStore.actions.resolveEvent(data)
} else if (type === "eject-block") {
const block = blockStore.actions.getBlock(data)
block?.eject()
} else if (type === "dragging-new-component") {
const { dragging, component } = data
if (dragging) {
const definition =
componentStore.actions.getComponentDefinition(component)
dndStore.actions.startDraggingNewComponent({ component, definition })
} else {
dndStore.actions.reset()
}
} else if (type === "request-context") {
const { selectedComponentInstance, screenslotInstance } =
get(componentStore)
const instance = selectedComponentInstance || screenslotInstance
const context = instance?.getDataContext()
let stringifiedContext = null
try {
stringifiedContext = JSON.stringify(context)
} catch (error) {
// Ignore - invalid context
}
eventStore.actions.dispatchEvent("provide-context", {
context: stringifiedContext,
})
} else if (type === "hover-component") {
hoverStore.actions.hoverComponent(data, false)
} else if (type === "builder-meta") {
builderStore.actions.setMetadata(data)
} else if (type === "builder-state") {
const [[key, value]] = Object.entries(data)
stateStore.actions.setValue(key, value)
} else if (type === "builder-url-test-data") {
const { route, testValue } = data
routeStore.actions.setTestUrlParams(route, testValue)
}
}
// Register any custom components
if (window["##BUDIBASE_CUSTOM_COMPONENTS##"]) {
window["##BUDIBASE_CUSTOM_COMPONENTS##"].forEach(component => {
componentStore.actions.registerCustomComponent(component)
})
}
// Make a callback available for custom component bundles to register
// themselves at runtime
window.registerCustomComponent =
componentStore.actions.registerCustomComponent
// Initialise websocket
initWebsocket()
// Create app if one hasn't been created yet
if (!app) {
app = new ClientApp({
target: window.document.body,
})
}
}
// Attach to window so the HTML template can call this when it loads
window.loadBudibase = loadBudibase

@ -1 +1 @@
Subproject commit 45f5673d5e5ab3c22deb6663cea2e31a628aa133
Subproject commit e3843dd4eaced68ae063355b77df200dbc789c98

View File

@ -1,24 +0,0 @@
const elastic: any = {}
elastic.Client = function () {
this.index = jest.fn().mockResolvedValue({ body: [] })
this.search = jest.fn().mockResolvedValue({
body: {
hits: {
hits: [
{
_source: {
name: "test",
},
},
],
},
},
})
this.update = jest.fn().mockResolvedValue({ body: [] })
this.delete = jest.fn().mockResolvedValue({ body: [] })
this.close = jest.fn()
}
module.exports = elastic

View File

@ -1,5 +1,6 @@
MSSQL_SHA=sha256:3b913841850a4d57fcfcb798be06acc88ea0f2acc5418bc0c140a43e91c4a545
MSSQL_SHA=sha256:d252932ef839c24c61c1139cc98f69c85ca774fa7c6bfaaa0015b7eb02b9dc87
MYSQL_SHA=sha256:9de9d54fecee6253130e65154b930978b1fcc336bcc86dfd06e89b72a2588ebe
POSTGRES_SHA=sha256:bd0d8e485d1aca439d39e5ea99b931160bd28d862e74c786f7508e9d0053090e
MONGODB_SHA=sha256:afa36bca12295b5f9dae68a493c706113922bdab520e901bd5d6c9d7247a1d8d
MARIADB_SHA=sha256:e59ba8783bf7bc02a4779f103bb0d8751ac0e10f9471089709608377eded7aa8
ELASTICSEARCH_SHA=sha256:9a6443f55243f6acbfeb4a112d15eb3b9aac74bf25e0e39fa19b3ddd3a6879d0

View File

@ -11,6 +11,7 @@ import {
UploadPluginResponse,
FetchPluginResponse,
DeletePluginResponse,
PluginMetadata,
} from "@budibase/types"
import env from "../../../environment"
import { clientAppSocket } from "../../../websockets"
@ -53,10 +54,11 @@ export async function create(
const { source, url, headers, githubToken } = ctx.request.body
try {
let metadata
let directory
let metadata: PluginMetadata
let directory: string
// Generating random name as a backup and needed for url
let name = "PLUGIN_" + Math.floor(100000 + Math.random() * 900000)
const name = "PLUGIN_" + Math.floor(100000 + Math.random() * 900000)
switch (source) {
case PluginSource.NPM: {
@ -81,12 +83,14 @@ export async function create(
directory = directoryUrl
break
}
default:
ctx.throw(400, "Invalid source")
}
pluginCore.validate(metadata?.schema)
pluginCore.validate(metadata.schema)
// Only allow components in cloud
if (!env.SELF_HOSTED && metadata?.schema?.type !== PluginType.COMPONENT) {
if (!env.SELF_HOSTED && metadata.schema?.type !== PluginType.COMPONENT) {
throw new Error(
"Only component plugins are supported outside of self-host"
)

View File

@ -165,7 +165,8 @@ describe("/datasources", () => {
})
const descriptions = datasourceDescribe({
exclude: [DatabaseName.MONGODB, DatabaseName.SQS],
plus: true,
exclude: [DatabaseName.SQS],
})
if (descriptions.length) {
@ -590,7 +591,8 @@ if (descriptions.length) {
}
const datasources = datasourceDescribe({
exclude: [DatabaseName.MONGODB, DatabaseName.SQS, DatabaseName.ORACLE],
plus: true,
exclude: [DatabaseName.SQS, DatabaseName.ORACLE],
})
if (datasources.length) {

View File

@ -9,7 +9,8 @@ import { Knex } from "knex"
import { generator } from "@budibase/backend-core/tests"
const descriptions = datasourceDescribe({
exclude: [DatabaseName.MONGODB, DatabaseName.SQS],
plus: true,
exclude: [DatabaseName.SQS],
})
if (descriptions.length) {

View File

@ -1,9 +1,6 @@
import * as setup from "./utilities"
import {
DatabaseName,
datasourceDescribe,
} from "../../../integrations/tests/utils"
import { datasourceDescribe } from "../../../integrations/tests/utils"
import tk from "timekeeper"
import emitter from "../../../../src/events"
@ -80,7 +77,7 @@ function encodeJS(binding: string) {
return `{{ js "${Buffer.from(binding).toString("base64")}"}}`
}
const descriptions = datasourceDescribe({ exclude: [DatabaseName.MONGODB] })
const descriptions = datasourceDescribe({ plus: true })
if (descriptions.length) {
describe.each(descriptions)(

View File

@ -1,8 +1,5 @@
import { tableForDatasource } from "../../../tests/utilities/structures"
import {
DatabaseName,
datasourceDescribe,
} from "../../../integrations/tests/utils"
import { datasourceDescribe } from "../../../integrations/tests/utils"
import {
context,
db as dbCore,
@ -60,7 +57,7 @@ jest.mock("@budibase/pro", () => ({
},
}))
const descriptions = datasourceDescribe({ exclude: [DatabaseName.MONGODB] })
const descriptions = datasourceDescribe({ plus: true })
if (descriptions.length) {
describe.each(descriptions)(
@ -3553,6 +3550,31 @@ if (descriptions.length) {
limit: 1,
}).toContainExactly([row])
})
isInternal &&
describe("search by _id for relations", () => {
it("can filter by the related _id", async () => {
await expectSearch({
query: {
equal: { "rel._id": row.rel[0]._id },
},
}).toContainExactly([row])
await expectSearch({
query: {
equal: { "rel._id": row.rel[1]._id },
},
}).toContainExactly([row])
})
it("can filter by the related _id and find nothing", async () => {
await expectSearch({
query: {
equal: { "rel._id": "rel_none" },
},
}).toFindNothing()
})
})
})
!isInternal &&

View File

@ -1,11 +1,3 @@
// Directly mock the AWS SDK
jest.mock("@aws-sdk/s3-request-presigner", () => ({
getSignedUrl: jest.fn(() => {
return `http://example.com`
}),
}))
jest.mock("@aws-sdk/client-s3")
import { Datasource, SourceName } from "@budibase/types"
import { setEnv } from "../../../environment"
import { getRequest, getConfig, afterAll as _afterAll } from "./utilities"
@ -92,7 +84,17 @@ describe("/static", () => {
.set(config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(200)
expect(res.body.signedUrl).toEqual("http://example.com")
expect(res.body.signedUrl).toStartWith(
"https://foo.s3.eu-west-1.amazonaws.com/bar?"
)
expect(res.body.signedUrl).toContain("X-Amz-Algorithm=AWS4-HMAC-SHA256")
expect(res.body.signedUrl).toContain("X-Amz-Credential=bb")
expect(res.body.signedUrl).toContain("X-Amz-Date=")
expect(res.body.signedUrl).toContain("X-Amz-Signature=")
expect(res.body.signedUrl).toContain("X-Amz-Expires=900")
expect(res.body.signedUrl).toContain("X-Amz-SignedHeaders=host")
expect(res.body.publicUrl).toEqual(
`https://${bucket}.s3.eu-west-1.amazonaws.com/${key}`
)
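With the presigner mock removed, this test now exercises the real AWS SDK v3 signer and asserts on the shape of a genuine SigV4 URL. A minimal sketch of how such a URL is produced, assuming the v3 @aws-sdk/client-s3 and @aws-sdk/s3-request-presigner packages (the bucket, key and credentials below are illustrative test fixtures, not real values):

import { S3Client, GetObjectCommand } from "@aws-sdk/client-s3"
import { getSignedUrl } from "@aws-sdk/s3-request-presigner"

// Region and credentials mirror what the assertions above imply.
const client = new S3Client({
  region: "eu-west-1",
  credentials: { accessKeyId: "bb-test-key", secretAccessKey: "secret" },
})

async function presignDownload(bucket: string, key: string): Promise<string> {
  const command = new GetObjectCommand({ Bucket: bucket, Key: key })
  // expiresIn is in seconds; 900 matches the X-Amz-Expires=900 assertion.
  return getSignedUrl(client, command, { expiresIn: 900 })
}

// e.g. await presignDownload("foo", "bar") yields something like
// https://foo.s3.eu-west-1.amazonaws.com/bar?X-Amz-Algorithm=AWS4-HMAC-SHA256&...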

View File

@ -28,17 +28,14 @@ import * as setup from "./utilities"
import * as uuid from "uuid"
import { generator } from "@budibase/backend-core/tests"
import {
DatabaseName,
datasourceDescribe,
} from "../../../integrations/tests/utils"
import { datasourceDescribe } from "../../../integrations/tests/utils"
import { tableForDatasource } from "../../../tests/utilities/structures"
import timekeeper from "timekeeper"
const { basicTable } = setup.structures
const ISO_REGEX_PATTERN = /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$/
const descriptions = datasourceDescribe({ exclude: [DatabaseName.MONGODB] })
const descriptions = datasourceDescribe({ plus: true })
if (descriptions.length) {
describe.each(descriptions)(

View File

@ -37,17 +37,14 @@ import {
ViewV2Type,
} from "@budibase/types"
import { generator, mocks } from "@budibase/backend-core/tests"
import {
DatabaseName,
datasourceDescribe,
} from "../../../integrations/tests/utils"
import { datasourceDescribe } from "../../../integrations/tests/utils"
import merge from "lodash/merge"
import { quotas } from "@budibase/pro"
import { context, db, events, roles, setEnv } from "@budibase/backend-core"
import { mockChatGPTResponse } from "../../../tests/utilities/mocks/openai"
import nock from "nock"
const descriptions = datasourceDescribe({ exclude: [DatabaseName.MONGODB] })
const descriptions = datasourceDescribe({ plus: true })
if (descriptions.length) {
describe.each(descriptions)(

View File

@ -1,5 +1,5 @@
import * as automation from "../index"
import { Table, AutomationStatus } from "@budibase/types"
import { Table, AutomationStatus, EmptyFilterOption } from "@budibase/types"
import { createAutomationBuilder } from "./utilities/AutomationTestBuilder"
import TestConfiguration from "../../tests/utilities/TestConfiguration"
@ -280,4 +280,23 @@ describe("Branching automations", () => {
expect(results.steps[2].outputs.message).toContain("Special user")
})
it("should not fail with empty conditions", async () => {
const results = await createAutomationBuilder(config)
.onAppAction()
.branch({
specialBranch: {
steps: stepBuilder => stepBuilder.serverLog({ text: "Hello!" }),
condition: {
onEmptyFilter: EmptyFilterOption.RETURN_NONE,
},
},
})
.test({ fields: { test_trigger: true } })
expect(results.steps[0].outputs.success).toEqual(false)
expect(results.steps[0].outputs.status).toEqual(
AutomationStatus.NO_CONDITION_MET
)
})
})

View File

@ -9,7 +9,8 @@ import { generator } from "@budibase/backend-core/tests"
import { createAutomationBuilder } from "../utilities/AutomationTestBuilder"
const descriptions = datasourceDescribe({
exclude: [DatabaseName.MONGODB, DatabaseName.SQS],
plus: true,
exclude: [DatabaseName.SQS],
})
if (descriptions.length) {

View File

@ -1,3 +1,4 @@
import { SendEmailResponse } from "@budibase/types"
import TestConfiguration from "../../../tests/utilities/TestConfiguration"
import * as workerRequests from "../../../utilities/workerRequests"
@ -5,17 +6,18 @@ jest.mock("../../../utilities/workerRequests", () => ({
sendSmtpEmail: jest.fn(),
}))
function generateResponse(to: string, from: string) {
function generateResponse(to: string, from: string): SendEmailResponse {
return {
success: true,
response: {
accepted: [to],
envelope: {
from: from,
to: [to],
},
message: `Email sent to ${to}.`,
message: `Email sent to ${to}.`,
accepted: [to],
envelope: {
from: from,
to: [to],
},
messageId: "messageId",
pending: [],
rejected: [],
response: "response",
}
}

View File

@ -10,7 +10,7 @@ import {
import { Client, ClientOptions } from "@elastic/elasticsearch"
import { HOST_ADDRESS } from "./utils"
interface ElasticsearchConfig {
export interface ElasticsearchConfig {
url: string
ssl?: boolean
ca?: string
@ -99,9 +99,9 @@ const SCHEMA: Integration = {
},
}
class ElasticSearchIntegration implements IntegrationBase {
export class ElasticSearchIntegration implements IntegrationBase {
private config: ElasticsearchConfig
private client
private client: Client
constructor(config: ElasticsearchConfig) {
this.config = config
@ -132,20 +132,23 @@ class ElasticSearchIntegration implements IntegrationBase {
}
}
async create(query: { index: string; json: object }) {
const { index, json } = query
async create(query: {
index: string
json: object
extra?: Record<string, string>
}) {
const { index, json, extra } = query
try {
const result = await this.client.index({
index,
body: json,
...extra,
})
return result.body
} catch (err) {
console.error("Error writing to elasticsearch", err)
throw err
} finally {
await this.client.close()
}
}
@ -160,41 +163,46 @@ class ElasticSearchIntegration implements IntegrationBase {
} catch (err) {
console.error("Error querying elasticsearch", err)
throw err
} finally {
await this.client.close()
}
}
async update(query: { id: string; index: string; json: object }) {
const { id, index, json } = query
async update(query: {
id: string
index: string
json: object
extra?: Record<string, string>
}) {
const { id, index, json, extra } = query
try {
const result = await this.client.update({
id,
index,
body: json,
...extra,
})
return result.body
} catch (err) {
console.error("Error querying elasticsearch", err)
throw err
} finally {
await this.client.close()
}
}
async delete(query: { id: string; index: string }) {
const { id, index } = query
async delete(query: {
id: string
index: string
extra?: Record<string, string>
}) {
const { id, index, extra } = query
try {
const result = await this.client.delete({
id,
index,
...extra,
})
return result.body
} catch (err) {
console.error("Error deleting from elasticsearch", err)
throw err
} finally {
await this.client.close()
}
}
}
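The extra parameter added to create, update and delete is spread directly into the client call, which is how the integration tests below pass refresh: "true" so that writes are visible to the search that immediately follows. A standalone sketch of the same pattern, assuming the v7-style @elastic/elasticsearch client used here (the result.body access matches that client generation); the index name and document are illustrative:

import { Client } from "@elastic/elasticsearch"

const client = new Client({ node: "http://127.0.0.1:9200" })

async function createDoc(
  index: string,
  json: object,
  extra?: Record<string, string>
) {
  // Anything in `extra` is forwarded as a native client option,
  // e.g. { refresh: "true" } to make the write searchable at once.
  const result = await client.index({ index, body: json, ...extra })
  return result.body
}

// await createDoc("test-index", { name: "Hello" }, { refresh: "true" })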

View File

@ -1,83 +1,81 @@
import { default as ElasticSearchIntegration } from "../elasticsearch"
import { Datasource } from "@budibase/types"
import { ElasticsearchConfig, ElasticSearchIntegration } from "../elasticsearch"
import { generator } from "@budibase/backend-core/tests"
import { DatabaseName, datasourceDescribe } from "./utils"
jest.mock("@elastic/elasticsearch")
const describes = datasourceDescribe({ only: [DatabaseName.ELASTICSEARCH] })
class TestConfiguration {
integration: any
if (describes.length) {
describe.each(describes)("Elasticsearch Integration", ({ dsProvider }) => {
let datasource: Datasource
let integration: ElasticSearchIntegration
constructor(config: any = {}) {
this.integration = new ElasticSearchIntegration.integration(config)
}
let index: string
beforeAll(async () => {
const ds = await dsProvider()
datasource = ds.datasource!
})
beforeEach(() => {
index = generator.guid()
integration = new ElasticSearchIntegration(
datasource.config! as ElasticsearchConfig
)
})
it("can create a record", async () => {
await integration.create({
index,
json: { name: "Hello" },
extra: { refresh: "true" },
})
const records = await integration.read({
index,
json: { query: { match_all: {} } },
})
expect(records).toEqual([{ name: "Hello" }])
})
it("can update a record", async () => {
const create = await integration.create({
index,
json: { name: "Hello" },
extra: { refresh: "true" },
})
await integration.update({
id: create._id,
index,
json: { doc: { name: "World" } },
extra: { refresh: "true" },
})
const records = await integration.read({
index,
json: { query: { match_all: {} } },
})
expect(records).toEqual([{ name: "World" }])
})
it("can delete a record", async () => {
const create = await integration.create({
index,
json: { name: "Hello" },
extra: { refresh: "true" },
})
await integration.delete({
id: create._id,
index,
extra: { refresh: "true" },
})
const records = await integration.read({
index,
json: { query: { match_all: {} } },
})
expect(records).toEqual([])
})
})
}
describe("Elasticsearch Integration", () => {
let config: any
let indexName = "Users"
beforeEach(() => {
config = new TestConfiguration()
})
it("calls the create method with the correct params", async () => {
const body = {
name: "Hello",
}
await config.integration.create({
index: indexName,
json: body,
})
expect(config.integration.client.index).toHaveBeenCalledWith({
index: indexName,
body,
})
})
it("calls the read method with the correct params", async () => {
const body = {
query: {
term: {
name: "kimchy",
},
},
}
const response = await config.integration.read({
index: indexName,
json: body,
})
expect(config.integration.client.search).toHaveBeenCalledWith({
index: indexName,
body,
})
expect(response).toEqual(expect.any(Array))
})
it("calls the update method with the correct params", async () => {
const body = {
name: "updated",
}
const response = await config.integration.update({
id: "1234",
index: indexName,
json: body,
})
expect(config.integration.client.update).toHaveBeenCalledWith({
id: "1234",
index: indexName,
body,
})
expect(response).toEqual(expect.any(Array))
})
it("calls the delete method with the correct params", async () => {
const body = {
id: "1234",
}
const response = await config.integration.delete(body)
expect(config.integration.client.delete).toHaveBeenCalledWith(body)
expect(response).toEqual(expect.any(Array))
})
})

View File

@ -0,0 +1,54 @@
import { Datasource, SourceName } from "@budibase/types"
import { GenericContainer, Wait } from "testcontainers"
import { testContainerUtils } from "@budibase/backend-core/tests"
import { startContainer } from "."
import { ELASTICSEARCH_IMAGE } from "./images"
import { ElasticsearchConfig } from "../../elasticsearch"
let ports: Promise<testContainerUtils.Port[]>
export async function getDatasource(): Promise<Datasource> {
if (!ports) {
ports = startContainer(
new GenericContainer(ELASTICSEARCH_IMAGE)
.withExposedPorts(9200)
.withEnvironment({
// We need to set the discovery type to single-node to avoid the
// cluster waiting for other nodes to join before starting up.
"discovery.type": "single-node",
// We disable security to avoid having to do any auth against the
// container, and to disable SSL. With SSL enabled it uses a self
// signed certificate that we'd have to ignore anyway.
"xpack.security.enabled": "false",
})
.withWaitStrategy(
Wait.forHttp(
// Single node clusters never reach status green, so we wait for
// yellow instead.
"/_cluster/health?wait_for_status=yellow&timeout=10s",
9200
).withStartupTimeout(60000)
)
// We gave the container a tmpfs data directory. Without this, I found
// that the default data directory was very small and the container
// easily filled it up. This caused the cluster to go into a red status
// and stop responding to requests.
.withTmpFs({ "/usr/share/elasticsearch/data": "rw" })
)
}
const port = (await ports).find(x => x.container === 9200)?.host
if (!port) {
throw new Error("Elasticsearch port not found")
}
const config: ElasticsearchConfig = {
url: `http://127.0.0.1:${port}`,
}
return {
type: "datasource",
source: SourceName.ELASTICSEARCH,
config,
}
}
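The Datasource returned here is what dsProvider() hands back to the Elasticsearch integration test earlier in this commit; because security is disabled in the container, the config is just the node URL. As a usage sketch (mirroring that test rather than introducing new API):

import { ElasticsearchConfig, ElasticSearchIntegration } from "../../elasticsearch"

async function example() {
  const datasource = await getDatasource()
  const integration = new ElasticSearchIntegration(
    datasource.config! as ElasticsearchConfig
  )
  // e.g. await integration.read({ index: "test", json: { query: { match_all: {} } } })
}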

View File

@ -12,3 +12,4 @@ export const POSTGRES_IMAGE = `postgres@${process.env.POSTGRES_SHA}`
export const POSTGRES_LEGACY_IMAGE = `postgres:9.5.25`
export const MONGODB_IMAGE = `mongo@${process.env.MONGODB_SHA}`
export const MARIADB_IMAGE = `mariadb@${process.env.MARIADB_SHA}`
export const ELASTICSEARCH_IMAGE = `elasticsearch@${process.env.ELASTICSEARCH_SHA}`

View File

@ -6,6 +6,7 @@ import * as mysql from "./mysql"
import * as mssql from "./mssql"
import * as mariadb from "./mariadb"
import * as oracle from "./oracle"
import * as elasticsearch from "./elasticsearch"
import { testContainerUtils } from "@budibase/backend-core/tests"
import { Knex } from "knex"
import TestConfiguration from "../../../tests/utilities/TestConfiguration"
@ -23,22 +24,32 @@ export enum DatabaseName {
MARIADB = "mariadb",
ORACLE = "oracle",
SQS = "sqs",
ELASTICSEARCH = "elasticsearch",
}
const DATASOURCE_PLUS = [
DatabaseName.POSTGRES,
DatabaseName.POSTGRES_LEGACY,
DatabaseName.MYSQL,
DatabaseName.SQL_SERVER,
DatabaseName.MARIADB,
DatabaseName.ORACLE,
DatabaseName.SQS,
]
const providers: Record<DatabaseName, DatasourceProvider> = {
// datasource_plus entries
[DatabaseName.POSTGRES]: postgres.getDatasource,
[DatabaseName.POSTGRES_LEGACY]: postgres.getLegacyDatasource,
[DatabaseName.MONGODB]: mongodb.getDatasource,
[DatabaseName.MYSQL]: mysql.getDatasource,
[DatabaseName.SQL_SERVER]: mssql.getDatasource,
[DatabaseName.MARIADB]: mariadb.getDatasource,
[DatabaseName.ORACLE]: oracle.getDatasource,
[DatabaseName.SQS]: async () => undefined,
}
export interface DatasourceDescribeOpts {
only?: DatabaseName[]
exclude?: DatabaseName[]
// rest
[DatabaseName.ELASTICSEARCH]: elasticsearch.getDatasource,
[DatabaseName.MONGODB]: mongodb.getDatasource,
}
export interface DatasourceDescribeReturnPromise {
@ -103,6 +114,20 @@ function createDummyTest() {
})
}
interface OnlyOpts {
only: DatabaseName[]
}
interface PlusOpts {
plus: true
exclude?: DatabaseName[]
}
export type DatasourceDescribeOpts = OnlyOpts | PlusOpts
// If you ever want to rename this function, be mindful that you will also need
// to modify src/tests/filters/index.js to make sure that we're correctly
// filtering datasource/non-datasource tests in CI.
export function datasourceDescribe(opts: DatasourceDescribeOpts) {
// tests that call this need a lot longer timeouts
jest.setTimeout(120000)
@ -111,17 +136,15 @@ export function datasourceDescribe(opts: DatasourceDescribeOpts) {
createDummyTest()
}
const { only, exclude } = opts
if (only && exclude) {
throw new Error("you can only supply one of 'only' or 'exclude'")
}
let databases = Object.values(DatabaseName)
if (only) {
databases = only
} else if (exclude) {
databases = databases.filter(db => !exclude.includes(db))
let databases: DatabaseName[] = []
if ("only" in opts) {
databases = opts.only
} else if ("plus" in opts) {
databases = Object.values(DatabaseName)
.filter(db => DATASOURCE_PLUS.includes(db))
.filter(db => !opts.exclude?.includes(db))
} else {
throw new Error("invalid options")
}
if (process.env.DATASOURCE) {
@ -156,6 +179,7 @@ export function datasourceDescribe(opts: DatasourceDescribeOpts) {
isMSSQL: dbName === DatabaseName.SQL_SERVER,
isOracle: dbName === DatabaseName.ORACLE,
isMariaDB: dbName === DatabaseName.MARIADB,
isElasticsearch: dbName === DatabaseName.ELASTICSEARCH,
}))
}
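With this refactor, datasourceDescribe takes one of two shapes: only pins an explicit list of datasources, while plus: true expands to the DATASOURCE_PLUS set minus any exclude entries. The call sites changed elsewhere in this commit follow the pattern below (sketch only; the relative import path varies per test file):

import { DatabaseName, datasourceDescribe } from "../../../integrations/tests/utils"

// Every datasource_plus database except SQS.
const plusDescriptions = datasourceDescribe({
  plus: true,
  exclude: [DatabaseName.SQS],
})

// A single, explicitly named datasource.
const esDescriptions = datasourceDescribe({ only: [DatabaseName.ELASTICSEARCH] })

if (esDescriptions.length) {
  describe.each(esDescriptions)("Elasticsearch Integration", ({ dsProvider }) => {
    // suite body as in the integration test above
  })
}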

View File

@ -23,7 +23,7 @@ export async function getDatasource(): Promise<Datasource> {
})
.withWaitStrategy(
Wait.forSuccessfulCommand(
"/opt/mssql-tools/bin/sqlcmd -S localhost -U sa -P Password_123 -q 'SELECT 1'"
"/opt/mssql-tools18/bin/sqlcmd -C -S localhost -U sa -P Password_123 -q 'SELECT 1'"
).withStartupTimeout(20000)
)
)
@ -44,7 +44,8 @@ export async function getDatasource(): Promise<Datasource> {
user: "sa",
password: "Password_123",
options: {
encrypt: false,
encrypt: true,
trustServerCertificate: true,
},
},
}

View File

@ -7,6 +7,7 @@ import {
} from "@budibase/types"
import { cloneDeep } from "lodash/fp"
import sdk from "../../../sdk"
import { isInternal } from "../tables/utils"
export const removeInvalidFilters = (
filters: SearchFilters,
@ -70,6 +71,10 @@ export const getQueryableFields = async (
opts?: { noRelationships?: boolean }
): Promise<string[]> => {
const result = []
if (isInternal({ table })) {
result.push("_id")
}
for (const field of Object.keys(table.schema).filter(
f => allowedFields.includes(f) && table.schema[f].visible !== false
)) {
@ -113,14 +118,13 @@ export const getQueryableFields = async (
return result
}
const result = [
"_id", // Querying by _id is always allowed, even if it's never part of the schema
]
// Querying by _id is always allowed, even if it's never part of the schema
const result = ["_id"]
if (fields == null) {
fields = Object.keys(table.schema)
}
result.push(...(await extractTableFields(table, fields, [table._id!])))
return result
return Array.from(new Set(result))
}

View File

@ -10,16 +10,13 @@ import {
import { search } from "../../../../../sdk/app/rows/search"
import { generator } from "@budibase/backend-core/tests"
import {
DatabaseName,
datasourceDescribe,
} from "../../../../../integrations/tests/utils"
import { datasourceDescribe } from "../../../../../integrations/tests/utils"
import { tableForDatasource } from "../../../../../tests/utilities/structures"
// These test cases are only for things that cannot be tested through the API
// (e.g. limiting searches to returning specific fields). If it's possible to
// test through the API, it should be done there instead.
const descriptions = datasourceDescribe({ exclude: [DatabaseName.MONGODB] })
const descriptions = datasourceDescribe({ plus: true })
if (descriptions.length) {
describe.each(descriptions)(

View File

@ -250,6 +250,8 @@ describe("query utils", () => {
expect(result).toEqual([
"_id",
"name",
"aux._id",
"auxTable._id",
"aux.title",
"auxTable.title",
"aux.name",
@ -284,7 +286,14 @@ describe("query utils", () => {
const result = await config.doInContext(config.appId, () => {
return getQueryableFields(table)
})
expect(result).toEqual(["_id", "name", "aux.name", "auxTable.name"])
expect(result).toEqual([
"_id",
"name",
"aux._id",
"auxTable._id",
"aux.name",
"auxTable.name",
])
})
it("excludes all relationship fields if hidden", async () => {
@ -387,10 +396,14 @@ describe("query utils", () => {
"_id",
"name",
// aux1 primitive props
"aux1._id",
"aux1Table._id",
"aux1.name",
"aux1Table.name",
// aux2 primitive props
"aux2._id",
"aux2Table._id",
"aux2.title",
"aux2Table.title",
])
@ -405,14 +418,18 @@ describe("query utils", () => {
"name",
// aux2_1 primitive props
"aux2_1._id",
"aux2Table._id",
"aux2_1.title",
"aux2Table.title",
// aux2_2 primitive props
"aux2_2._id",
"aux2_2.title",
"aux2Table.title",
// table primitive props
"table._id",
"TestTable._id",
"table.name",
"TestTable.name",
])
@ -427,14 +444,18 @@ describe("query utils", () => {
"title",
// aux1_1 primitive props
"aux1_1._id",
"aux1Table._id",
"aux1_1.name",
"aux1Table.name",
// aux1_2 primitive props
"aux1_2._id",
"aux1_2.name",
"aux1Table.name",
// table primitive props
"table._id",
"TestTable._id",
"table.name",
"TestTable.name",
])
@ -481,6 +502,8 @@ describe("query utils", () => {
"name",
// deep 1 aux primitive props
"aux._id",
"auxTable._id",
"aux.title",
"auxTable.title",
])
@ -495,6 +518,8 @@ describe("query utils", () => {
"title",
// deep 1 dependency primitive props
"table._id",
"TestTable._id",
"table.name",
"TestTable.name",
])

View File

@ -1,108 +0,0 @@
import {
FieldType,
INTERNAL_TABLE_SOURCE_ID,
Table,
TableSourceType,
ViewV2,
} from "@budibase/types"
import { generator } from "@budibase/backend-core/tests"
import sdk from "../../.."
jest.mock("../../views", () => ({
...jest.requireActual("../../views"),
enrichSchema: jest.fn().mockImplementation(v => ({ ...v, mocked: true })),
}))
describe("table sdk", () => {
describe("enrichViewSchemas", () => {
const basicTable: Table = {
_id: generator.guid(),
name: "TestTable",
type: "table",
sourceId: INTERNAL_TABLE_SOURCE_ID,
sourceType: TableSourceType.INTERNAL,
schema: {
name: {
type: FieldType.STRING,
name: "name",
visible: true,
width: 80,
order: 2,
constraints: {
type: "string",
},
},
description: {
type: FieldType.STRING,
name: "description",
visible: true,
width: 200,
constraints: {
type: "string",
},
},
id: {
type: FieldType.NUMBER,
name: "id",
visible: true,
order: 1,
constraints: {
type: "number",
},
},
hiddenField: {
type: FieldType.STRING,
name: "hiddenField",
visible: false,
constraints: {
type: "string",
},
},
},
}
it("should fetch the default schema if not overriden", async () => {
const tableId = basicTable._id!
function getTable() {
const view: ViewV2 = {
version: 2,
id: generator.guid(),
name: generator.guid(),
tableId,
}
return view
}
const view1 = getTable()
const view2 = getTable()
const view3 = getTable()
const res = await sdk.tables.enrichViewSchemas({
...basicTable,
views: {
[view1.name]: view1,
[view2.name]: view2,
[view3.name]: view3,
},
})
expect(sdk.views.enrichSchema).toHaveBeenCalledTimes(3)
expect(res).toEqual({
...basicTable,
views: {
[view1.name]: {
...view1,
mocked: true,
},
[view2.name]: {
...view2,
mocked: true,
},
[view3.name]: {
...view3,
mocked: true,
},
},
})
})
})
})

View File

@ -7,7 +7,7 @@ import { default as queries } from "./app/queries"
import { default as rows } from "./app/rows"
import { default as links } from "./app/links"
import { default as users } from "./users"
import { default as plugins } from "./plugins"
import * as plugins from "./plugins"
import * as views from "./app/views"
import * as permissions from "./app/permissions"
import * as rowActions from "./app/rowActions"

View File

@ -1,5 +1,41 @@
import * as plugins from "./plugins"
import { KoaFile, Plugin, PluginSource, PluginType } from "@budibase/types"
import {
db as dbCore,
objectStore,
plugins as pluginCore,
tenancy,
} from "@budibase/backend-core"
import { fileUpload } from "../../api/controllers/plugin/file"
import env from "../../environment"
import { clientAppSocket } from "../../websockets"
import { sdk as pro } from "@budibase/pro"
export default {
...plugins,
export async function fetch(type?: PluginType): Promise<Plugin[]> {
const db = tenancy.getGlobalDB()
const response = await db.allDocs(
dbCore.getPluginParams(null, {
include_docs: true,
})
)
let plugins = response.rows.map((row: any) => row.doc) as Plugin[]
plugins = await objectStore.enrichPluginURLs(plugins)
if (type) {
return plugins.filter((plugin: Plugin) => plugin.schema?.type === type)
} else {
return plugins
}
}
export async function processUploaded(plugin: KoaFile, source: PluginSource) {
const { metadata, directory } = await fileUpload(plugin)
pluginCore.validate(metadata.schema)
// Only allow components in cloud
if (!env.SELF_HOSTED && metadata.schema?.type !== PluginType.COMPONENT) {
throw new Error("Only component plugins are supported outside of self-host")
}
const doc = await pro.plugins.storePlugin(metadata, directory, source)
clientAppSocket?.emit("plugin-update", { name: doc.name, hash: doc.hash })
return doc
}

View File

@ -1,41 +0,0 @@
import { KoaFile, Plugin, PluginSource, PluginType } from "@budibase/types"
import {
db as dbCore,
objectStore,
plugins as pluginCore,
tenancy,
} from "@budibase/backend-core"
import { fileUpload } from "../../api/controllers/plugin/file"
import env from "../../environment"
import { clientAppSocket } from "../../websockets"
import { sdk as pro } from "@budibase/pro"
export async function fetch(type?: PluginType): Promise<Plugin[]> {
const db = tenancy.getGlobalDB()
const response = await db.allDocs(
dbCore.getPluginParams(null, {
include_docs: true,
})
)
let plugins = response.rows.map((row: any) => row.doc) as Plugin[]
plugins = await objectStore.enrichPluginURLs(plugins)
if (type) {
return plugins.filter((plugin: Plugin) => plugin.schema?.type === type)
} else {
return plugins
}
}
export async function processUploaded(plugin: KoaFile, source?: PluginSource) {
const { metadata, directory } = await fileUpload(plugin)
pluginCore.validate(metadata?.schema)
// Only allow components in cloud
if (!env.SELF_HOSTED && metadata?.schema?.type !== PluginType.COMPONENT) {
throw new Error("Only component plugins are supported outside of self-host")
}
const doc = await pro.plugins.storePlugin(metadata, directory, source)
clientAppSocket?.emit("plugin-update", { name: doc.name, hash: doc.hash })
return doc
}

View File

@ -367,6 +367,8 @@ class Orchestrator {
if (e.errno === "ETIME") {
span?.addTags({ timedOut: true })
console.warn(`Automation execution timed out after ${timeout}ms`)
} else {
throw e
}
}

View File

@ -1,4 +1,4 @@
import { Plugin } from "@budibase/types"
import { Plugin, PluginUpload } from "@budibase/types"
import { budibaseTempDir } from "../budibaseDir"
import fs from "fs"
import { join } from "path"
@ -8,31 +8,31 @@ import stream from "stream"
const DATASOURCE_PATH = join(budibaseTempDir(), "datasource")
const AUTOMATION_PATH = join(budibaseTempDir(), "automation")
export const getPluginMetadata = async (path: string) => {
let metadata: any = {}
export const getPluginMetadata = async (
path: string
): Promise<PluginUpload> => {
let pkg: any
let schema: any
try {
const pkg = fs.readFileSync(join(path, "package.json"), "utf8")
const schema = fs.readFileSync(join(path, "schema.json"), "utf8")
metadata.schema = JSON.parse(schema)
metadata.package = JSON.parse(pkg)
if (
!metadata.package.name ||
!metadata.package.version ||
!metadata.package.description
) {
throw new Error(
"package.json is missing one of 'name', 'version' or 'description'."
)
pkg = JSON.parse(fs.readFileSync(join(path, "package.json"), "utf8"))
schema = JSON.parse(fs.readFileSync(join(path, "schema.json"), "utf8"))
if (!pkg.name) {
throw new Error("package.json is missing 'name'.")
}
if (!pkg.version) {
throw new Error("package.json is missing 'version'.")
}
if (!pkg.description) {
throw new Error("package.json is missing 'description'.")
}
} catch (err: any) {
throw new Error(
`Unable to process schema.json/package.json in plugin. ${err.message}`
`Unable to process schema.json/package.json in plugin. ${err.message}`,
{ cause: err }
)
}
return { metadata, directory: path }
return { metadata: { package: pkg, schema }, directory: path }
}
async function getPluginImpl(path: string, plugin: Plugin) {
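For reference, the stricter validation above now checks name, version and description individually rather than reporting one combined error. A sketch of the minimal file contents a plugin directory needs to get past getPluginMetadata (values are placeholders; beyond type, the schema shape is plugin-defined):

// package.json – each of the three fields is validated separately.
const examplePackageJson = {
  name: "example-plugin",
  version: "1.0.0",
  description: "An example Budibase plugin.",
}

// schema.json – `type` must be a PluginType (e.g. "component").
const exampleSchemaJson = {
  type: "component",
  // ...remaining keys are plugin-defined ([key: string]: any)
}

// getPluginMetadata(dir) then resolves to a PluginUpload:
// { metadata: { package: examplePackageJson, schema: exampleSchemaJson }, directory: dir }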

View File

@ -8,7 +8,15 @@ import {
logging,
env as coreEnv,
} from "@budibase/backend-core"
import { Ctx, User, EmailInvite, EmailAttachment } from "@budibase/types"
import {
Ctx,
User,
EmailInvite,
EmailAttachment,
SendEmailResponse,
SendEmailRequest,
EmailTemplatePurpose,
} from "@budibase/types"
interface Request {
ctx?: Ctx
@ -110,25 +118,23 @@ export async function sendSmtpEmail({
invite?: EmailInvite
}) {
// tenant ID will be set in header
const request: SendEmailRequest = {
email: to,
from,
contents,
subject,
cc,
bcc,
purpose: EmailTemplatePurpose.CUSTOM,
automation,
invite,
attachments,
}
const response = await fetch(
checkSlashesInUrl(env.WORKER_URL + `/api/global/email/send`),
createRequest({
method: "POST",
body: {
email: to,
from,
contents,
subject,
cc,
bcc,
purpose: "custom",
automation,
invite,
attachments,
},
})
createRequest({ method: "POST", body: request })
)
return checkResponse(response, "send email")
return (await checkResponse(response, "send email")) as SendEmailResponse
}
export async function removeAppFromUserRoles(ctx: Ctx, appId: string) {

View File

@ -3,7 +3,8 @@ import env from "./environment"
import chokidar from "chokidar"
import fs from "fs"
import { constants, tenancy } from "@budibase/backend-core"
import pluginsSdk from "./sdk/plugins"
import { processUploaded } from "./sdk/plugins"
import { PluginSource } from "@budibase/types"
export function watch() {
const watchPath = path.join(env.PLUGINS_DIR, "./**/*.tar.gz")
@ -27,7 +28,7 @@ export function watch() {
const split = path.split("/")
const name = split[split.length - 1]
console.log("Importing plugin:", path)
await pluginsSdk.processUploaded({ name, path })
await processUploaded({ name, path }, PluginSource.FILE)
} catch (err: any) {
const message = err?.message ? err?.message : err
console.error("Failed to import plugin:", message)

View File

@ -17,6 +17,7 @@
"@budibase/nano": "10.1.5",
"@types/json-schema": "^7.0.15",
"@types/koa": "2.13.4",
"@types/nodemailer": "^6.4.17",
"@types/redlock": "4.0.7",
"koa-useragent": "^4.1.0",
"rimraf": "3.0.2",

View File

@ -1,4 +1,5 @@
import { EmailAttachment, EmailInvite } from "../../../documents"
import SMTPTransport from "nodemailer/lib/smtp-transport"
export enum EmailTemplatePurpose {
CORE = "core",
@ -10,19 +11,18 @@ export enum EmailTemplatePurpose {
}
export interface SendEmailRequest {
workspaceId?: string
email: string
userId: string
userId?: string
purpose: EmailTemplatePurpose
contents?: string
from?: string
subject: string
cc?: boolean
bcc?: boolean
cc?: string
bcc?: string
automation?: boolean
invite?: EmailInvite
attachments?: EmailAttachment[]
}
export interface SendEmailResponse extends Record<string, any> {
export interface SendEmailResponse extends SMTPTransport.SentMessageInfo {
message: string
}

View File

@ -1,10 +1,10 @@
import { Document } from "../../document"
import { User } from "../../global"
import { ReadStream } from "fs"
import { Row } from "../row"
import { Table } from "../table"
import { AutomationStep, AutomationTrigger } from "./schema"
import { ContextEmitter } from "../../../sdk"
import { Readable } from "stream"
export enum AutomationIOType {
OBJECT = "object",
@ -99,7 +99,7 @@ export interface SendEmailOpts {
// workspaceId If finer grain controls being used then this will lookup config for workspace.
workspaceId?: string
// user If sending to an existing user the object can be provided, this is used in the context.
user: User
user?: User
// from If sending from an address that is not what is configured in the SMTP config.
from?: string
// contents If sending a custom email then can supply contents which will be added to it.
@ -108,8 +108,8 @@ export interface SendEmailOpts {
subject: string
// info Pass in a structure of information to be stored alongside the invitation.
info?: any
cc?: boolean
bcc?: boolean
cc?: string
bcc?: string
automation?: boolean
invite?: EmailInvite
attachments?: EmailAttachment[]
@ -269,7 +269,7 @@ export type AutomationAttachment = {
export type AutomationAttachmentContent = {
filename: string
content: ReadStream | NodeJS.ReadableStream
content: Readable
}
export type BucketedContent = AutomationAttachmentContent & {

View File

@ -24,10 +24,7 @@ export interface Plugin extends Document {
source: PluginSource
package: { [key: string]: any }
hash: string
schema: {
type: PluginType
[key: string]: any
}
schema: PluginSchema
iconFileName?: string
// Populated on read
jsUrl?: string
@ -36,3 +33,24 @@ export interface Plugin extends Document {
}
export const PLUGIN_TYPE_ARR = Object.values(PluginType)
export interface PluginSchema {
type: PluginType
[key: string]: any
}
interface Package {
name: string
version: string
description: string
}
export interface PluginMetadata {
schema: PluginSchema
package: Package
}
export interface PluginUpload {
metadata: PluginMetadata
directory: string
}

View File

@ -3,6 +3,7 @@ import { Row, DocumentType, Table, Datasource } from "../documents"
import { SortOrder, SortType } from "../api"
import { Knex } from "knex"
import { Aggregation } from "./row"
import _ from "lodash"
export enum BasicOperator {
EQUAL = "equal",
@ -83,7 +84,7 @@ type RangeFilter = Record<
type LogicalFilter = { conditions: SearchFilters[] }
export function isLogicalFilter(filter: any): filter is LogicalFilter {
return "conditions" in filter
return _.isPlainObject(filter) && "conditions" in filter
}
export type AnySearchFilter = BasicFilter | ArrayFilter | RangeFilter
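The new _.isPlainObject guard matters because the `in` operator throws a TypeError when its right-hand side is a primitive or null, so the old `"conditions" in filter` check could crash on non-object filter values instead of returning false. A small sketch of the resulting behaviour (inputs are illustrative):

import _ from "lodash"

type LogicalFilter = { conditions: unknown[] }

function isLogicalFilter(filter: any): filter is LogicalFilter {
  return _.isPlainObject(filter) && "conditions" in filter
}

isLogicalFilter({ conditions: [] }) // true
isLogicalFilter(["conditions"])     // false – arrays are not plain objects
isLogicalFilter("conditions")       // false – previously `in` would throw here
isLogicalFilter(null)               // false – previously `in` would throw here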

View File

@ -85,11 +85,14 @@
"@types/jsonwebtoken": "9.0.3",
"@types/koa__router": "12.0.4",
"@types/lodash": "4.14.200",
"@types/maildev": "^0.0.7",
"@types/node-fetch": "2.6.4",
"@types/nodemailer": "^6.4.17",
"@types/server-destroy": "1.0.1",
"@types/supertest": "2.0.14",
"@types/uuid": "8.3.4",
"jest": "29.7.0",
"maildev": "^2.2.1",
"nock": "^13.5.4",
"nodemon": "2.0.15",
"rimraf": "3.0.2",

View File

@ -11,7 +11,6 @@ export async function sendEmail(
ctx: UserCtx<SendEmailRequest, SendEmailResponse>
) {
let {
workspaceId,
email,
userId,
purpose,
@ -24,13 +23,15 @@ export async function sendEmail(
invite,
attachments,
} = ctx.request.body
let user: any
let user: User | undefined = undefined
if (userId) {
const db = tenancy.getGlobalDB()
user = await db.get<User>(userId)
user = await db.tryGet<User>(userId)
if (!user) {
ctx.throw(404, "User not found.")
}
}
const response = await sendEmailFn(email, purpose, {
workspaceId,
user,
contents,
from,

View File

@ -1,33 +1,269 @@
jest.mock("nodemailer")
import { EmailTemplatePurpose } from "@budibase/types"
import { TestConfiguration, mocks } from "../../../../tests"
const sendMailMock = mocks.email.mock()
import { EmailTemplatePurpose, SendEmailRequest } from "@budibase/types"
import { TestConfiguration } from "../../../../tests"
import {
captureEmail,
deleteAllEmail,
getAttachments,
Mailserver,
startMailserver,
stopMailserver,
} from "../../../../tests/mocks/email"
import { objectStore } from "@budibase/backend-core"
describe("/api/global/email", () => {
const config = new TestConfiguration()
let mailserver: Mailserver
beforeAll(async () => {
await config.beforeAll()
mailserver = await startMailserver(config)
})
afterAll(async () => {
await stopMailserver(mailserver)
await config.afterAll()
})
it("should be able to send an email (with mocking)", async () => {
// initially configure settings
await config.saveSmtpConfig()
await config.saveSettingsConfig()
beforeEach(async () => {
await deleteAllEmail(mailserver)
})
const res = await config.api.emails.sendEmail(
EmailTemplatePurpose.INVITATION
interface TestCase {
req: Partial<SendEmailRequest>
expectedStatus?: number
expectedContents?: string
}
const testCases: TestCase[] = [
{
req: {
purpose: EmailTemplatePurpose.WELCOME,
},
expectedContents: `Thanks for getting started with Budibase's Budibase platform.`,
},
{
req: {
purpose: EmailTemplatePurpose.INVITATION,
},
expectedContents: `Use the button below to set up your account and get started:`,
},
{
req: {
purpose: EmailTemplatePurpose.PASSWORD_RECOVERY,
},
expectedContents: `You recently requested to reset your password for your Budibase account in your Budibase platform`,
},
{
req: {
purpose: EmailTemplatePurpose.CUSTOM,
contents: "Hello, world!",
},
expectedContents: "Hello, world!",
},
]
it.each(testCases)(
"can send $req.purpose emails",
async ({ req, expectedContents, expectedStatus }) => {
const email = await captureEmail(mailserver, async () => {
const res = await config.api.emails.sendEmail(
{
email: "to@example.com",
subject: "Test",
userId: config.user!._id,
purpose: EmailTemplatePurpose.WELCOME,
...req,
},
{
status: expectedStatus || 200,
}
)
expect(res.message).toBeDefined()
})
expect(email.html).toContain(expectedContents)
expect(email.html).not.toContain("Invalid binding")
}
)
it("should be able to send an email with an attachment", async () => {
let bucket = "testbucket"
let filename = "test.txt"
await objectStore.upload({
bucket,
filename,
body: Buffer.from("test data"),
})
let presignedUrl = await objectStore.getPresignedUrl(
bucket,
filename,
60000
)
expect(res.body.message).toBeDefined()
expect(sendMailMock).toHaveBeenCalled()
const emailCall = sendMailMock.mock.calls[0][0]
expect(emailCall.subject).toBe("Hello!")
expect(emailCall.html).not.toContain("Invalid binding")
let attachmentObject = {
url: presignedUrl,
filename,
}
const email = await captureEmail(mailserver, async () => {
const res = await config.api.emails.sendEmail({
email: "to@example.com",
subject: "Test",
userId: config.user!._id,
purpose: EmailTemplatePurpose.WELCOME,
attachments: [attachmentObject],
})
expect(res.message).toBeDefined()
})
expect(email.html).toContain(
"Thanks for getting started with Budibase's Budibase platform."
)
expect(email.html).not.toContain("Invalid binding")
const attachments = await getAttachments(mailserver, email)
expect(attachments).toEqual(["test data"])
})
it("should be able to send email without a userId", async () => {
const res = await config.api.emails.sendEmail({
email: "to@example.com",
subject: "Test",
purpose: EmailTemplatePurpose.WELCOME,
})
expect(res.message).toBeDefined()
})
it("should fail to send a password reset email without a userId", async () => {
const res = await config.api.emails.sendEmail(
{
email: "to@example.com",
subject: "Test",
purpose: EmailTemplatePurpose.PASSWORD_RECOVERY,
},
{
status: 400,
}
)
expect(res.message).toBeDefined()
})
it("can cc people", async () => {
const email = await captureEmail(mailserver, async () => {
await config.api.emails.sendEmail({
email: "to@example.com",
cc: "cc@example.com",
subject: "Test",
purpose: EmailTemplatePurpose.CUSTOM,
contents: "Hello, world!",
})
})
expect(email.cc).toEqual([{ address: "cc@example.com", name: "" }])
})
it("can bcc people", async () => {
const email = await captureEmail(mailserver, async () => {
await config.api.emails.sendEmail({
email: "to@example.com",
bcc: "bcc@example.com",
subject: "Test",
purpose: EmailTemplatePurpose.CUSTOM,
contents: "Hello, world!",
})
})
expect(email.calculatedBcc).toEqual([
{ address: "bcc@example.com", name: "" },
])
})
it("can change the from address", async () => {
const email = await captureEmail(mailserver, async () => {
const res = await config.api.emails.sendEmail({
email: "to@example.com",
from: "from@example.com",
subject: "Test",
purpose: EmailTemplatePurpose.CUSTOM,
contents: "Hello, world!",
})
expect(res.message).toBeDefined()
})
expect(email.to).toEqual([{ address: "to@example.com", name: "" }])
expect(email.from).toEqual([{ address: "from@example.com", name: "" }])
})
it("can send a calendar invite", async () => {
const startTime = new Date()
const endTime = new Date()
const email = await captureEmail(mailserver, async () => {
await config.api.emails.sendEmail({
email: "to@example.com",
subject: "Test",
purpose: EmailTemplatePurpose.CUSTOM,
contents: "Hello, world!",
invite: {
startTime,
endTime,
summary: "Summary",
location: "Location",
url: "http://example.com",
},
})
})
expect(email.alternatives).toEqual([
{
charset: "utf-8",
contentType: "text/calendar",
method: "REQUEST",
transferEncoding: "7bit",
content: expect.any(String),
},
])
// Reference iCal invite:
// BEGIN:VCALENDAR
// VERSION:2.0
// PRODID:-//sebbo.net//ical-generator//EN
// NAME:Invite
// X-WR-CALNAME:Invite
// BEGIN:VEVENT
// UID:2b5947b7-ec5a-4341-8d70-8d8130183f2a
// SEQUENCE:0
// DTSTAMP:20200101T000000Z
// DTSTART:20200101T000000Z
// DTEND:20200101T000000Z
// SUMMARY:Summary
// LOCATION:Location
// URL;VALUE=URI:http://example.com
// END:VEVENT
// END:VCALENDAR
expect(email.alternatives[0].content).toContain("BEGIN:VCALENDAR")
expect(email.alternatives[0].content).toContain("BEGIN:VEVENT")
expect(email.alternatives[0].content).toContain("UID:")
expect(email.alternatives[0].content).toContain("SEQUENCE:0")
expect(email.alternatives[0].content).toContain("SUMMARY:Summary")
expect(email.alternatives[0].content).toContain("LOCATION:Location")
expect(email.alternatives[0].content).toContain(
"URL;VALUE=URI:http://example.com"
)
expect(email.alternatives[0].content).toContain("END:VEVENT")
expect(email.alternatives[0].content).toContain("END:VCALENDAR")
const formatDate = (date: Date) =>
date.toISOString().replace(/[-:]/g, "").split(".")[0] + "Z"
expect(email.alternatives[0].content).toContain(
`DTSTAMP:${formatDate(startTime)}`
)
expect(email.alternatives[0].content).toContain(
`DTSTART:${formatDate(startTime)}`
)
expect(email.alternatives[0].content).toContain(
`DTEND:${formatDate(endTime)}`
)
})
})

View File

@ -1,108 +0,0 @@
jest.unmock("node-fetch")
import { TestConfiguration } from "../../../../tests"
import { objectStore } from "@budibase/backend-core"
import { helpers } from "@budibase/shared-core"
import tk from "timekeeper"
import { EmailAttachment, EmailTemplatePurpose } from "@budibase/types"
const fetch = require("node-fetch")
const nodemailer = require("nodemailer")
// for the real email tests give them a long time to try complete/fail
jest.setTimeout(30000)
describe("/api/global/email", () => {
const config = new TestConfiguration()
beforeAll(async () => {
tk.reset()
await config.beforeAll()
})
afterAll(async () => {
await config.afterAll()
})
async function sendRealEmail(
purpose: string,
attachments?: EmailAttachment[]
) {
let response, text
try {
await helpers.withTimeout(20000, () => config.saveEtherealSmtpConfig())
await helpers.withTimeout(20000, () => config.saveSettingsConfig())
let res
if (attachments) {
res = await config.api.emails
.sendEmail(purpose, attachments)
.timeout(20000)
} else {
res = await config.api.emails.sendEmail(purpose).timeout(20000)
}
// ethereal hiccup, can't test right now
if (res.status >= 300) {
return
}
expect(res.body.message).toBeDefined()
const testUrl = nodemailer.getTestMessageUrl(res.body)
expect(testUrl).toBeDefined()
response = await fetch(testUrl)
text = await response.text()
} catch (err: any) {
// ethereal hiccup, can't test right now
if (parseInt(err.status) >= 300 || (err && err.errno === "ETIME")) {
return
} else {
throw err
}
}
let toCheckFor
switch (purpose) {
case EmailTemplatePurpose.WELCOME:
toCheckFor = `Thanks for getting started with Budibase's Budibase platform.`
break
case EmailTemplatePurpose.INVITATION:
toCheckFor = `Use the button below to set up your account and get started:`
break
case EmailTemplatePurpose.PASSWORD_RECOVERY:
toCheckFor = `You recently requested to reset your password for your Budibase account in your Budibase platform`
break
}
expect(text).toContain(toCheckFor)
}
it("should be able to send a welcome email", async () => {
await sendRealEmail(EmailTemplatePurpose.WELCOME)
})
it("should be able to send a invitation email", async () => {
await sendRealEmail(EmailTemplatePurpose.INVITATION)
})
it("should be able to send a password recovery email", async () => {
await sendRealEmail(EmailTemplatePurpose.PASSWORD_RECOVERY)
})
it("should be able to send an email with attachments", async () => {
let bucket = "testbucket"
let filename = "test.txt"
await objectStore.upload({
bucket,
filename,
body: Buffer.from("test data"),
})
let presignedUrl = await objectStore.getPresignedUrl(
bucket,
filename,
60000
)
let attachmentObject = {
url: presignedUrl,
filename,
}
await sendRealEmail(EmailTemplatePurpose.WELCOME, [attachmentObject])
})
})
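The Ethereal-based suite above is removed in favour of the MailDev helpers added later in this diff. A rough sketch of the equivalent attachment check against a local mailserver follows; the request fields and assertions are illustrative (inferred from the old payload and the new helpers), not taken from the new suite.

// Illustrative sketch: verifying an attachment end to end with the MailDev
// helpers instead of Ethereal. Assumes a running `mailserver` created via
// startMailserver() and the request fields the old inline payload used.
const bucket = "testbucket"
const filename = "test.txt"
await objectStore.upload({ bucket, filename, body: Buffer.from("test data") })
const url = await objectStore.getPresignedUrl(bucket, filename, 60000)

const email = await captureEmail(mailserver, async () => {
  await config.api.emails.sendEmail({
    email: "test@example.com",
    purpose: EmailTemplatePurpose.WELCOME,
    attachments: [{ url, filename }],
    tenantId: config.getTenantId(),
    userId: config.user!._id!,
  })
})
const [content] = await getAttachments(mailserver, email)
expect(content).toEqual("test data")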

View File

@ -32,6 +32,8 @@ import {
AuthToken,
SCIMConfig,
ConfigType,
SMTPConfig,
SMTPInnerConfig,
} from "@budibase/types"
import API from "./api"
import jwt, { Secret } from "jsonwebtoken"
@ -348,9 +350,15 @@ class TestConfiguration {
// CONFIGS - SMTP
async saveSmtpConfig() {
async saveSmtpConfig(config?: SMTPInnerConfig) {
await this.deleteConfig(Config.SMTP)
await this._req(structures.configs.smtp(), null, controllers.config.save)
let smtpConfig: SMTPConfig = structures.configs.smtp()
if (config) {
smtpConfig = { type: ConfigType.SMTP, config }
}
await this._req(smtpConfig, null, controllers.config.save)
}
async saveEtherealSmtpConfig() {
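For reference, the reworked saveSmtpConfig lets a test point the tenant at an arbitrary SMTP server by passing the inner config directly; a minimal sketch, with values that mirror the startMailserver call later in this diff:

// Sketch: override the default structures.configs.smtp() fixture with an
// explicit SMTPInnerConfig, e.g. a locally running test mailserver.
await config.saveSmtpConfig({
  host: "localhost",
  port: 1025,
  secure: false,
  from: "test@example.com",
})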

View File

@ -1,19 +1,18 @@
import { EmailAttachment } from "@budibase/types"
import { SendEmailRequest, SendEmailResponse } from "@budibase/types"
import { TestAPI } from "./base"
export class EmailAPI extends TestAPI {
sendEmail = (purpose: string, attachments?: EmailAttachment[]) => {
return this.request
sendEmail = async (
req: SendEmailRequest,
expectations?: { status?: number }
): Promise<SendEmailResponse> => {
const res = await this.request
.post(`/api/global/email/send`)
.send({
email: "test@example.com",
attachments,
purpose,
tenantId: this.config.getTenantId(),
userId: this.config.user!._id!,
})
.send(req)
.set(this.config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(200)
.expect(expectations?.status || 200)
return res.body as SendEmailResponse
}
}
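The test client now accepts a full SendEmailRequest plus optional expectations rather than building the payload itself. A hedged usage sketch follows; the field names are inferred from the payload the previous helper constructed inline, and the authoritative shape is SendEmailRequest in @budibase/types.

// Sketch of calling the reshaped helper; EmailTemplatePurpose and the exact
// request fields are assumptions based on the old inline payload above.
const res = await config.api.emails.sendEmail(
  {
    email: "test@example.com",
    purpose: EmailTemplatePurpose.WELCOME,
    tenantId: config.getTenantId(),
    userId: config.user!._id!,
  },
  { status: 200 }
)
expect(res.message).toBeDefined()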

View File

@ -1,3 +1,10 @@
import MailDev from "maildev"
import { promisify } from "util"
import TestConfiguration from "../TestConfiguration"
/**
* @deprecated please use the `MailDev` email server instead of this mock.
*/
export function mock() {
// mock the email system
const sendMailMock = jest.fn()
@ -8,3 +15,170 @@ export function mock() {
})
return sendMailMock
}
export type Mailserver = InstanceType<typeof MailDev>
export type MailserverConfig = ConstructorParameters<typeof MailDev>[0]
export interface Attachment {
checksum: string
contentId: string
contentType: string
fileName: string
generatedFileName: string
length: number
transferEncoding: string
transformed: boolean
}
export interface Address {
address: string
args?: boolean
name?: string
}
export interface Alternative {
contentType: string
content: string
charset: string
method: string
transferEncoding: string
}
export interface Envelope {
from: Address
to: Address[]
host: string
remoteAddress: string
}
export interface Email {
attachments: Attachment[]
alternatives: Alternative[]
calculatedBcc: Address[]
cc: Address[]
date: string
envelope: Envelope
from: Address[]
headers: Record<string, string>
html: string
id: string
messageId: string
priority: string
read: boolean
size: number
sizeHuman: string
source: string
time: Date
to: Address[]
}
export function getUnusedPort(): Promise<number> {
return new Promise((resolve, reject) => {
const server = require("net").createServer()
server.unref()
server.on("error", reject)
server.listen(0, () => {
const port = server.address().port
server.close(() => {
resolve(port)
})
})
})
}
export async function captureEmail(
mailserver: Mailserver,
f: () => Promise<void>
): Promise<Email> {
const timeoutMs = 5000
let timeout: ReturnType<typeof setTimeout> | undefined = undefined
const cancel = () => {
if (timeout) {
clearTimeout(timeout)
timeout = undefined
}
}
const timeoutPromise = new Promise<never>((_, reject) => {
timeout = setTimeout(() => {
reject(new Error("Timed out waiting for email"))
}, timeoutMs)
})
const mailPromise = new Promise<Email>(resolve => {
// @ts-expect-error - types are wrong
mailserver.once("new", email => {
resolve(email as Email)
cancel()
})
})
const emailPromise = Promise.race([mailPromise, timeoutPromise])
try {
await f()
} finally {
cancel()
}
return await emailPromise
}
export async function startMailserver(
config: TestConfiguration,
opts?: MailserverConfig
): Promise<Mailserver> {
if (!opts) {
opts = {}
}
if (!opts.smtp) {
opts.smtp = await getUnusedPort()
}
const mailserver = new MailDev(opts)
await new Promise((resolve, reject) => {
mailserver.listen(err => {
if (err) {
return reject(err)
}
resolve(mailserver)
})
})
await config.saveSmtpConfig({
host: "localhost",
port: opts.smtp,
secure: false,
from: "test@example.com",
})
return mailserver
}
export function deleteAllEmail(mailserver: Mailserver) {
return promisify(mailserver.deleteAllEmail).bind(mailserver)()
}
export function stopMailserver(mailserver: Mailserver) {
return promisify(mailserver.close).bind(mailserver)()
}
export function getAttachment(
mailserver: Mailserver,
email: Email,
attachment: Attachment
) {
return new Promise<string>((resolve, reject) => {
// @ts-expect-error - types are wrong
mailserver.getEmailAttachment(
email.id,
attachment.generatedFileName,
(err: any, _contentType: string, stream: ReadableStream) => {
if (err) {
return reject(err)
}
resolve(new Response(stream).text())
}
)
})
}
export function getAttachments(mailserver: Mailserver, email: Email) {
return Promise.all(
email.attachments.map(attachment =>
getAttachment(mailserver, email, attachment)
)
)
}
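Taken together, these helpers support a self-contained test lifecycle. A minimal sketch using only functions defined in this file; the email-sending trigger is left as a placeholder and `config` is assumed to be the surrounding suite's TestConfiguration.

// Sketch of the intended lifecycle: boot MailDev on a free port, capture a
// single delivery, then shut the server down again.
let mailserver: Mailserver

beforeAll(async () => {
  mailserver = await startMailserver(config)
})

afterAll(async () => {
  await stopMailserver(mailserver)
})

it("captures a delivered email", async () => {
  const email = await captureEmail(mailserver, async () => {
    // trigger whatever sends the email here
  })
  expect(email.html).toBeDefined()
  await deleteAllEmail(mailserver)
})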

View File

@ -4,16 +4,17 @@ import { getTemplateByPurpose, EmailTemplates } from "../constants/templates"
import { getSettingsTemplateContext } from "./templates"
import { processString } from "@budibase/string-templates"
import {
User,
SendEmailOpts,
SMTPInnerConfig,
EmailTemplatePurpose,
User,
} from "@budibase/types"
import { configs, cache, objectStore } from "@budibase/backend-core"
import { configs, cache, objectStore, HTTPError } from "@budibase/backend-core"
import ical from "ical-generator"
import _ from "lodash"
const nodemailer = require("nodemailer")
import nodemailer from "nodemailer"
import SMTPTransport from "nodemailer/lib/smtp-transport"
const TEST_MODE = env.ENABLE_EMAIL_TEST_MODE && env.isDev()
const TYPE = TemplateType.EMAIL
@ -26,7 +27,7 @@ const FULL_EMAIL_PURPOSES = [
]
function createSMTPTransport(config?: SMTPInnerConfig) {
let options: any
let options: SMTPTransport.Options
let secure = config?.secure
// default it if not specified
if (secure == null) {
@ -59,22 +60,6 @@ function createSMTPTransport(config?: SMTPInnerConfig) {
return nodemailer.createTransport(options)
}
async function getLinkCode(
purpose: EmailTemplatePurpose,
email: string,
user: User,
info: any = null
) {
switch (purpose) {
case EmailTemplatePurpose.PASSWORD_RECOVERY:
return cache.passwordReset.createCode(user._id!, info)
case EmailTemplatePurpose.INVITATION:
return cache.invite.createCode(email, info)
default:
return null
}
}
/**
* Builds an email using handlebars and the templates found in the system (default or otherwise).
* @param purpose the purpose of the email being built, e.g. invitation, password reset.
@ -87,8 +72,8 @@ async function getLinkCode(
async function buildEmail(
purpose: EmailTemplatePurpose,
email: string,
context: any,
{ user, contents }: any = {}
context: Record<string, any>,
{ user, contents }: { user?: User; contents?: string } = {}
) {
// this isn't a full email
if (FULL_EMAIL_PURPOSES.indexOf(purpose) === -1) {
@ -106,8 +91,8 @@ async function buildEmail(
throw "Unable to build email, missing base components"
}
let name = user ? user.name : undefined
if (user && !name && user.firstName) {
let name: string | undefined
if (user && user.firstName) {
name = user.lastName ? `${user.firstName} ${user.lastName}` : user.firstName
}
context = {
@ -158,10 +143,21 @@ export async function sendEmail(
}
const transport = createSMTPTransport(config)
// if there is a link code needed this will retrieve it
const code = await getLinkCode(purpose, email, opts.user, opts?.info)
let code: string | null = null
switch (purpose) {
case EmailTemplatePurpose.PASSWORD_RECOVERY:
if (!opts.user || !opts.user._id) {
throw new HTTPError("User must be provided for password recovery.", 400)
}
code = await cache.passwordReset.createCode(opts.user._id, opts.info)
break
case EmailTemplatePurpose.INVITATION:
code = await cache.invite.createCode(email, opts.info)
break
}
let context = await getSettingsTemplateContext(purpose, code)
let message: any = {
let message: Parameters<typeof transport.sendMail>[0] = {
from: opts?.from || config?.from,
html: await buildEmail(purpose, email, context, {
user: opts?.user,
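Note on the createSMTPTransport change earlier in this file: typing options as SMTPTransport.Options means the transport config is checked against nodemailer's own definitions. A minimal illustrative sketch of the shape involved, with placeholder values:

// Sketch only: the kind of object SMTPTransport.Options describes.
import nodemailer from "nodemailer"
import SMTPTransport from "nodemailer/lib/smtp-transport"

const options: SMTPTransport.Options = {
  host: "smtp.example.com",
  port: 587,
  secure: false,
  auth: { user: "smtp-user", pass: "smtp-pass" },
}
const transport = nodemailer.createTransport(options)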

544 yarn.lock

File diff suppressed because it is too large