Merge branch 'master' into fix/custom-branding-issue

Peter Clement 2024-02-09 16:06:26 +00:00 committed by GitHub
commit 8857ca989f
124 changed files with 2719 additions and 1710 deletions

View File

@ -10,4 +10,5 @@ packages/builder/.routify
packages/sdk/sdk
packages/account-portal/packages/server/build
packages/account-portal/packages/ui/.routify
packages/account-portal/packages/ui/build
packages/account-portal/packages/ui/build
**/*.ivm.bundle.js

View File

@ -43,7 +43,8 @@
"no-useless-escape": "off",
"no-undef": "off",
"no-prototype-builtins": "off",
"local-rules/no-budibase-imports": "error"
"local-rules/no-budibase-imports": "error",
"local-rules/no-test-com": "error"
}
},
{
@ -53,7 +54,7 @@
"packages/frontend-core/**/*"
],
"rules": {
"no-console": ["error", { "allow": ["warn", "error", "debug"] } ]
"no-console": ["error", { "allow": ["warn", "error", "debug"] }]
}
}
],

View File

@ -11,4 +11,5 @@ packages/sdk/sdk
packages/pro/coverage
packages/account-portal/packages/ui/build
packages/account-portal/packages/ui/.routify
packages/account-portal/packages/server/build
packages/account-portal/packages/server/build
**/*.ivm.bundle.js

View File

@ -18,4 +18,37 @@ module.exports = {
}
},
},
"no-test-com": {
meta: {
type: "problem",
docs: {
description:
"disallow the use of 'test.com' in strings and replace it with 'example.com'",
category: "Possible Errors",
recommended: false,
},
schema: [], // no options
fixable: "code", // Indicates that this rule supports automatic fixing
},
create: function (context) {
return {
Literal(node) {
if (
typeof node.value === "string" &&
node.value.includes("test.com")
) {
context.report({
node,
message:
"test.com is a privately owned domain and could point anywhere, use example.com instead.",
fix: function (fixer) {
const newText = node.raw.replace(/test\.com/g, "example.com")
return fixer.replaceText(node, newText)
},
})
}
},
}
},
},
}
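
As a quick illustration of the new rule (the snippet below is invented, not taken from this commit): any string literal containing test.com is reported, and running eslint --fix applies the fixer above to rewrite the domain in place.

// before — reported by local-rules/no-test-com
const redirectUrl = "http://test.com/login"

// after eslint --fix — the fixer swaps the domain inside the literal
const redirectUrl = "http://example.com/login"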

View File

@ -124,6 +124,8 @@ HEALTHCHECK --interval=15s --timeout=15s --start-period=45s CMD "/healthcheck.sh
# must set this just before running
ENV NODE_ENV=production
# this is required for isolated-vm to work on Node 20+
ENV NODE_OPTIONS="--no-node-snapshot"
WORKDIR /
CMD ["./runner.sh"]

View File

@ -1,5 +1,5 @@
{
"version": "2.17.3",
"version": "2.18.2",
"npmClient": "yarn",
"packages": [
"packages/*",

View File

@ -97,7 +97,17 @@
"@budibase/backend-core": "0.0.0",
"@budibase/shared-core": "0.0.0",
"@budibase/string-templates": "0.0.0",
"@budibase/types": "0.0.0"
"@budibase/types": "0.0.0",
"tough-cookie": "4.1.3",
"node-fetch": "2.6.7",
"semver": "7.5.3",
"http-cache-semantics": "4.1.1",
"msgpackr": "1.10.1",
"axios": "1.6.3",
"xml2js": "0.6.2",
"unset-value": "2.0.1",
"got": "13.0.0",
"passport": "0.6.0"
},
"engines": {
"node": ">=20.0.0 <21.0.0"

View File

@ -25,19 +25,19 @@
"@budibase/pouchdb-replication-stream": "1.2.10",
"@budibase/shared-core": "0.0.0",
"@budibase/types": "0.0.0",
"@techpass/passport-openidconnect": "0.3.2",
"@govtechsg/passport-openidconnect": "^1.0.2",
"aws-cloudfront-sign": "3.0.2",
"aws-sdk": "2.1030.0",
"bcrypt": "5.1.0",
"bcryptjs": "2.4.3",
"bull": "4.10.1",
"correlation-id": "4.0.0",
"dd-trace": "5.0.0",
"dd-trace": "5.2.0",
"dotenv": "16.0.1",
"ioredis": "5.3.2",
"joi": "17.6.0",
"jsonwebtoken": "9.0.2",
"koa-passport": "4.1.4",
"koa-passport": "^6.0.0",
"koa-pino-logger": "4.0.0",
"lodash": "4.17.21",
"node-fetch": "2.6.7",
@ -52,9 +52,9 @@
"redlock": "4.2.0",
"rotating-file-stream": "3.1.0",
"sanitize-s3-objectkey": "0.0.1",
"semver": "7.3.7",
"semver": "^7.5.4",
"tar-fs": "2.1.1",
"uuid": "8.3.2"
"uuid": "^8.3.2"
},
"devDependencies": {
"@shopify/jest-koa-mocks": "5.1.1",

View File

@ -1,5 +1,4 @@
import { IdentityContext } from "@budibase/types"
import { ExecutionTimeTracker } from "../timers"
import { IdentityContext, VM } from "@budibase/types"
// keep this out of Budibase types, don't want to expose context info
export type ContextMap = {
@ -10,5 +9,5 @@ export type ContextMap = {
isScim?: boolean
automationId?: string
isMigrating?: boolean
jsExecutionTracker?: ExecutionTimeTracker
vm?: VM
}

View File

@ -255,7 +255,8 @@ export async function listAllObjects(bucketName: string, path: string) {
objects = objects.concat(response.Contents)
}
isTruncated = !!response.IsTruncated
} while (isTruncated)
token = response.NextContinuationToken
} while (isTruncated && token)
return objects
}
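
For reference, the pagination pattern this fix completes looks roughly like the sketch below against the AWS SDK v2; the surrounding Budibase helper is not shown in the hunk, so the client setup and types here are assumptions.

import { S3 } from "aws-sdk"

const s3 = new S3()

async function listAllObjectsSketch(bucketName: string, path: string) {
  let objects: S3.Object[] = []
  let isTruncated = false
  let token: string | undefined
  do {
    const response = await s3
      .listObjectsV2({ Bucket: bucketName, Prefix: path, ContinuationToken: token })
      .promise()
    if (response.Contents) {
      objects = objects.concat(response.Contents)
    }
    isTruncated = !!response.IsTruncated
    // NextContinuationToken is only set while the listing is truncated
    token = response.NextContinuationToken
  } while (isTruncated && token)
  return objects
}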

View File

@ -2,7 +2,7 @@ import env from "../environment"
import { getRedisOptions } from "../redis/utils"
import { JobQueue } from "./constants"
import InMemoryQueue from "./inMemoryQueue"
import BullQueue, { QueueOptions } from "bull"
import BullQueue, { QueueOptions, JobOptions } from "bull"
import { addListeners, StalledFn } from "./listeners"
import { Duration } from "../utils"
import * as timers from "../timers"
@ -24,17 +24,24 @@ async function cleanup() {
export function createQueue<T>(
jobQueue: JobQueue,
opts: { removeStalledCb?: StalledFn } = {}
opts: {
removeStalledCb?: StalledFn
maxStalledCount?: number
jobOptions?: JobOptions
} = {}
): BullQueue.Queue<T> {
const redisOpts = getRedisOptions()
const queueConfig: QueueOptions = {
redis: redisOpts,
settings: {
maxStalledCount: 0,
maxStalledCount: opts.maxStalledCount ? opts.maxStalledCount : 0,
lockDuration: QUEUE_LOCK_MS,
lockRenewTime: QUEUE_LOCK_RENEW_INTERNAL_MS,
},
}
if (opts.jobOptions) {
queueConfig.defaultJobOptions = opts.jobOptions
}
let queue: any
if (!env.isTest()) {
queue = new BullQueue(jobQueue, queueConfig)
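
As an illustration of the widened options, a caller could now set a stalled-job threshold and default Bull job options when creating a queue; the queue name, job payload type, and option values below are examples rather than values used in this commit.

const automationQueue = createQueue<AutomationJob>(JobQueue.AUTOMATION, {
  maxStalledCount: 3,
  jobOptions: {
    // standard Bull JobOptions, applied as defaultJobOptions on the queue
    attempts: 3,
    backoff: { type: "exponential", delay: 1000 },
    removeOnComplete: true,
  },
})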

View File

@ -20,41 +20,3 @@ export function cleanup() {
}
intervals = []
}
export class ExecutionTimeoutError extends Error {
public readonly name = "ExecutionTimeoutError"
}
export class ExecutionTimeTracker {
static withLimit(limitMs: number) {
return new ExecutionTimeTracker(limitMs)
}
constructor(readonly limitMs: number) {}
private totalTimeMs = 0
track<T>(f: () => T): T {
this.checkLimit()
const start = process.hrtime.bigint()
try {
return f()
} finally {
const end = process.hrtime.bigint()
this.totalTimeMs += Number(end - start) / 1e6
this.checkLimit()
}
}
get elapsedMS() {
return this.totalTimeMs
}
checkLimit() {
if (this.totalTimeMs > this.limitMs) {
throw new ExecutionTimeoutError(
`Execution time limit of ${this.limitMs}ms exceeded: ${this.totalTimeMs}ms`
)
}
}
}

View File

@ -44,11 +44,11 @@ describe("utils", () => {
it("gets appId from url", async () => {
await config.doInTenant(async () => {
const url = "http://test.com"
const url = "http://example.com"
env._set("PLATFORM_URL", url)
const ctx = structures.koa.newContext()
ctx.host = `${config.tenantId}.test.com`
ctx.host = `${config.tenantId}.example.com`
const expected = db.generateAppID(config.tenantId)
const app = structures.apps.app(expected)
@ -89,7 +89,7 @@ describe("utils", () => {
const ctx = structures.koa.newContext()
const expected = db.generateAppID()
ctx.request.headers = {
referer: `http://test.com/builder/app/${expected}/design/screen_123/screens`,
referer: `http://example.com/builder/app/${expected}/design/screen_123/screens`,
}
const actual = await utils.getAppIdFromCtx(ctx)
@ -100,7 +100,7 @@ describe("utils", () => {
const ctx = structures.koa.newContext()
const appId = db.generateAppID()
ctx.request.headers = {
referer: `http://test.com/foo/app/${appId}/bar`,
referer: `http://example.com/foo/app/${appId}/bar`,
}
const actual = await utils.getAppIdFromCtx(ctx)

View File

@ -3,5 +3,5 @@ import { v4 as uuid } from "uuid"
export { v4 as uuid } from "uuid"
export const email = () => {
return `${uuid()}@test.com`
return `${uuid()}@example.com`
}

View File

@ -61,7 +61,7 @@ export function ssoProfile(user?: User): SSOProfile {
},
_json: {
email: user.email,
picture: "http://test.com",
picture: "http://example.com",
},
provider: generator.string(),
}

View File

@ -25,7 +25,7 @@ export const user = (userProps?: Partial<Omit<User, "userId">>): User => {
roles: { app_test: "admin" },
firstName: generator.first(),
lastName: generator.last(),
pictureUrl: "http://test.com",
pictureUrl: "http://example.com",
tenantId: tenant.id(),
...userProps,
}

View File

@ -184,7 +184,7 @@
{#if environmentVariablesEnabled}
<div on:click={() => showModal()} class="add-variable">
<svg
class="spectrum-Icon spectrum-Icon--sizeS "
class="spectrum-Icon spectrum-Icon--sizeS"
focusable="false"
aria-hidden="true"
>
@ -195,7 +195,7 @@
{:else}
<div on:click={() => handleUpgradePanel()} class="add-variable">
<svg
class="spectrum-Icon spectrum-Icon--sizeS "
class="spectrum-Icon spectrum-Icon--sizeS"
focusable="false"
aria-hidden="true"
>

View File

@ -7,6 +7,9 @@ import {
findHBSBlocks,
} from "@budibase/string-templates"
import { capitalise } from "helpers"
import { Constants } from "@budibase/frontend-core"
const { ContextScopes } = Constants
/**
* Recursively searches for a specific component ID
@ -263,11 +266,59 @@ export const getComponentName = component => {
if (component == null) {
return ""
}
const components = get(store)?.components || {}
const componentDefinition = components[component._component] || {}
const name =
componentDefinition.friendlyName || componentDefinition.name || ""
return name
return componentDefinition.friendlyName || componentDefinition.name || ""
}
/**
* Recurses through the component tree and builds a tree of contexts provided
* by components.
*/
export const buildContextTree = (
rootComponent,
tree = { root: [] },
currentBranch = "root"
) => {
// Sanity check
if (!rootComponent) {
return tree
}
// Process this component's contexts
const def = store.actions.components.getDefinition(rootComponent._component)
if (def?.context) {
tree[currentBranch].push(rootComponent._id)
const contexts = Array.isArray(def.context) ? def.context : [def.context]
// If we provide local context, start a new branch for our children
if (contexts.some(context => context.scope === ContextScopes.Local)) {
currentBranch = rootComponent._id
tree[rootComponent._id] = []
}
}
// Process children
if (rootComponent._children) {
rootComponent._children.forEach(child => {
buildContextTree(child, tree, currentBranch)
})
}
return tree
}
/**
* Generates a lookup map of which context branch all components in a component
* tree are inside.
*/
export const buildContextTreeLookupMap = rootComponent => {
const tree = buildContextTree(rootComponent)
let map = {}
Object.entries(tree).forEach(([branch, ids]) => {
ids.forEach(id => {
map[id] = branch
})
})
return map
}
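
To make the output shape concrete, suppose a screen contains a data provider exposing global context, a repeater providing local context, and a form inside that repeater, with all three declaring context in their definitions (the IDs are invented for illustration). The helpers above would produce roughly:

// buildContextTree(screen.props)
{
  root: ["provider-1", "repeater-1"],   // contexts visible everywhere
  "repeater-1": ["form-1"],             // contexts scoped to the repeater's subtree
}

// buildContextTreeLookupMap(screen.props)
{
  "provider-1": "root",
  "repeater-1": "root",
  "form-1": "repeater-1",
}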

View File

@ -1,6 +1,7 @@
import { cloneDeep } from "lodash/fp"
import { get } from "svelte/store"
import {
buildContextTreeLookupMap,
findAllComponents,
findAllMatchingComponents,
findComponent,
@ -20,11 +21,13 @@ import {
encodeJSBinding,
} from "@budibase/string-templates"
import { TableNames } from "../constants"
import { JSONUtils } from "@budibase/frontend-core"
import { JSONUtils, Constants } from "@budibase/frontend-core"
import ActionDefinitions from "components/design/settings/controls/ButtonActionEditor/manifest.json"
import { environment, licensing } from "stores/portal"
import { convertOldFieldFormat } from "components/design/settings/controls/FieldConfiguration/utils"
const { ContextScopes } = Constants
// Regex to match all instances of template strings
const CAPTURE_VAR_INSIDE_TEMPLATE = /{{([^}]+)}}/g
const CAPTURE_VAR_INSIDE_JS = /\$\("([^")]+)"\)/g
@ -214,20 +217,27 @@ export const getComponentContexts = (
return []
}
let map = {}
const componentPath = findComponentPath(asset.props, componentId)
const componentPathIds = componentPath.map(component => component._id)
const contextTreeLookupMap = buildContextTreeLookupMap(asset.props)
// Processes all contexts exposed by a component
const processContexts = scope => component => {
const def = store.actions.components.getDefinition(component._component)
// Sanity check
const def = store.actions.components.getDefinition(component?._component)
if (!def?.context) {
return
}
if (!map[component._id]) {
map[component._id] = {
component,
definition: def,
contexts: [],
}
// Filter out global contexts not in the same branch.
// Global contexts are only valid if their branch root is an ancestor of
// this component.
const branch = contextTreeLookupMap[component._id]
if (branch !== "root" && !componentPathIds.includes(branch)) {
return
}
// Process all contexts provided by this component
const contexts = Array.isArray(def.context) ? def.context : [def.context]
contexts.forEach(context => {
// Ensure type matches
@ -235,7 +245,7 @@ export const getComponentContexts = (
return
}
// Ensure scope matches
let contextScope = context.scope || "global"
let contextScope = context.scope || ContextScopes.Global
if (contextScope !== scope) {
return
}
@ -243,17 +253,23 @@ export const getComponentContexts = (
if (!isContextCompatibleWithComponent(context, component)) {
return
}
if (!map[component._id]) {
map[component._id] = {
component,
definition: def,
contexts: [],
}
}
map[component._id].contexts.push(context)
})
}
// Process all global contexts
const allComponents = findAllComponents(asset.props)
allComponents.forEach(processContexts("global"))
allComponents.forEach(processContexts(ContextScopes.Global))
// Process all local contexts
const localComponents = findComponentPath(asset.props, componentId)
localComponents.forEach(processContexts("local"))
// Process all local contexts in the immediate tree
componentPath.forEach(processContexts(ContextScopes.Local))
// Exclude self if required
if (!options?.includeSelf) {

View File

@ -286,7 +286,13 @@ export const hbInsert = (value, from, to, text) => {
return parsedInsert
}
export function jsInsert(value, from, to, text, { helper, disableWrapping }) {
export function jsInsert(
value,
from,
to,
text,
{ helper, disableWrapping } = {}
) {
let parsedInsert = ""
const left = from ? value.substring(0, from) : ""

View File

@ -41,6 +41,7 @@
let autoSchema = {}
let rows = []
let keys = {}
const parseQuery = query => {
modified = false
@ -93,7 +94,13 @@
notifications.success("Query executed successfully")
} catch (error) {
notifications.error(`Query Error: ${error.message}`)
if (typeof error.message === "string") {
notifications.error(`Query Error: ${error.message}`)
} else if (typeof error.message?.code === "string") {
notifications.error(`Query Error: ${error.message.code}`)
} else {
notifications.error(`Query Error: ${JSON.stringify(error.message)}`)
}
if (!suppressErrors) {
throw error
@ -137,8 +144,20 @@
const handleScroll = e => {
scrolling = e.target.scrollTop !== 0
}
async function handleKeyDown(evt) {
keys[evt.key] = true
if ((keys["Meta"] || keys["Control"]) && keys["Enter"]) {
await runQuery({ suppressErrors: false })
}
}
function handleKeyUp(evt) {
delete keys[evt.key]
}
</script>
<svelte:window on:keydown={handleKeyDown} on:keyup={handleKeyUp} />
<QueryViewerSavePromptModal
checkIsModified={() => checkIsModified(newQuery)}
attemptSave={() => runQuery({ suppressErrors: false }).then(saveQuery)}

View File

@ -1,7 +1,20 @@
<script>
export let data
export let maxRowsToDisplay = 5
$: string = JSON.stringify(data || {}, null, 2)
let string
$: {
string = JSON.stringify(data || {}, null, 2)
if (Array.isArray(data) && data.length > maxRowsToDisplay) {
string = JSON.stringify(data.slice(0, maxRowsToDisplay) || {}, null, 2)
// Display '...' at the end of the array
string = string.replace(
/(}\n])/,
`},\n ...${data.length - maxRowsToDisplay} further items\n]`
)
}
}
</script>
<textarea class="json" disabled value={string} />
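
For instance, with the default maxRowsToDisplay of 5 and an 8-row array response, the rendered JSON ends with a trailer like this (an illustrative tail of the textarea contents):

  {
    "id": 5
  },
  ...3 further items
]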

View File

@ -4,13 +4,17 @@
export let schema = {}
export let rows = []
export let maxRowsToDisplay = 5
$: rowsCopy = cloneDeep(rows)
let rowsToDisplay
$: rowsToDisplay = [...cloneDeep(rows).slice(0, maxRowsToDisplay)]
$: additionalRows = rows.length - maxRowsToDisplay
// Cast field in query preview response to number if specified by schema
$: {
for (let i = 0; i < rowsCopy.length; i++) {
let row = rowsCopy[i]
for (let i = 0; i < rowsToDisplay.length; i++) {
let row = rowsToDisplay[i]
for (let fieldName of Object.keys(schema)) {
if (schema[fieldName] === "number" && !isNaN(Number(row[fieldName]))) {
row[fieldName] = Number(row[fieldName])
@ -23,11 +27,27 @@
</script>
<div class="table">
<Table {schema} data={rowsCopy} allowEditing={false} />
<Table {schema} data={rowsToDisplay} allowEditing={false} />
{#if additionalRows > 0}
<div class="show-more">
...{additionalRows} further items
</div>
{/if}
</div>
<style>
.table :global(.spectrum-Table-cell) {
.table :global(.spectrum-Table-cell),
.show-more {
min-width: 100px;
}
.show-more {
display: flex;
padding: 16px;
justify-content: center;
background-color: var(--spectrum-global-color-gray-50);
border: 1px solid var(--spectrum-alias-border-color-mid);
border-top: 0;
}
</style>

View File

@ -41,7 +41,7 @@
</div>
<div class="content">
{#if activeTab === "JSON"}
<JSONPanel data={rows[0] || {}} />
<JSONPanel data={rows?.length === 1 ? rows[0] : rows || {}} />
{:else if activeTab === "Schema"}
<SchemaPanel {onSchemaChange} {schema} />
{:else}

View File

@ -12,12 +12,16 @@
import PromptQueryModal from "./_components/PromptQueryModal.svelte"
import SettingsPanel from "./_components/panels/Settings.svelte"
import { helpers } from "@budibase/shared-core"
import { admin } from "stores/portal"
import { IntegrationTypes } from "constants/backend"
let selectedPanel = null
let panelOptions = []
$: datasource = $datasources.selected
$: isCloud = $admin.cloud
$: isPostgres = datasource?.source === IntegrationTypes.POSTGRES
$: getOptions(datasource)
const getOptions = datasource => {
@ -41,7 +45,13 @@
}
// always the last option for SQL
if (helpers.isSQL(datasource)) {
panelOptions.push("Settings")
if (isCloud && isPostgres) {
// We don't show the settings panel for Postgres on Budicloud because
// it requires pg_dump to work and we don't want to enable shell injection
// attacks.
} else {
panelOptions.push("Settings")
}
}
}
</script>

View File

@ -15,10 +15,15 @@
Checkbox,
notifications,
Select,
Combobox,
} from "@budibase/bbui"
import { selectedScreen, store } from "builderStore"
import { DefaultAppTheme } from "constants"
$: screenRouteOptions = $store.screens
.map(screen => screen.routing?.route)
.filter(x => x != null)
const updateShowNavigation = async e => {
await store.actions.screens.updateSetting(
get(selectedScreen),
@ -107,23 +112,6 @@
on:change={e => update("navWidth", e.detail)}
/>
{/if}
<div class="label">
<Label size="M">Show logo</Label>
</div>
<Checkbox
value={!$store.navigation.hideLogo}
on:change={e => update("hideLogo", !e.detail)}
/>
{#if !$store.navigation.hideLogo}
<div class="label">
<Label size="M">Logo URL</Label>
</div>
<Input
value={$store.navigation.logoUrl}
on:change={e => update("logoUrl", e.detail)}
updateOnChange={false}
/>
{/if}
<div class="label">
<Label size="M">Show title</Label>
</div>
@ -160,6 +148,47 @@
/>
</div>
</div>
<div class="divider" />
<div class="customizeSection">
<div class="subheading">
<Detail>Logo</Detail>
</div>
<div class="controls">
<div class="label">
<Label size="M">Show logo</Label>
</div>
<Checkbox
value={!$store.navigation.hideLogo}
on:change={e => update("hideLogo", !e.detail)}
/>
{#if !$store.navigation.hideLogo}
<div class="label">
<Label size="M">Logo image URL</Label>
</div>
<Input
value={$store.navigation.logoUrl}
on:change={e => update("logoUrl", e.detail)}
updateOnChange={false}
/>
<div class="label">
<Label size="M">Logo link URL</Label>
</div>
<Combobox
value={$store.navigation.logoLinkUrl}
on:change={e => update("logoLinkUrl", e.detail)}
options={screenRouteOptions}
/>
<div class="label">
<Label size="M">New tab</Label>
</div>
<Checkbox
value={!!$store.navigation.openLogoLinkInNewTab}
on:change={e => update("openLogoLinkInNewTab", !!e.detail)}
/>
{/if}
</div>
</div>
{/if}
</Panel>

View File

@ -4720,7 +4720,8 @@
}
],
"context": {
"type": "schema"
"type": "schema",
"scope": "local"
}
},
"daterangepicker": {

View File

@ -33,6 +33,8 @@
export let navTextColor
export let navWidth
export let pageWidth
export let logoLinkUrl
export let openLogoLinkInNewTab
export let embedded = false
@ -150,6 +152,16 @@
}
return style
}
const getSanitizedUrl = (url, openInNewTab) => {
if (!isInternal(url)) {
return ensureExternal(url)
}
if (openInNewTab) {
return `#${url}`
}
return url
}
</script>
<div
@ -192,7 +204,23 @@
{/if}
<div class="logo">
{#if !hideLogo}
<img src={logoUrl || "/builder/bblogo.png"} alt={title} />
{#if logoLinkUrl && isInternal(logoLinkUrl) && !openLogoLinkInNewTab}
<a
href={getSanitizedUrl(logoLinkUrl, openLogoLinkInNewTab)}
use:linkable
>
<img src={logoUrl || "/builder/bblogo.png"} alt={title} />
</a>
{:else if logoLinkUrl}
<a
target={openLogoLinkInNewTab ? "_blank" : "_self"}
href={getSanitizedUrl(logoLinkUrl, openLogoLinkInNewTab)}
>
<img src={logoUrl || "/builder/bblogo.png"} alt={title} />
</a>
{:else}
<img src={logoUrl || "/builder/bblogo.png"} alt={title} />
{/if}
{/if}
{#if !hideTitle && title}
<Heading size="S">{title}</Heading>
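
To illustrate what getSanitizedUrl above is doing, a few example inputs and outputs follow; isInternal and ensureExternal are the component's existing helpers, and their exact behaviour (roughly "starts with /" and "prepend a protocol when missing") is assumed here.

// getSanitizedUrl("/home", false)       -> "/home"      internal link, same tab, handled by use:linkable
// getSanitizedUrl("/home", true)        -> "#/home"     a new tab lands on the app's hash route
// getSanitizedUrl("budibase.com", true) -> "http://budibase.com"   external link, normalised by ensureExternal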

View File

@ -2,7 +2,9 @@
import { getContext } from "svelte"
import Placeholder from "./Placeholder.svelte"
import Container from "./Container.svelte"
import { ContextScopes } from "constants"
const { Provider, ContextScopes } = getContext("sdk")
const component = getContext("component")
export let dataProvider
export let noRowsMessage
@ -12,9 +14,6 @@
export let gap
export let scope = ContextScopes.Local
const { Provider } = getContext("sdk")
const component = getContext("component")
$: rows = dataProvider?.rows ?? []
$: loaded = dataProvider?.loaded ?? true
</script>

View File

@ -3,9 +3,9 @@
export let row
const { Provider } = getContext("sdk")
const { Provider, ContextScopes } = getContext("sdk")
</script>
<Provider data={row}>
<Provider data={row} scope={ContextScopes.Local}>
<slot />
</Provider>

View File

@ -1,9 +1,11 @@
<script>
import { getContext, setContext, onDestroy } from "svelte"
import { dataSourceStore, createContextStore } from "stores"
import { ActionTypes, ContextScopes } from "constants"
import { ActionTypes } from "constants"
import { generate } from "shortid"
const { ContextScopes } = getContext("sdk")
export let data
export let actions
export let key
@ -33,7 +35,7 @@
const provideData = newData => {
const dataKey = JSON.stringify(newData)
if (dataKey !== lastDataKey) {
context.actions.provideData(providerKey, newData, scope)
context.actions.provideData(providerKey, newData)
lastDataKey = dataKey
}
}
@ -43,7 +45,7 @@
if (actionsKey !== lastActionsKey) {
lastActionsKey = actionsKey
newActions?.forEach(({ type, callback, metadata }) => {
context.actions.provideAction(providerKey, type, callback, scope)
context.actions.provideAction(providerKey, type, callback)
// Register any "refresh datasource" actions with a singleton store
// so we can easily refresh data at all levels for any datasource

View File

@ -12,10 +12,5 @@ export const ActionTypes = {
ScrollTo: "ScrollTo",
}
export const ContextScopes = {
Local: "local",
Global: "global",
}
export const DNDPlaceholderID = "dnd-placeholder"
export const ScreenslotType = "screenslot"

View File

@ -23,12 +23,12 @@ import { getAction } from "utils/getAction"
import Provider from "components/context/Provider.svelte"
import Block from "components/Block.svelte"
import BlockComponent from "components/BlockComponent.svelte"
import { ActionTypes, ContextScopes } from "./constants"
import { ActionTypes } from "./constants"
import { fetchDatasourceSchema } from "./utils/schema.js"
import { getAPIKey } from "./utils/api.js"
import { enrichButtonActions } from "./utils/buttonActions.js"
import { processStringSync, makePropSafe } from "@budibase/string-templates"
import { fetchData, LuceneUtils } from "@budibase/frontend-core"
import { fetchData, LuceneUtils, Constants } from "@budibase/frontend-core"
export default {
API,
@ -57,7 +57,7 @@ export default {
fetchDatasourceSchema,
fetchData,
LuceneUtils,
ContextScopes,
ContextScopes: Constants.ContextScopes,
getAPIKey,
enrichButtonActions,
processStringSync,

View File

@ -1,5 +1,4 @@
import { writable, derived } from "svelte/store"
import { ContextScopes } from "constants"
export const createContextStore = parentContext => {
const context = writable({})
@ -20,60 +19,34 @@ export const createContextStore = parentContext => {
}
// Provide some data in context
const provideData = (providerId, data, scope = ContextScopes.Global) => {
const provideData = (providerId, data) => {
if (!providerId || data === undefined) {
return
}
// Proxy message up the chain if we have a parent and are providing global
// context
if (scope === ContextScopes.Global && parentContext) {
parentContext.actions.provideData(providerId, data, scope)
}
// Otherwise this is either the context root, or we're providing a local
// context override, so we need to update the local context instead
else {
context.update(state => {
state[providerId] = data
return state
})
broadcastChange(providerId)
}
context.update(state => {
state[providerId] = data
return state
})
broadcastChange(providerId)
}
// Provides some action in context
const provideAction = (
providerId,
actionType,
callback,
scope = ContextScopes.Global
) => {
const provideAction = (providerId, actionType, callback) => {
if (!providerId || !actionType) {
return
}
// Proxy message up the chain if we have a parent and are providing global
// context
if (scope === ContextScopes.Global && parentContext) {
parentContext.actions.provideAction(
providerId,
actionType,
callback,
scope
)
}
// Otherwise this is either the context root, or we're providing a local
// context override, so we need to update the local context instead
else {
const key = `${providerId}_${actionType}`
context.update(state => {
state[key] = callback
return state
})
broadcastChange(key)
}
const key = `${providerId}_${actionType}`
context.update(state => {
state[key] = callback
return state
})
broadcastChange(key)
}
const observeChanges = callback => {
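
In other words, providers no longer proxy global data up to the parent context store at runtime; every Provider writes into its own store, and scope is enforced at design time by the builder's context-tree filtering shown earlier. A minimal sketch of the simplified call sites (the provider ID and action type are illustrative):

// both calls now update the local context store unconditionally
context.actions.provideData("provider-1", { row })
context.actions.provideAction("provider-1", ActionTypes.RefreshDatasource, refresh)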

View File

@ -106,3 +106,8 @@ export const Themes = [
export const EventPublishType = {
ENV_VAR_UPGRADE_PANEL_OPENED: "environment_variable_upgrade_panel_opened",
}
export const ContextScopes = {
Local: "local",
Global: "global",
}
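
Both the builder and the client runtime now consume this single definition through Constants rather than keeping separate copies, for example:

import { Constants } from "@budibase/frontend-core"
const { ContextScopes } = Constants

// a context definition without an explicit scope is treated as global
const scope = context.scope || ContextScopes.Global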

@ -1 +1 @@
Subproject commit aaf7101cd1493215155cc8f83124c70d53eb1be4
Subproject commit 336bf2184cf632fdc2bffbad5628e8b15dd381bd

View File

@ -82,6 +82,8 @@ EXPOSE 4001
# due to this causing yarn to stop installing dev dependencies
# which are actually needed to get this environment up and running
ENV NODE_ENV=production
# this is required for isolated-vm to work on Node 20+
ENV NODE_OPTIONS="--no-node-snapshot"
ENV CLUSTER_MODE=${CLUSTER_MODE}
ENV TOP_LEVEL_PATH=/app

View File

@ -11,7 +11,7 @@ module SendgridMock {
}
async send(msg: any) {
if (msg.to === "invalid@test.com") {
if (msg.to === "invalid@example.com") {
throw "Invalid"
}
return msg

View File

@ -60,7 +60,7 @@ module AwsMock {
// @ts-ignore
this.getSignedUrl = (operation, params) => {
return `http://test.com/${params.Bucket}/${params.Key}`
return `http://example.com/${params.Bucket}/${params.Key}`
}
// @ts-ignore

View File

@ -36,8 +36,8 @@ module FetchMock {
if (url.includes("/api/global")) {
const user = {
email: "test@test.com",
_id: "us_test@test.com",
email: "test@example.com",
_id: "us_test@example.com",
status: "active",
roles: {},
builder: {
@ -58,7 +58,7 @@ module FetchMock {
url: "/app1",
},
})
} else if (url.includes("test.com")) {
} else if (url.includes("example.com")) {
return json({
body: opts.body,
url,

View File

@ -8,6 +8,6 @@
"../string-templates"
],
"ext": "js,ts,json,svelte",
"ignore": ["src/**/*.spec.ts", "src/**/*.spec.js", "../*/dist/**/*"],
"exec": "yarn build && node ./dist/index.js"
"ignore": ["**/*.spec.ts", "**/*.spec.js", "../*/dist/**/*"],
"exec": "yarn build && node --no-node-snapshot ./dist/index.js"
}

View File

@ -13,8 +13,12 @@
"build": "node ./scripts/build.js",
"postbuild": "copyfiles -f ../client/dist/budibase-client.js ../client/manifest.json client && copyfiles -f ../../yarn.lock ./dist/",
"check:types": "tsc -p tsconfig.json --noEmit --paths null",
"build:isolated-vm-lib:string-templates": "esbuild --minify --bundle src/jsRunner/bundles/index-helpers.ts --outfile=src/jsRunner/bundles/index-helpers.ivm.bundle.js --platform=node --format=esm --external:handlebars",
"build:isolated-vm-lib:bson": "esbuild --minify --bundle src/jsRunner/bundles/bsonPackage.ts --outfile=src/jsRunner/bundles/bson.ivm.bundle.js --platform=node --format=esm",
"build:isolated-vm-libs": "yarn build:isolated-vm-lib:string-templates && yarn build:isolated-vm-lib:bson",
"build:dev": "yarn prebuild && tsc --build --watch --preserveWatchOutput",
"debug": "yarn build && node --expose-gc --inspect=9222 dist/index.js",
"jest": "NODE_OPTIONS=\"--no-node-snapshot $NODE_OPTIONS\" jest",
"test": "bash scripts/test.sh",
"test:memory": "jest --maxWorkers=2 --logHeapUsage --forceExit",
"test:watch": "jest --watch",
@ -49,8 +53,8 @@
"@budibase/shared-core": "0.0.0",
"@budibase/string-templates": "0.0.0",
"@budibase/types": "0.0.0",
"@bull-board/api": "3.7.0",
"@bull-board/koa": "3.9.4",
"@bull-board/api": "5.10.2",
"@bull-board/koa": "5.10.2",
"@elastic/elasticsearch": "7.10.0",
"@google-cloud/firestore": "6.8.0",
"@koa/router": "8.0.8",
@ -65,14 +69,15 @@
"cookies": "0.8.0",
"csvtojson": "2.0.10",
"curlconverter": "3.21.0",
"dd-trace": "5.0.0",
"dd-trace": "5.2.0",
"dotenv": "8.2.0",
"form-data": "4.0.0",
"global-agent": "3.0.0",
"google-auth-library": "7.12.0",
"google-spreadsheet": "3.2.0",
"ioredis": "5.3.2",
"jimp": "0.16.1",
"isolated-vm": "^4.7.2",
"jimp": "0.22.10",
"joi": "17.6.0",
"js-yaml": "4.1.0",
"jsonschema": "1.4.0",
@ -85,7 +90,7 @@
"koa2-ratelimit": "1.1.1",
"lodash": "4.17.21",
"memorystream": "0.3.1",
"mongodb": "5.7",
"mongodb": "^6.3.0",
"mssql": "10.0.1",
"mysql2": "3.5.2",
"node-fetch": "2.6.7",
@ -104,9 +109,10 @@
"svelte": "^3.49.0",
"tar": "6.1.15",
"to-json-schema": "0.2.5",
"uuid": "3.3.2",
"undici": "^6.0.1",
"undici-types": "^6.0.1",
"uuid": "^8.3.2",
"validate.js": "0.13.1",
"vm2": "^3.9.19",
"worker-farm": "1.7.0",
"xml2js": "0.5.0"
},
@ -129,6 +135,7 @@
"@types/server-destroy": "1.0.1",
"@types/supertest": "2.0.14",
"@types/tar": "6.1.5",
"@types/uuid": "8.3.4",
"apidoc": "0.50.4",
"copyfiles": "2.4.1",
"docker-compose": "0.23.17",

View File

@ -0,0 +1,24 @@
# Use root/example as user/password credentials
version: "3.1"
services:
mongo:
image: mongo
restart: always
ports:
- 27017:27017
environment:
MONGO_INITDB_ROOT_USERNAME: root
MONGO_INITDB_ROOT_PASSWORD: example
mongo-express:
image: mongo-express
restart: always
ports:
- 8081:8081
environment:
ME_CONFIG_MONGODB_ADMINUSERNAME: root
ME_CONFIG_MONGODB_ADMINPASSWORD: example
ME_CONFIG_MONGODB_AUTH_USERNAME: admin
ME_CONFIG_MONGODB_AUTH_PASSWORD: pass
ME_CONFIG_MONGODB_URL: mongodb://root:example@mongo:27017/

View File

@ -3,12 +3,12 @@ set -e
if [[ -n $CI ]]
then
# Running in ci, where resources are limited
export NODE_OPTIONS="--max-old-space-size=4096"
export NODE_OPTIONS="--max-old-space-size=4096 --no-node-snapshot"
echo "jest --coverage --maxWorkers=2 --forceExit --workerIdleMemoryLimit=2000MB --bail $@"
jest --coverage --maxWorkers=2 --forceExit --workerIdleMemoryLimit=2000MB --bail $@
else
# --maxWorkers performs better in development
export NODE_OPTIONS="--no-node-snapshot"
echo "jest --coverage --maxWorkers=2 --forceExit $@"
jest --coverage --maxWorkers=2 --forceExit $@
fi

View File

@ -367,7 +367,7 @@
"value": {
"data": {
"_id": "us_693a73206518477283a8d5ae31103252",
"email": "test@test.com",
"email": "test@example.com",
"roles": {
"app_957b12f943d348faa61db7e18e088d0f": "BASIC"
},
@ -397,7 +397,7 @@
"data": [
{
"_id": "us_693a73206518477283a8d5ae31103252",
"email": "test@test.com",
"email": "test@example.com",
"roles": {
"app_957b12f943d348faa61db7e18e088d0f": "BASIC"
},

View File

@ -256,7 +256,7 @@ components:
value:
data:
_id: us_693a73206518477283a8d5ae31103252
email: test@test.com
email: test@example.com
roles:
app_957b12f943d348faa61db7e18e088d0f: BASIC
builder:
@ -278,7 +278,7 @@ components:
value:
data:
- _id: us_693a73206518477283a8d5ae31103252
email: test@test.com
email: test@example.com
roles:
app_957b12f943d348faa61db7e18e088d0f: BASIC
builder:

View File

@ -3,7 +3,7 @@ import Resource from "./utils/Resource"
const user = {
_id: "us_693a73206518477283a8d5ae31103252",
email: "test@test.com",
email: "test@example.com",
roles: {
app_957b12f943d348faa61db7e18e088d0f: "BASIC",
},

View File

@ -15,6 +15,9 @@ import {
SessionCookie,
QuerySchema,
FieldType,
type ExecuteQueryRequest,
type ExecuteQueryResponse,
type Row,
} from "@budibase/types"
import { ValidQueryNameRegex } from "@budibase/shared-core"
@ -223,7 +226,7 @@ export async function preview(ctx: UserCtx) {
}
async function execute(
ctx: UserCtx,
ctx: UserCtx<ExecuteQueryRequest, ExecuteQueryResponse | Row[]>,
opts: any = { rowsOnly: false, isAutomation: false }
) {
const db = context.getAppDB()

View File

@ -1,12 +1,12 @@
import ScriptRunner from "../../utilities/scriptRunner"
import { BBContext } from "@budibase/types"
import { Ctx } from "@budibase/types"
export async function execute(ctx: BBContext) {
export async function execute(ctx: Ctx) {
const { script, context } = ctx.request.body
const runner = new ScriptRunner(script, context)
ctx.body = runner.execute()
}
export async function save(ctx: BBContext) {
export async function save(ctx: Ctx) {
ctx.throw(501, "Not currently implemented")
}

View File

@ -3,7 +3,7 @@ import { InvalidFileExtensions } from "@budibase/shared-core"
require("svelte/register")
import { join } from "../../../utilities/centralPath"
import uuid from "uuid"
import * as uuid from "uuid"
import { ObjectStoreBuckets } from "../../../constants"
import { processString } from "@budibase/string-templates"
import {

View File

@ -12,7 +12,7 @@ describe("/api/applications/:appId/sync", () => {
app = await config.init()
// create some users which we will use throughout the tests
await config.createUser({
email: "sync1@test.com",
email: "sync1@example.com",
roles: {
[app._id!]: roles.BUILTIN_ROLE_IDS.BASIC,
},

View File

@ -77,7 +77,7 @@ describe("/datasources", () => {
const { datasource, query } = await config.dynamicVariableDatasource()
// preview once to cache variables
await preview(datasource, {
path: "www.test.com",
path: "www.example.com",
queryString: "test={{ variable3 }}",
})
// check variables in cache

View File

@ -0,0 +1,393 @@
import { Datasource, Query } from "@budibase/types"
import * as setup from "../utilities"
import { databaseTestProviders } from "../../../../integrations/tests/utils"
import { MongoClient, type Collection, BSON } from "mongodb"
jest.unmock("mongodb")
const collection = "test_collection"
const expectValidId = expect.stringMatching(/^\w{24}$/)
const expectValidBsonObjectId = expect.any(BSON.ObjectId)
describe("/queries", () => {
let config = setup.getConfig()
let datasource: Datasource
async function createQuery(query: Partial<Query>): Promise<Query> {
const defaultQuery: Query = {
datasourceId: datasource._id!,
name: "New Query",
parameters: [],
fields: {},
schema: {},
queryVerb: "read",
transformer: "return data",
readable: true,
}
const combinedQuery = { ...defaultQuery, ...query }
if (
combinedQuery.fields &&
combinedQuery.fields.extra &&
!combinedQuery.fields.extra.collection
) {
combinedQuery.fields.extra.collection = collection
}
return await config.api.query.create(combinedQuery)
}
async function withClient(
callback: (client: MongoClient) => Promise<void>
): Promise<void> {
const ds = await databaseTestProviders.mongodb.datasource()
const client = new MongoClient(ds.config!.connectionString)
await client.connect()
try {
await callback(client)
} finally {
await client.close()
}
}
async function withCollection(
callback: (collection: Collection) => Promise<void>
): Promise<void> {
await withClient(async client => {
const db = client.db(
(await databaseTestProviders.mongodb.datasource()).config!.db
)
await callback(db.collection(collection))
})
}
afterAll(async () => {
await databaseTestProviders.mongodb.stop()
setup.afterAll()
})
beforeAll(async () => {
await config.init()
datasource = await config.api.datasource.create(
await databaseTestProviders.mongodb.datasource()
)
})
beforeEach(async () => {
await withCollection(async collection => {
await collection.insertMany([
{ name: "one" },
{ name: "two" },
{ name: "three" },
{ name: "four" },
{ name: "five" },
])
})
})
afterEach(async () => {
await withCollection(async collection => {
await collection.drop()
})
})
it("should execute a count query", async () => {
const query = await createQuery({
fields: {
json: {},
extra: {
actionType: "count",
},
},
})
const result = await config.api.query.execute(query._id!)
expect(result.data).toEqual([{ value: 5 }])
})
it("should execute a count query with a transformer", async () => {
const query = await createQuery({
fields: {
json: {},
extra: {
actionType: "count",
},
},
transformer: "return data + 1",
})
const result = await config.api.query.execute(query._id!)
expect(result.data).toEqual([{ value: 6 }])
})
it("should execute a find query", async () => {
const query = await createQuery({
fields: {
json: {},
extra: {
actionType: "find",
},
},
})
const result = await config.api.query.execute(query._id!)
expect(result.data).toEqual([
{ _id: expectValidId, name: "one" },
{ _id: expectValidId, name: "two" },
{ _id: expectValidId, name: "three" },
{ _id: expectValidId, name: "four" },
{ _id: expectValidId, name: "five" },
])
})
it("should execute a findOne query", async () => {
const query = await createQuery({
fields: {
json: {},
extra: {
actionType: "findOne",
},
},
})
const result = await config.api.query.execute(query._id!)
expect(result.data).toEqual([{ _id: expectValidId, name: "one" }])
})
it("should execute a findOneAndUpdate query", async () => {
const query = await createQuery({
fields: {
json: {
filter: { name: { $eq: "one" } },
update: { $set: { name: "newName" } },
},
extra: {
actionType: "findOneAndUpdate",
},
},
})
const result = await config.api.query.execute(query._id!)
expect(result.data).toEqual([
{
lastErrorObject: { n: 1, updatedExisting: true },
ok: 1,
value: { _id: expectValidId, name: "one" },
},
])
await withCollection(async collection => {
expect(await collection.countDocuments()).toBe(5)
const doc = await collection.findOne({ name: { $eq: "newName" } })
expect(doc).toEqual({
_id: expectValidBsonObjectId,
name: "newName",
})
})
})
it("should execute a distinct query", async () => {
const query = await createQuery({
fields: {
json: "name",
extra: {
actionType: "distinct",
},
},
})
const result = await config.api.query.execute(query._id!)
const values = result.data.map(o => o.value).sort()
expect(values).toEqual(["five", "four", "one", "three", "two"])
})
it("should execute a create query with parameters", async () => {
const query = await createQuery({
fields: {
json: { foo: "{{ foo }}" },
extra: {
actionType: "insertOne",
},
},
queryVerb: "create",
parameters: [
{
name: "foo",
default: "default",
},
],
})
const result = await config.api.query.execute(query._id!, {
parameters: { foo: "bar" },
})
expect(result.data).toEqual([
{
acknowledged: true,
insertedId: expectValidId,
},
])
await withCollection(async collection => {
const doc = await collection.findOne({ foo: { $eq: "bar" } })
expect(doc).toEqual({
_id: expectValidBsonObjectId,
foo: "bar",
})
})
})
it("should execute a delete query with parameters", async () => {
const query = await createQuery({
fields: {
json: { name: { $eq: "{{ name }}" } },
extra: {
actionType: "deleteOne",
},
},
queryVerb: "delete",
parameters: [
{
name: "name",
default: "",
},
],
})
const result = await config.api.query.execute(query._id!, {
parameters: { name: "one" },
})
expect(result.data).toEqual([
{
acknowledged: true,
deletedCount: 1,
},
])
await withCollection(async collection => {
const doc = await collection.findOne({ name: { $eq: "one" } })
expect(doc).toBeNull()
})
})
it("should execute an update query with parameters", async () => {
const query = await createQuery({
fields: {
json: {
filter: { name: { $eq: "{{ name }}" } },
update: { $set: { name: "{{ newName }}" } },
},
extra: {
actionType: "updateOne",
},
},
queryVerb: "update",
parameters: [
{
name: "name",
default: "",
},
{
name: "newName",
default: "",
},
],
})
const result = await config.api.query.execute(query._id!, {
parameters: { name: "one", newName: "newOne" },
})
expect(result.data).toEqual([
{
acknowledged: true,
matchedCount: 1,
modifiedCount: 1,
upsertedCount: 0,
upsertedId: null,
},
])
await withCollection(async collection => {
const doc = await collection.findOne({ name: { $eq: "newOne" } })
expect(doc).toEqual({
_id: expectValidBsonObjectId,
name: "newOne",
})
const oldDoc = await collection.findOne({ name: { $eq: "one" } })
expect(oldDoc).toBeNull()
})
})
it("should be able to delete all records", async () => {
const query = await createQuery({
fields: {
json: {},
extra: {
actionType: "deleteMany",
},
},
queryVerb: "delete",
})
const result = await config.api.query.execute(query._id!)
expect(result.data).toEqual([
{
acknowledged: true,
deletedCount: 5,
},
])
await withCollection(async collection => {
const docs = await collection.find().toArray()
expect(docs).toHaveLength(0)
})
})
it("should be able to update all documents", async () => {
const query = await createQuery({
fields: {
json: {
filter: {},
update: { $set: { name: "newName" } },
},
extra: {
actionType: "updateMany",
},
},
queryVerb: "update",
})
const result = await config.api.query.execute(query._id!)
expect(result.data).toEqual([
{
acknowledged: true,
matchedCount: 5,
modifiedCount: 5,
upsertedCount: 0,
upsertedId: null,
},
])
await withCollection(async collection => {
const docs = await collection.find().toArray()
expect(docs).toHaveLength(5)
for (const doc of docs) {
expect(doc).toEqual({
_id: expectValidBsonObjectId,
name: "newName",
})
}
})
})
})

View File

@ -0,0 +1,170 @@
import { Datasource, Query } from "@budibase/types"
import * as setup from "../utilities"
import { databaseTestProviders } from "../../../../integrations/tests/utils"
import { Client } from "pg"
jest.unmock("pg")
const createTableSQL = `
CREATE TABLE test_table (
id serial PRIMARY KEY,
name VARCHAR ( 50 ) NOT NULL
);
`
const insertSQL = `
INSERT INTO test_table (name) VALUES ('one');
INSERT INTO test_table (name) VALUES ('two');
INSERT INTO test_table (name) VALUES ('three');
INSERT INTO test_table (name) VALUES ('four');
INSERT INTO test_table (name) VALUES ('five');
`
const dropTableSQL = `
DROP TABLE test_table;
`
describe("/queries", () => {
let config = setup.getConfig()
let datasource: Datasource
async function createQuery(query: Partial<Query>): Promise<Query> {
const defaultQuery: Query = {
datasourceId: datasource._id!,
name: "New Query",
parameters: [],
fields: {},
schema: {},
queryVerb: "read",
transformer: "return data",
readable: true,
}
return await config.api.query.create({ ...defaultQuery, ...query })
}
async function withClient(
callback: (client: Client) => Promise<void>
): Promise<void> {
const ds = await databaseTestProviders.postgres.datasource()
const client = new Client(ds.config!)
await client.connect()
try {
await callback(client)
} finally {
await client.end()
}
}
afterAll(async () => {
await databaseTestProviders.postgres.stop()
setup.afterAll()
})
beforeAll(async () => {
await config.init()
datasource = await config.api.datasource.create(
await databaseTestProviders.postgres.datasource()
)
})
beforeEach(async () => {
await withClient(async client => {
await client.query(createTableSQL)
await client.query(insertSQL)
})
})
afterEach(async () => {
await withClient(async client => {
await client.query(dropTableSQL)
})
})
it("should execute a query", async () => {
const query = await createQuery({
fields: {
sql: "SELECT * FROM test_table ORDER BY id",
},
})
const result = await config.api.query.execute(query._id!)
expect(result.data).toEqual([
{
id: 1,
name: "one",
},
{
id: 2,
name: "two",
},
{
id: 3,
name: "three",
},
{
id: 4,
name: "four",
},
{
id: 5,
name: "five",
},
])
})
it("should be able to transform a query", async () => {
const query = await createQuery({
fields: {
sql: "SELECT * FROM test_table WHERE id = 1",
},
transformer: `
data[0].id = data[0].id + 1;
return data;
`,
})
const result = await config.api.query.execute(query._id!)
expect(result.data).toEqual([
{
id: 2,
name: "one",
},
])
})
it("should be able to insert with bindings", async () => {
const query = await createQuery({
fields: {
sql: "INSERT INTO test_table (name) VALUES ({{ foo }})",
},
parameters: [
{
name: "foo",
default: "bar",
},
],
queryVerb: "create",
})
const result = await config.api.query.execute(query._id!, {
parameters: {
foo: "baz",
},
})
expect(result.data).toEqual([
{
created: true,
},
])
await withClient(async client => {
const { rows } = await client.query(
"SELECT * FROM test_table WHERE name = 'baz'"
)
expect(rows).toHaveLength(1)
})
})
})

View File

@ -16,9 +16,9 @@ jest.mock("@budibase/backend-core", () => {
},
}
})
import * as setup from "./utilities"
import { checkBuilderEndpoint } from "./utilities/TestFunctions"
import { checkCacheForDynamicVariable } from "../../../threads/utils"
import * as setup from "../utilities"
import { checkBuilderEndpoint } from "../utilities/TestFunctions"
import { checkCacheForDynamicVariable } from "../../../../threads/utils"
const { basicQuery, basicDatasource } = setup.structures
import { events, db as dbCore } from "@budibase/backend-core"

View File

@ -12,7 +12,6 @@ import {
FieldTypeSubtypes,
FormulaType,
INTERNAL_TABLE_SOURCE_ID,
MonthlyQuotaName,
PermissionLevel,
QuotaUsageType,
RelationshipType,
@ -53,7 +52,7 @@ describe.each([
afterAll(async () => {
if (dsProvider) {
await dsProvider.stopContainer()
await dsProvider.stop()
}
setup.afterAll()
})
@ -63,7 +62,7 @@ describe.each([
if (dsProvider) {
await config.createDatasource({
datasource: await dsProvider.getDsConfig(),
datasource: await dsProvider.datasource(),
})
}
})
@ -117,16 +116,6 @@ describe.each([
return total
}
const getQueryUsage = async () => {
const { total } = await config.doInContext(null, () =>
quotas.getCurrentUsageValues(
QuotaUsageType.MONTHLY,
MonthlyQuotaName.QUERIES
)
)
return total
}
const assertRowUsage = async (expected: number) => {
const usage = await getRowUsage()
expect(usage).toBe(expected)
@ -162,7 +151,6 @@ describe.each([
describe("save, load, update", () => {
it("returns a success message when the row is created", async () => {
const rowUsage = await getRowUsage()
const queryUsage = await getQueryUsage()
const res = await request
.post(`/api/${tableId}/rows`)
@ -180,7 +168,6 @@ describe.each([
it("Increment row autoId per create row request", async () => {
const rowUsage = await getRowUsage()
const queryUsage = await getQueryUsage()
const tableConfig = generateTableConfig()
const newTable = await createTable(
@ -231,7 +218,6 @@ describe.each([
it("updates a row successfully", async () => {
const existing = await config.createRow()
const rowUsage = await getRowUsage()
const queryUsage = await getQueryUsage()
const res = await config.api.row.save(tableId, {
_id: existing._id,
@ -246,7 +232,6 @@ describe.each([
it("should load a row", async () => {
const existing = await config.createRow()
const queryUsage = await getQueryUsage()
const res = await config.api.row.get(tableId, existing._id!)
@ -268,7 +253,6 @@ describe.each([
}
const firstRow = await config.createRow({ tableId })
await config.createRow(newRow)
const queryUsage = await getQueryUsage()
const res = await config.api.row.fetch(tableId)
@ -279,7 +263,6 @@ describe.each([
it("load should return 404 when row does not exist", async () => {
await config.createRow()
const queryUsage = await getQueryUsage()
await config.api.row.get(tableId, "1234567", {
expectStatus: 404,
@ -530,7 +513,6 @@ describe.each([
const existing = await config.createRow()
const rowUsage = await getRowUsage()
const queryUsage = await getQueryUsage()
const row = await config.api.row.patch(table._id!, {
_id: existing._id!,
@ -552,7 +534,6 @@ describe.each([
it("should throw an error when given improper types", async () => {
const existing = await config.createRow()
const rowUsage = await getRowUsage()
const queryUsage = await getQueryUsage()
await config.api.row.patch(
table._id!,
@ -650,7 +631,6 @@ describe.each([
it("should be able to delete a row", async () => {
const createdRow = await config.createRow()
const rowUsage = await getRowUsage()
const queryUsage = await getQueryUsage()
const res = await config.api.row.delete(table._id!, [createdRow])
expect(res.body[0]._id).toEqual(createdRow._id)
@ -666,7 +646,6 @@ describe.each([
it("should return no errors on valid row", async () => {
const rowUsage = await getRowUsage()
const queryUsage = await getQueryUsage()
const res = await config.api.row.validate(table._id!, { name: "ivan" })
@ -677,7 +656,6 @@ describe.each([
it("should errors on invalid row", async () => {
const rowUsage = await getRowUsage()
const queryUsage = await getQueryUsage()
const res = await config.api.row.validate(table._id!, { name: 1 })
@ -703,7 +681,6 @@ describe.each([
const row1 = await config.createRow()
const row2 = await config.createRow()
const rowUsage = await getRowUsage()
const queryUsage = await getQueryUsage()
const res = await config.api.row.delete(table._id!, [row1, row2])
@ -719,7 +696,6 @@ describe.each([
config.createRow(),
])
const rowUsage = await getRowUsage()
const queryUsage = await getQueryUsage()
const res = await config.api.row.delete(table._id!, [
row1,
@ -735,7 +711,6 @@ describe.each([
it("should accept a valid row object and delete the row", async () => {
const row1 = await config.createRow()
const rowUsage = await getRowUsage()
const queryUsage = await getQueryUsage()
const res = await config.api.row.delete(table._id!, row1)
@ -746,7 +721,6 @@ describe.each([
it("Should ignore malformed/invalid delete requests", async () => {
const rowUsage = await getRowUsage()
const queryUsage = await getQueryUsage()
const res = await config.api.row.delete(
table._id!,
@ -782,7 +756,6 @@ describe.each([
it("should be able to fetch tables contents via 'view'", async () => {
const row = await config.createRow()
const rowUsage = await getRowUsage()
const queryUsage = await getQueryUsage()
const res = await config.api.legacyView.get(table._id!)
expect(res.body.length).toEqual(1)
@ -792,7 +765,6 @@ describe.each([
it("should throw an error if view doesn't exist", async () => {
const rowUsage = await getRowUsage()
const queryUsage = await getQueryUsage()
await config.api.legacyView.get("derp", { expectStatus: 404 })
@ -808,7 +780,6 @@ describe.each([
})
const row = await config.createRow()
const rowUsage = await getRowUsage()
const queryUsage = await getQueryUsage()
const res = await config.api.legacyView.get(view.name)
expect(res.body.length).toEqual(1)
@ -864,7 +835,6 @@ describe.each([
}
)
const rowUsage = await getRowUsage()
const queryUsage = await getQueryUsage()
// test basic enrichment
const resBasic = await config.api.row.get(
@ -1100,7 +1070,6 @@ describe.each([
const createdRow = await config.createRow()
const rowUsage = await getRowUsage()
const queryUsage = await getQueryUsage()
await config.api.row.delete(view.id, [createdRow])
@ -1127,7 +1096,6 @@ describe.each([
config.createRow(),
])
const rowUsage = await getRowUsage()
const queryUsage = await getQueryUsage()
await config.api.row.delete(view.id, [rows[0], rows[2]])
@ -2063,7 +2031,7 @@ describe.each([
describe("Formula JS protection", () => {
it("should time out JS execution if a single cell takes too long", async () => {
await config.withEnv({ JS_PER_EXECUTION_TIME_LIMIT_MS: 20 }, async () => {
await config.withEnv({ JS_PER_INVOCATION_TIMEOUT_MS: 20 }, async () => {
const js = Buffer.from(
`
let i = 0;
@ -2103,8 +2071,8 @@ describe.each([
it("should time out JS execution if a multiple cells take too long", async () => {
await config.withEnv(
{
JS_PER_EXECUTION_TIME_LIMIT_MS: 20,
JS_PER_REQUEST_TIME_LIMIT_MS: 40,
JS_PER_INVOCATION_TIMEOUT_MS: 20,
JS_PER_REQUEST_TIMEOUT_MS: 40,
},
async () => {
const js = Buffer.from(

View File

@ -80,7 +80,7 @@ describe("/static", () => {
.set(config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(200)
expect(res.body.signedUrl).toEqual("http://test.com/foo/bar")
expect(res.body.signedUrl).toEqual("http://example.com/foo/bar")
expect(res.body.publicUrl).toEqual(
`https://${bucket}.s3.eu-west-1.amazonaws.com/${key}`
)

View File

@ -16,7 +16,7 @@ import {
import { checkBuilderEndpoint } from "./utilities/TestFunctions"
import * as setup from "./utilities"
import sdk from "../../../sdk"
import uuid from "uuid"
import * as uuid from "uuid"
const { basicTable } = setup.structures

View File

@ -9,7 +9,7 @@ function user() {
_id: "user",
_rev: "rev",
createdAt: Date.now(),
email: "test@test.com",
email: "test@example.com",
roles: {},
tenantId: "default",
status: "active",

View File

@ -11,7 +11,7 @@ describe("test the outgoing webhook action", () => {
await config.init()
inputs = {
username: "joe_bloggs",
url: "http://www.test.com",
url: "http://www.example.com",
}
})
@ -19,7 +19,7 @@ describe("test the outgoing webhook action", () => {
it("should be able to run the action", async () => {
const res = await setup.runStep(setup.actions.discord.stepId, inputs)
expect(res.response.url).toEqual("http://www.test.com")
expect(res.response.url).toEqual("http://www.example.com")
expect(res.response.method).toEqual("post")
expect(res.success).toEqual(true)
})

View File

@ -9,34 +9,40 @@ describe("test the execute script action", () => {
afterAll(setup.afterAll)
it("should be able to execute a script", async () => {
let res = await setup.runStep(
setup.actions.EXECUTE_SCRIPT.stepId,
(inputs = {
code: "return 1 + 1",
})
)
const res = await setup.runStep(setup.actions.EXECUTE_SCRIPT.stepId, {
code: "return 1 + 1",
})
expect(res.value).toEqual(2)
expect(res.success).toEqual(true)
})
it("should handle a null value", async () => {
let res = await setup.runStep(
setup.actions.EXECUTE_SCRIPT.stepId,
(inputs = {
code: null,
})
)
const res = await setup.runStep(setup.actions.EXECUTE_SCRIPT.stepId, {
code: null,
})
expect(res.response.message).toEqual("Invalid inputs")
expect(res.success).toEqual(false)
})
it("should be able to handle an error gracefully", async () => {
let res = await setup.runStep(
it("should be able to get a value from context", async () => {
const res = await setup.runStep(
setup.actions.EXECUTE_SCRIPT.stepId,
(inputs = {
code: "return something.map(x => x.name)",
})
{
code: "return steps.map(d => d.value)",
},
{
steps: [{ value: 0 }, { value: 1 }],
}
)
expect(res.value).toEqual([0, 1])
expect(res.response).toBeUndefined()
expect(res.success).toEqual(true)
})
it("should be able to handle an error gracefully", async () => {
const res = await setup.runStep(setup.actions.EXECUTE_SCRIPT.stepId, {
code: "return something.map(x => x.name)",
})
expect(res.response).toEqual("ReferenceError: something is not defined")
expect(res.success).toEqual(false)
})

View File

@ -12,9 +12,9 @@ describe("test the outgoing webhook action", () => {
it("should be able to run the action", async () => {
const res = await runStep(actions.integromat.stepId, {
value1: "test",
url: "http://www.test.com",
url: "http://www.example.com",
})
expect(res.response.url).toEqual("http://www.test.com")
expect(res.response.url).toEqual("http://www.example.com")
expect(res.response.method).toEqual("post")
expect(res.success).toEqual(true)
})
@ -30,9 +30,9 @@ describe("test the outgoing webhook action", () => {
body: {
value: payload,
},
url: "http://www.test.com",
url: "http://www.example.com",
})
expect(res.response.url).toEqual("http://www.test.com")
expect(res.response.url).toEqual("http://www.example.com")
expect(res.response.method).toEqual("post")
expect(res.response.body).toEqual(payload)
expect(res.success).toEqual(true)
@ -45,7 +45,7 @@ describe("test the outgoing webhook action", () => {
body: {
value: payload,
},
url: "http://www.test.com",
url: "http://www.example.com",
})
expect(res.httpStatus).toEqual(400)
expect(res.response).toEqual("Invalid payload JSON")

View File

@ -11,7 +11,7 @@ describe("test the outgoing webhook action", () => {
await config.init()
inputs = {
requestMethod: "POST",
url: "www.test.com",
url: "www.example.com",
requestBody: JSON.stringify({
a: 1,
}),
@ -26,7 +26,7 @@ describe("test the outgoing webhook action", () => {
inputs
)
expect(res.success).toEqual(true)
expect(res.response.url).toEqual("http://www.test.com")
expect(res.response.url).toEqual("http://www.example.com")
expect(res.response.method).toEqual("POST")
expect(JSON.parse(res.response.body).a).toEqual(1)
})

View File

@ -33,7 +33,7 @@ describe("test the outgoing webhook action", () => {
jest
.spyOn(workerRequests, "sendSmtpEmail")
.mockImplementationOnce(async () =>
generateResponse("user1@test.com", "admin@test.com")
generateResponse("user1@example.com", "admin@example.com")
)
const invite = {
startTime: new Date(),
@ -43,8 +43,8 @@ describe("test the outgoing webhook action", () => {
url: "url",
}
inputs = {
to: "user1@test.com",
from: "admin@test.com",
to: "user1@example.com",
from: "admin@example.com",
subject: "hello",
contents: "testing",
cc: "cc",
@ -61,8 +61,8 @@ describe("test the outgoing webhook action", () => {
expect(res.success).toEqual(true)
expect(workerRequests.sendSmtpEmail).toHaveBeenCalledTimes(1)
expect(workerRequests.sendSmtpEmail).toHaveBeenCalledWith({
to: "user1@test.com",
from: "admin@test.com",
to: "user1@example.com",
from: "admin@example.com",
subject: "hello",
contents: "testing",
cc: "cc",

View File

@ -12,9 +12,9 @@ describe("test the outgoing webhook action", () => {
it("should be able to run the action", async () => {
const res = await runStep(actions.zapier.stepId, {
value1: "test",
url: "http://www.test.com",
url: "http://www.example.com",
})
expect(res.response.url).toEqual("http://www.test.com")
expect(res.response.url).toEqual("http://www.example.com")
expect(res.response.method).toEqual("post")
expect(res.success).toEqual(true)
})
@ -30,9 +30,9 @@ describe("test the outgoing webhook action", () => {
body: {
value: payload,
},
url: "http://www.test.com",
url: "http://www.example.com",
})
expect(res.response.url).toEqual("http://www.test.com")
expect(res.response.url).toEqual("http://www.example.com")
expect(res.response.method).toEqual("post")
expect(res.response.body).toEqual(
`{"platform":"budibase","value1":1,"value2":2,"value3":3,"value4":4,"value5":5,"name":"Adam","age":9}`
@ -47,7 +47,7 @@ describe("test the outgoing webhook action", () => {
body: {
value: payload,
},
url: "http://www.test.com",
url: "http://www.example.com",
})
expect(res.httpStatus).toEqual(400)
expect(res.response).toEqual("Invalid payload JSON")

View File

@ -1,4 +1,4 @@
const { v4 } = require("uuid")
import { v4 } from "uuid"
export default function (): string {
return v4().replace(/-/g, "")

View File

@ -71,9 +71,9 @@ const environment = {
SELF_HOSTED: process.env.SELF_HOSTED,
HTTP_MB_LIMIT: process.env.HTTP_MB_LIMIT,
FORKED_PROCESS_NAME: process.env.FORKED_PROCESS_NAME || "main",
JS_PER_EXECUTION_TIME_LIMIT_MS:
JS_PER_INVOCATION_TIMEOUT_MS:
parseIntSafe(process.env.JS_PER_EXECUTION_TIME_LIMIT_MS) || 1000,
JS_PER_REQUEST_TIME_LIMIT_MS: parseIntSafe(
JS_PER_REQUEST_TIMEOUT_MS: parseIntSafe(
process.env.JS_PER_REQUEST_TIME_LIMIT_MS
),
// old
@ -95,6 +95,8 @@ const environment = {
TOP_LEVEL_PATH:
process.env.TOP_LEVEL_PATH || process.env.SERVER_TOP_LEVEL_PATH,
APP_MIGRATION_TIMEOUT: parseIntSafe(process.env.APP_MIGRATION_TIMEOUT),
JS_RUNNER_MEMORY_LIMIT:
parseIntSafe(process.env.JS_RUNNER_MEMORY_LIMIT) || 64,
}
// threading can cause memory issues with node-ts in development

View File

@ -41,12 +41,12 @@ describe("postgres integrations", () => {
makeRequest = generateMakeRequest(apiKey, true)
postgresDatasource = await config.api.datasource.create(
await databaseTestProviders.postgres.getDsConfig()
await databaseTestProviders.postgres.datasource()
)
})
afterAll(async () => {
await databaseTestProviders.postgres.stopContainer()
await databaseTestProviders.postgres.stop()
})
beforeEach(async () => {
@ -1041,14 +1041,14 @@ describe("postgres integrations", () => {
describe("POST /api/datasources/verify", () => {
it("should be able to verify the connection", async () => {
const response = await config.api.datasource.verify({
datasource: await databaseTestProviders.postgres.getDsConfig(),
datasource: await databaseTestProviders.postgres.datasource(),
})
expect(response.status).toBe(200)
expect(response.body.connected).toBe(true)
})
it("should state an invalid datasource cannot connect", async () => {
const dbConfig = await databaseTestProviders.postgres.getDsConfig()
const dbConfig = await databaseTestProviders.postgres.datasource()
const response = await config.api.datasource.verify({
datasource: {
...dbConfig,
@ -1082,7 +1082,7 @@ describe("postgres integrations", () => {
beforeEach(async () => {
client = new Client(
(await databaseTestProviders.postgres.getDsConfig()).config!
(await databaseTestProviders.postgres.datasource()).config!
)
await client.connect()
})
@ -1125,7 +1125,7 @@ describe("postgres integrations", () => {
schema2 = "test-2"
beforeAll(async () => {
const dsConfig = await databaseTestProviders.postgres.getDsConfig()
const dsConfig = await databaseTestProviders.postgres.datasource()
const dbConfig = dsConfig.config!
client = new Client(dbConfig)

View File

@ -21,7 +21,6 @@ import environment from "../environment"
interface MongoDBConfig {
connectionString: string
db: string
tlsCertificateFile: string
tlsCertificateKeyFile: string
tlsCAFile: string
}
@ -320,16 +319,11 @@ const getSchema = () => {
if (environment.SELF_HOSTED) {
schema.datasource = {
...schema.datasource,
//@ts-ignore
// @ts-ignore
tls: {
type: DatasourceFieldType.FIELD_GROUP,
display: "Configure SSL",
fields: {
tlsCertificateFile: {
type: DatasourceFieldType.STRING,
required: false,
display: "Certificate file path",
},
tlsCertificateKeyFile: {
type: DatasourceFieldType.STRING,
required: false,
@ -356,7 +350,6 @@ class MongoIntegration implements IntegrationBase {
constructor(config: MongoDBConfig) {
this.config = config
const options: MongoClientOptions = {
tlsCertificateFile: config.tlsCertificateFile || undefined,
tlsCertificateKeyFile: config.tlsCertificateKeyFile || undefined,
tlsCAFile: config.tlsCAFile || undefined,
}
@ -525,7 +518,10 @@ class MongoIntegration implements IntegrationBase {
return await collection.findOneAndUpdate(
findAndUpdateJson.filter,
findAndUpdateJson.update,
findAndUpdateJson.options
{
...findAndUpdateJson.options,
includeResultMetadata: true,
}
)
}
case "count": {

View File

@ -29,6 +29,7 @@ import { Client, ClientConfig, types } from "pg"
import { getReadableErrorMessage } from "./base/errorMapping"
import { exec } from "child_process"
import { storeTempFile } from "../utilities/fileSystem"
import { env } from "@budibase/backend-core"
// Return "date" and "timestamp" types as plain strings.
// This lets us reference the original stored timezone.
@ -202,8 +203,13 @@ class PostgresIntegration extends Sql implements DatasourcePlus {
await this.openConnection()
response.connected = true
} catch (e: any) {
console.log(e)
response.error = e.message as string
if (typeof e.message === "string" && e.message !== "") {
response.error = e.message as string
} else if (typeof e.code === "string" && e.code !== "") {
response.error = e.code
} else {
response.error = "Unknown error"
}
} finally {
await this.closeConnection()
}
@ -428,6 +434,14 @@ class PostgresIntegration extends Sql implements DatasourcePlus {
}
async getExternalSchema() {
if (!env.SELF_HOSTED) {
// This is because it relies on shelling out to pg_dump, and we don't want
// to expose a shell injection vector.
throw new Error(
"schema export for Postgres is not supported in Budibase Cloud"
)
}
const dumpCommandParts = [
`user=${this.config.user}`,
`host=${this.config.host}`,

View File

@ -221,6 +221,7 @@ describe("MongoDB Integration", () => {
})
expect(args[2]).toEqual({
upsert: false,
includeResultMetadata: true,
})
})

View File

@ -1,14 +1,16 @@
jest.unmock("pg")
import { Datasource } from "@budibase/types"
import * as pg from "./postgres"
import * as postgres from "./postgres"
import * as mongodb from "./mongodb"
import { StartedTestContainer } from "testcontainers"
jest.setTimeout(30000)
export interface DatabasePlusTestProvider {
getDsConfig(): Promise<Datasource>
export interface DatabaseProvider {
start(): Promise<StartedTestContainer>
stop(): Promise<void>
datasource(): Promise<Datasource>
}
export const databaseTestProviders = {
postgres: pg,
}
export const databaseTestProviders = { postgres, mongodb }

View File

@ -0,0 +1,41 @@
import { Datasource, SourceName } from "@budibase/types"
import { GenericContainer, Wait, StartedTestContainer } from "testcontainers"
let container: StartedTestContainer | undefined
export async function start(): Promise<StartedTestContainer> {
return await new GenericContainer("mongo:7.0-jammy")
.withExposedPorts(27017)
.withEnvironment({
MONGO_INITDB_ROOT_USERNAME: "mongo",
MONGO_INITDB_ROOT_PASSWORD: "password",
})
.withWaitStrategy(
Wait.forSuccessfulCommand(`mongosh --eval "db.version()"`)
)
.start()
}
export async function datasource(): Promise<Datasource> {
if (!container) {
container = await start()
}
const host = container.getHost()
const port = container.getMappedPort(27017)
return {
type: "datasource",
source: SourceName.MONGODB,
plus: false,
config: {
connectionString: `mongodb://mongo:password@${host}:${port}`,
db: "mongo",
},
}
}
export async function stop() {
if (container) {
await container.stop()
container = undefined
}
}

View File

@ -3,45 +3,44 @@ import { GenericContainer, Wait, StartedTestContainer } from "testcontainers"
let container: StartedTestContainer | undefined
export async function getDsConfig(): Promise<Datasource> {
try {
if (!container) {
container = await new GenericContainer("postgres:16.1-bullseye")
.withExposedPorts(5432)
.withEnvironment({ POSTGRES_PASSWORD: "password" })
.withWaitStrategy(
Wait.forLogMessage(
"database system is ready to accept connections",
2
)
)
.start()
}
const host = container.getHost()
const port = container.getMappedPort(5432)
export async function start(): Promise<StartedTestContainer> {
return await new GenericContainer("postgres:16.1-bullseye")
.withExposedPorts(5432)
.withEnvironment({ POSTGRES_PASSWORD: "password" })
.withWaitStrategy(
Wait.forSuccessfulCommand(
"pg_isready -h localhost -p 5432"
).withStartupTimeout(10000)
)
.start()
}
return {
type: "datasource_plus",
source: SourceName.POSTGRES,
plus: true,
config: {
host,
port,
database: "postgres",
user: "postgres",
password: "password",
schema: "public",
ssl: false,
rejectUnauthorized: false,
ca: false,
},
}
} catch (err) {
throw new Error("**UNABLE TO CREATE TO POSTGRES CONTAINER**")
export async function datasource(): Promise<Datasource> {
if (!container) {
container = await start()
}
const host = container.getHost()
const port = container.getMappedPort(5432)
return {
type: "datasource_plus",
source: SourceName.POSTGRES,
plus: true,
config: {
host,
port,
database: "postgres",
user: "postgres",
password: "password",
schema: "public",
ssl: false,
rejectUnauthorized: false,
ca: false,
},
}
}
export async function stopContainer() {
export async function stop() {
if (container) {
await container.stop()
container = undefined

View File

@ -1,61 +0,0 @@
import vm from "vm"
import env from "./environment"
import { setJSRunner } from "@budibase/string-templates"
import { context, timers } from "@budibase/backend-core"
import tracer from "dd-trace"
type TrackerFn = <T>(f: () => T) => T
export function init() {
setJSRunner((js: string, ctx: vm.Context) => {
return tracer.trace("runJS", {}, span => {
const perRequestLimit = env.JS_PER_REQUEST_TIME_LIMIT_MS
let track: TrackerFn = f => f()
if (perRequestLimit) {
const bbCtx = tracer.trace("runJS.getCurrentContext", {}, span =>
context.getCurrentContext()
)
if (bbCtx) {
if (!bbCtx.jsExecutionTracker) {
span?.addTags({
createdExecutionTracker: true,
})
bbCtx.jsExecutionTracker = tracer.trace(
"runJS.createExecutionTimeTracker",
{},
span => timers.ExecutionTimeTracker.withLimit(perRequestLimit)
)
}
span?.addTags({
js: {
limitMS: bbCtx.jsExecutionTracker.limitMs,
elapsedMS: bbCtx.jsExecutionTracker.elapsedMS,
},
})
// We call checkLimit() here to prevent paying the cost of creating
// a new VM context below when we don't need to.
tracer.trace("runJS.checkLimitAndBind", {}, span => {
bbCtx.jsExecutionTracker!.checkLimit()
track = bbCtx.jsExecutionTracker!.track.bind(
bbCtx.jsExecutionTracker
)
})
}
}
ctx = {
...ctx,
alert: undefined,
setInterval: undefined,
setTimeout: undefined,
}
vm.createContext(ctx)
return track(() =>
vm.runInNewContext(js, ctx, {
timeout: env.JS_PER_EXECUTION_TIME_LIMIT_MS,
})
)
})
})
}

View File

@ -0,0 +1,15 @@
# Bundles for isolated-vm
[Isolated-vm](https://github.com/laverdet/isolated-vm) requires some libraries, such as the string-templates helpers, to be built into a single file without external dependencies. These libraries are mostly static. To avoid rebuilding them in every dev command, every test command and every pipeline, the compiled bundles are committed into the repo.
## How are they consumed?
These libraries are compiled with a special extension: `.ivm.bundle.js`. This extension is configured in [esbuild](/scripts/build.js) so that these files are not bundled as JavaScript but are treated as a `string` instead. This lets us read their contents at runtime and inject them into `isolated-vm`.
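The exact rule lives in the build script linked above; purely as an illustration (the plugin name, entry point and output path below are assumptions, not the repo's real configuration), an esbuild setup that treats these files as text rather than JavaScript might look roughly like this:
```js
// Sketch only: load any *.ivm.bundle.js file with esbuild's "text" loader,
// so importing/requiring it yields the file contents as a string at runtime.
const esbuild = require("esbuild")
const fs = require("fs")

const ivmBundleAsText = {
  name: "ivm-bundle-as-text",
  setup(build) {
    build.onLoad({ filter: /\.ivm\.bundle\.js$/ }, async args => ({
      contents: await fs.promises.readFile(args.path, "utf8"),
      loader: "text",
    }))
  },
}

esbuild
  .build({
    entryPoints: ["src/index.ts"], // assumption, not the real entry point
    bundle: true,
    platform: "node",
    outdir: "dist",
    plugins: [ivmBundleAsText],
  })
  .catch(() => process.exit(1))
```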
## How to update it?
These libraries are mostly static, but they may need updating from time to time when the source code changes. To do this, run the following command and commit the updated bundles:
```
yarn build:isolated-vm-libs
```

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1,4 @@
import { EJSON } from "bson"
export { deserialize } from "bson"
export const toJson = EJSON.deserialize

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1,12 @@
const {
getJsHelperList,
} = require("../../../../string-templates/src/helpers/list.js")
const helpers = getJsHelperList()
export default {
...helpers,
// stripProtocol points at a function that does not exist here; it is injected into the isolated-vm context at runtime
// @ts-ignore
// eslint-disable-next-line no-undef
stripProtocol: helpersStripProtocol,
}

View File

@ -0,0 +1,28 @@
import { utils } from "@budibase/shared-core"
import environment from "../../environment"
import fs from "fs"
export const enum BundleType {
HELPERS = "helpers",
BSON = "bson",
}
const bundleSourceCode = {
[BundleType.HELPERS]: "../bundles/index-helpers.ivm.bundle.js",
[BundleType.BSON]: "../bundles/bson.ivm.bundle.js",
}
export function loadBundle(type: BundleType) {
if (environment.isJest()) {
return fs.readFileSync(require.resolve(bundleSourceCode[type]), "utf-8")
}
switch (type) {
case BundleType.HELPERS:
return require("../bundles/index-helpers.ivm.bundle.js")
case BundleType.BSON:
return require("../bundles/bson.ivm.bundle.js")
default:
utils.unreachable(type)
}
}

View File

@ -0,0 +1,42 @@
import env from "../environment"
import { setJSRunner, JsErrorTimeout } from "@budibase/string-templates"
import tracer from "dd-trace"
import { IsolatedVM } from "./vm"
import { context } from "@budibase/backend-core"
export function init() {
setJSRunner((js: string, ctx: Record<string, any>) => {
return tracer.trace("runJS", {}, span => {
try {
const bbCtx = context.getCurrentContext()!
let { vm } = bbCtx
if (!vm) {
// Can't copy the native helpers into the isolate. We just ignore them as they are handled properly from the helpersSource
const { helpers, ...ctxToPass } = ctx
vm = new IsolatedVM({
memoryLimit: env.JS_RUNNER_MEMORY_LIMIT,
invocationTimeout: env.JS_PER_INVOCATION_TIMEOUT_MS,
isolateAccumulatedTimeout: env.JS_PER_REQUEST_TIMEOUT_MS,
})
.withContext(ctxToPass)
.withHelpers()
bbCtx.vm = vm
}
const result = vm.execute(js)
return result
} catch (error: any) {
if (error.message === "Script execution timed out.") {
throw new JsErrorTimeout()
}
throw error
}
})
})
}

View File

@ -0,0 +1,82 @@
import { validate as isValidUUID } from "uuid"
jest.mock("@budibase/handlebars-helpers/lib/math", () => {
const actual = jest.requireActual("@budibase/handlebars-helpers/lib/math")
return {
...actual,
random: () => 10,
}
})
jest.mock("@budibase/handlebars-helpers/lib/uuid", () => {
const actual = jest.requireActual("@budibase/handlebars-helpers/lib/uuid")
return {
...actual,
uuid: () => "f34ebc66-93bd-4f7c-b79b-92b5569138bc",
}
})
import { processStringSync, encodeJSBinding } from "@budibase/string-templates"
const { runJsHelpersTests } = require("@budibase/string-templates/test/utils")
import tk from "timekeeper"
import { init } from ".."
import TestConfiguration from "../../tests/utilities/TestConfiguration"
tk.freeze("2021-01-21T12:00:00")
describe("jsRunner", () => {
const config = new TestConfiguration()
beforeAll(async () => {
// Register js runner
init()
await config.init()
})
const processJS = (js: string, context?: object) => {
return config.doInContext(config.getAppId(), async () =>
processStringSync(encodeJSBinding(js), context || {})
)
}
it("it can run a basic javascript", async () => {
const output = await processJS(`return 1 + 2`)
expect(output).toBe(3)
})
it("should prevent sandbox escape", async () => {
const output = await processJS(
`return this.constructor.constructor("return process")()`
)
expect(output).toBe("Error while executing JS")
})
describe("helpers", () => {
runJsHelpersTests({
funcWrap: (func: any) => config.doInContext(config.getAppId(), func),
testsToSkip: ["random", "uuid"],
})
describe("uuid", () => {
it("uuid helper returns a valid uuid", async () => {
const result = await processJS("return helpers.uuid()")
expect(result).toBeDefined()
expect(isValidUUID(result)).toBe(true)
})
})
describe("random", () => {
it("random helper returns a valid number", async () => {
const min = 1
const max = 8
const result = await processJS(`return helpers.random(${min}, ${max})`)
expect(result).toBeDefined()
expect(result).toBeGreaterThanOrEqual(min)
expect(result).toBeLessThanOrEqual(max)
})
})
})
})

View File

@ -0,0 +1,270 @@
import ivm from "isolated-vm"
import bson from "bson"
import url from "url"
import crypto from "crypto"
import querystring from "querystring"
import { BundleType, loadBundle } from "../bundles"
import { VM } from "@budibase/types"
class ExecutionTimeoutError extends Error {
constructor(message: string) {
super(message)
this.name = "ExecutionTimeoutError"
}
}
class ModuleHandler {
private modules: {
import: string
moduleKey: string
module: ivm.Module
}[] = []
private generateRandomKey = () => `i${crypto.randomUUID().replace(/-/g, "")}`
registerModule(module: ivm.Module, imports: string) {
this.modules.push({
moduleKey: this.generateRandomKey(),
import: imports,
module: module,
})
}
generateImports() {
return this.modules
.map(m => `import ${m.import} from "${m.moduleKey}"`)
.join(";")
}
getModule(key: string) {
const module = this.modules.find(m => m.moduleKey === key)
return module?.module
}
}
export class IsolatedVM implements VM {
private isolate: ivm.Isolate
private vm: ivm.Context
private jail: ivm.Reference
private invocationTimeout: number
private isolateAccumulatedTimeout?: number
// By default the wrapper returns the code unchanged
private codeWrapper: (code: string) => string = code => code
private moduleHandler = new ModuleHandler()
private readonly resultKey = "results"
constructor({
memoryLimit,
invocationTimeout,
isolateAccumulatedTimeout,
}: {
memoryLimit: number
invocationTimeout: number
isolateAccumulatedTimeout?: number
}) {
this.isolate = new ivm.Isolate({ memoryLimit })
this.vm = this.isolate.createContextSync()
this.jail = this.vm.global
this.jail.setSync("global", this.jail.derefInto())
this.addToContext({
[this.resultKey]: { out: "" },
})
this.invocationTimeout = invocationTimeout
this.isolateAccumulatedTimeout = isolateAccumulatedTimeout
}
withHelpers() {
const urlModule = this.registerCallbacks({
resolve: url.resolve,
parse: url.parse,
})
const querystringModule = this.registerCallbacks({
escape: querystring.escape,
})
this.addToContext({
helpersStripProtocol: new ivm.Callback((str: string) => {
var parsed = url.parse(str) as any
parsed.protocol = ""
return parsed.format()
}),
})
const injectedRequire = `const require=function req(val) {
switch (val) {
case "url": return ${urlModule};
case "querystring": return ${querystringModule};
}
}`
const helpersSource = loadBundle(BundleType.HELPERS)
const helpersModule = this.isolate.compileModuleSync(
`${injectedRequire};${helpersSource}`
)
helpersModule.instantiateSync(this.vm, specifier => {
if (specifier === "crypto") {
const cryptoModule = this.registerCallbacks({
randomUUID: crypto.randomUUID,
})
const module = this.isolate.compileModuleSync(
`export default ${cryptoModule}`
)
module.instantiateSync(this.vm, specifier => {
throw new Error(`No imports allowed. Required: ${specifier}`)
})
return module
}
throw new Error(`No imports allowed. Required: ${specifier}`)
})
this.moduleHandler.registerModule(helpersModule, "helpers")
return this
}
withContext(context: Record<string, any>) {
this.addToContext(context)
return this
}
withParsingBson(data: any) {
this.addToContext({
bsonData: bson.BSON.serialize({ data }),
})
// If we need to parse BSON, we follow these steps:
// 1. Serialise the data from potential BSON to a buffer before passing it to the isolate
// 2. Deserialise the data within the isolate to get the original data back
// 3. Run the script
// 4. Stringify the result to convert it from BSON to JSON
this.codeWrapper = code =>
`(function(){
const data = deserialize(bsonData, { validation: { utf8: false } }).data;
const result = ${code}
return toJson(result);
})();`
const bsonSource = loadBundle(BundleType.BSON)
this.addToContext({
textDecoderCb: new ivm.Callback(
(args: {
constructorArgs: any
functionArgs: Parameters<InstanceType<typeof TextDecoder>["decode"]>
}) => {
const result = new TextDecoder(...args.constructorArgs).decode(
...args.functionArgs
)
return result
}
),
})
// "Polyfilling" text decoder. `bson.deserialize` requires decoding. We are creating a bridge function so we don't need to inject the full library
const textDecoderPolyfill = class TextDecoder {
constructorArgs
constructor(...constructorArgs: any) {
this.constructorArgs = constructorArgs
}
decode(...input: any) {
// @ts-ignore
return textDecoderCb({
constructorArgs: this.constructorArgs,
functionArgs: input,
})
}
}.toString()
const bsonModule = this.isolate.compileModuleSync(
`${textDecoderPolyfill};${bsonSource}`
)
bsonModule.instantiateSync(this.vm, specifier => {
throw new Error(`No imports allowed. Required: ${specifier}`)
})
this.moduleHandler.registerModule(bsonModule, "{deserialize, toJson}")
return this
}
execute(code: string): any {
if (this.isolateAccumulatedTimeout) {
const cpuMs = Number(this.isolate.cpuTime) / 1e6
if (cpuMs > this.isolateAccumulatedTimeout) {
throw new ExecutionTimeoutError(
`CPU time limit exceeded (${cpuMs}ms > ${this.isolateAccumulatedTimeout}ms)`
)
}
}
code = `${this.moduleHandler.generateImports()};results.out=${this.codeWrapper(
code
)};`
const script = this.isolate.compileModuleSync(code)
script.instantiateSync(this.vm, specifier => {
const module = this.moduleHandler.getModule(specifier)
if (module) {
return module
}
throw new Error(`"${specifier}" import not allowed`)
})
script.evaluateSync({ timeout: this.invocationTimeout })
const result = this.getFromContext(this.resultKey)
return result.out
}
private registerCallbacks(functions: Record<string, any>) {
const libId = crypto.randomUUID().replace(/-/g, "")
const x: Record<string, string> = {}
for (const [funcName, func] of Object.entries(functions)) {
const key = `f${libId}${funcName}cb`
x[funcName] = key
this.addToContext({
[key]: new ivm.Callback((...params: any[]) => (func as any)(...params)),
})
}
const mod =
`{` +
Object.entries(x)
.map(([key, func]) => `${key}: ${func}`)
.join() +
"}"
return mod
}
private addToContext(context: Record<string, any>) {
for (let key in context) {
const value = context[key]
this.jail.setSync(
key,
typeof value === "function"
? value
: new ivm.ExternalCopy(value).copyInto({ release: true })
)
}
}
private getFromContext(key: string) {
const ref = this.vm.global.getSync(key, { reference: true })
const result = ref.copySync()
ref.release()
return result
}
}

View File

@ -46,7 +46,7 @@ export const smtp = (conf?: SMTPConfig): SMTPConfig => {
config: {
port: 12345,
host: "smtptesthost.com",
from: "testfrom@test.com",
from: "testfrom@example.com",
subject: "Hello!",
secure: false,
...conf,

View File

@ -94,8 +94,8 @@ function buildRoles() {
}
describe("app user/group sync", () => {
const groupEmail = "test2@test.com",
normalEmail = "test@test.com"
const groupEmail = "test2@example.com",
normalEmail = "test@example.com"
async function checkEmail(
email: string,
opts?: { group?: boolean; notFound?: boolean }
@ -131,7 +131,7 @@ describe("app user/group sync", () => {
})
it("should be able to handle builder users", async () => {
await createUser("test3@test.com", {}, true)
await checkEmail("test3@test.com")
await createUser("test3@example.com", {}, true)
await checkEmail("test3@example.com")
})
})

View File

@ -14,6 +14,7 @@ import {
ATTACHMENT_DIRECTORY,
} from "./constants"
import fs from "fs"
import fsp from "fs/promises"
import { join } from "path"
import env from "../../../environment"
import { v4 as uuid } from "uuid"
@ -117,7 +118,7 @@ export async function exportApp(appId: string, config?: ExportOpts) {
ObjectStoreBuckets.APPS,
join(appPath, path)
)
fs.writeFileSync(join(tmpPath, path), contents)
await fsp.writeFile(join(tmpPath, path), contents)
}
}
// get all the files
@ -131,14 +132,14 @@ export async function exportApp(appId: string, config?: ExportOpts) {
const downloadedPath = join(tmpPath, appPath)
if (fs.existsSync(downloadedPath)) {
const allFiles = fs.readdirSync(downloadedPath)
const allFiles = await fsp.readdir(downloadedPath)
for (let file of allFiles) {
const path = join(downloadedPath, file)
// move out of app directory, simplify structure
fs.renameSync(path, join(downloadedPath, "..", file))
await fsp.rename(path, join(downloadedPath, "..", file))
}
// remove the old app directory created by object export
fs.rmdirSync(downloadedPath)
await fsp.rmdir(downloadedPath)
}
// enforce an export of app DB to the tmp path
const dbPath = join(tmpPath, DB_EXPORT_FILE)
@ -148,7 +149,7 @@ export async function exportApp(appId: string, config?: ExportOpts) {
})
if (config?.encryptPassword) {
for (let file of fs.readdirSync(tmpPath)) {
for (let file of await fsp.readdir(tmpPath)) {
const path = join(tmpPath, file)
// skip the attachments - too big to encrypt
@ -157,7 +158,7 @@ export async function exportApp(appId: string, config?: ExportOpts) {
{ dir: tmpPath, filename: file },
config.encryptPassword
)
fs.rmSync(path)
await fsp.rm(path)
}
}
}
@ -165,9 +166,9 @@ export async function exportApp(appId: string, config?: ExportOpts) {
// if tar requested, return where the tarball is
if (config?.tar) {
// now the tmpPath contains both the DB export and attachments, tar this
const tarPath = await tarFilesToTmp(tmpPath, fs.readdirSync(tmpPath))
const tarPath = await tarFilesToTmp(tmpPath, await fsp.readdir(tmpPath))
// cleanup the tmp export files as tarball returned
fs.rmSync(tmpPath, { recursive: true, force: true })
await fsp.rm(tmpPath, { recursive: true, force: true })
return tarPath
}

View File

@ -17,6 +17,7 @@ import { downloadTemplate } from "../../../utilities/fileSystem"
import { ObjectStoreBuckets } from "../../../constants"
import { join } from "path"
import fs from "fs"
import fsp from "fs/promises"
import sdk from "../../"
import { v4 as uuid } from "uuid"
import tar from "tar"
@ -119,7 +120,7 @@ async function getTemplateStream(template: TemplateType) {
export async function untarFile(file: { path: string }) {
const tmpPath = join(budibaseTempDir(), uuid())
fs.mkdirSync(tmpPath)
await fsp.mkdir(tmpPath)
// extract the tarball
await tar.extract({
cwd: tmpPath,
@ -130,12 +131,12 @@ export async function untarFile(file: { path: string }) {
async function decryptFiles(path: string, password: string) {
try {
for (let file of fs.readdirSync(path)) {
for (let file of await fsp.readdir(path)) {
const inputPath = join(path, file)
if (!inputPath.endsWith(ATTACHMENT_DIRECTORY)) {
const outputPath = inputPath.replace(/\.enc$/, "")
await encryption.decryptFile(inputPath, outputPath, password)
fs.rmSync(inputPath)
await fsp.rm(inputPath)
}
}
} catch (err: any) {
@ -164,14 +165,14 @@ export async function importApp(
let dbStream: any
const isTar = template.file && template?.file?.type?.endsWith("gzip")
const isDirectory =
template.file && fs.lstatSync(template.file.path).isDirectory()
template.file && (await fsp.lstat(template.file.path)).isDirectory()
let tmpPath: string | undefined = undefined
if (template.file && (isTar || isDirectory)) {
tmpPath = isTar ? await untarFile(template.file) : template.file.path
if (isTar && template.file.password) {
await decryptFiles(tmpPath, template.file.password)
}
const contents = fs.readdirSync(tmpPath)
const contents = await fsp.readdir(tmpPath)
// have to handle object import
if (contents.length && opts.importObjStoreContents) {
let promises = []
@ -182,7 +183,7 @@ export async function importApp(
continue
}
filename = join(prodAppId, filename)
if (fs.lstatSync(path).isDirectory()) {
if ((await fsp.lstat(path)).isDirectory()) {
promises.push(
objectStore.uploadDirectory(ObjectStoreBuckets.APPS, path, filename)
)
@ -211,7 +212,7 @@ export async function importApp(
await updateAutomations(prodAppId, db)
// clear up afterward
if (tmpPath) {
fs.rmSync(tmpPath, { recursive: true, force: true })
await fsp.rm(tmpPath, { recursive: true, force: true })
}
return ok
}

View File

@ -76,7 +76,7 @@ describe.each([tableWithUserCol, tableWithUsersCol])(
})
it("shouldn't change any other input", () => {
const email = "test@test.com"
const email = "test@example.com"
const params: SearchParams = {
tableId,
query: {

View File

@ -10,6 +10,7 @@ import { ApplicationAPI } from "./application"
import { BackupAPI } from "./backup"
import { AttachmentAPI } from "./attachment"
import { UserAPI } from "./user"
import { QueryAPI } from "./query"
export default class API {
table: TableAPI
@ -23,6 +24,7 @@ export default class API {
backup: BackupAPI
attachment: AttachmentAPI
user: UserAPI
query: QueryAPI
constructor(config: TestConfiguration) {
this.table = new TableAPI(config)
@ -36,5 +38,6 @@ export default class API {
this.backup = new BackupAPI(config)
this.attachment = new AttachmentAPI(config)
this.user = new UserAPI(config)
this.query = new QueryAPI(config)
}
}

View File

@ -0,0 +1,44 @@
import TestConfiguration from "../TestConfiguration"
import {
Query,
type ExecuteQueryRequest,
type ExecuteQueryResponse,
} from "@budibase/types"
import { TestAPI } from "./base"
export class QueryAPI extends TestAPI {
constructor(config: TestConfiguration) {
super(config)
}
create = async (body: Query): Promise<Query> => {
const res = await this.request
.post(`/api/queries`)
.set(this.config.defaultHeaders())
.send(body)
.expect("Content-Type", /json/)
if (res.status !== 200) {
throw new Error(JSON.stringify(res.body))
}
return res.body as Query
}
execute = async (
queryId: string,
body?: ExecuteQueryRequest
): Promise<ExecuteQueryResponse> => {
const res = await this.request
.post(`/api/v2/queries/${queryId}`)
.set(this.config.defaultHeaders())
.send(body)
.expect("Content-Type", /json/)
if (res.status !== 200) {
throw new Error(JSON.stringify(res.body))
}
return res.body
}
}

View File

@ -1,10 +1,10 @@
import { QuerySchema, Row } from "@budibase/types"
import { Datasource, QuerySchema, Row } from "@budibase/types"
export type WorkerCallback = (error: any, response?: any) => void
export interface QueryEvent {
appId?: string
datasource: any
datasource: Datasource
queryVerb: string
fields: { [key: string]: any }
parameters: { [key: string]: any }

View File

@ -14,13 +14,13 @@ import { context, cache, auth } from "@budibase/backend-core"
import { getGlobalIDFromUserMetadataID } from "../db/utils"
import sdk from "../sdk"
import { cloneDeep } from "lodash/fp"
import { Query } from "@budibase/types"
import { Datasource, Query, SourceName } from "@budibase/types"
import { isSQL } from "../integrations/utils"
import { interpolateSQL } from "../integrations/queries/sql"
class QueryRunner {
datasource: any
datasource: Datasource
queryVerb: string
queryId: string
fields: any
@ -68,7 +68,7 @@ class QueryRunner {
throw "Integration type does not exist."
}
if (datasourceClone.config.authConfigs) {
if (datasourceClone.config?.authConfigs) {
const updatedConfigs = []
for (let config of datasourceClone.config.authConfigs) {
updatedConfigs.push(await sdk.queries.enrichContext(config, this.ctx))
@ -93,7 +93,7 @@ class QueryRunner {
const enrichedContext = { ...enrichedParameters, ...this.ctx }
// Parse global headers
if (datasourceClone.config.defaultHeaders) {
if (datasourceClone.config?.defaultHeaders) {
datasourceClone.config.defaultHeaders = await sdk.queries.enrichContext(
datasourceClone.config.defaultHeaders,
enrichedContext
@ -127,10 +127,16 @@ class QueryRunner {
// transform as required
if (transformer) {
const runner = new ScriptRunner(transformer, {
data: rows,
params: enrichedParameters,
})
const runner = new ScriptRunner(
transformer,
{
data: rows,
params: enrichedParameters,
},
{
parseBson: datasource.source === SourceName.MONGODB,
}
)
rows = runner.execute()
}

View File

@ -4,7 +4,7 @@ import { resolve, join } from "path"
import env from "../../environment"
import tar from "tar"
const uuid = require("uuid/v4")
import { v4 as uuid } from "uuid"
export const TOP_LEVEL_PATH =
env.TOP_LEVEL_PATH || resolve(join(__dirname, "..", "..", ".."))

View File

@ -1,28 +1,40 @@
import fetch from "node-fetch"
import { VM, VMScript } from "vm2"
import tracer, { Span } from "dd-trace"
import env from "../environment"
import { IsolatedVM } from "../jsRunner/vm"
const JS_TIMEOUT_MS = 1000
class ScriptRunner {
vm: VM
results: { out: string }
script: VMScript
private code: string
private vm: IsolatedVM
constructor(script: string, context: any) {
const code = `let fn = () => {\n${script}\n}; results.out = fn();`
this.vm = new VM({
timeout: JS_TIMEOUT_MS,
})
this.results = { out: "" }
this.vm.setGlobals(context)
this.vm.setGlobal("fetch", fetch)
this.vm.setGlobal("results", this.results)
this.script = new VMScript(code)
private tracerSpan: Span
constructor(script: string, context: any, { parseBson = false } = {}) {
this.tracerSpan = tracer.startSpan("scriptRunner", { tags: { parseBson } })
this.code = `(() => {${script}})();`
this.vm = new IsolatedVM({
memoryLimit: env.JS_RUNNER_MEMORY_LIMIT,
invocationTimeout: JS_TIMEOUT_MS,
}).withContext(context)
if (parseBson && context.data) {
this.vm = this.vm.withParsingBson(context.data)
}
}
execute() {
this.vm.run(this.script)
return this.results.out
const result = tracer.trace(
"scriptRunner.execute",
{ childOf: this.tracerSpan },
() => {
const result = this.vm.execute(this.code)
return result
}
)
this.tracerSpan.finish()
return result
}
}

View File

@ -2,16 +2,17 @@
"name": "@budibase/string-templates",
"version": "0.0.0",
"description": "Handlebars wrapper for Budibase templating.",
"main": "src/index.cjs",
"main": "src/index.js",
"module": "dist/bundle.mjs",
"license": "MPL-2.0",
"types": "dist/index.d.ts",
"exports": {
".": {
"require": "./src/index.cjs",
"require": "./src/index.js",
"import": "./dist/bundle.mjs"
},
"./package.json": "./package.json"
"./package.json": "./package.json",
"./test/utils": "./test/utils.js"
},
"files": [
"dist",
@ -20,7 +21,7 @@
],
"scripts": {
"build": "tsc && rollup -c",
"dev": "tsc && rollup -cw",
"dev": "concurrently \"tsc --watch\" \"rollup -cw\"",
"test": "jest",
"manifest": "node ./scripts/gen-collection-info.js"
},
@ -28,12 +29,12 @@
"@budibase/handlebars-helpers": "^0.13.1",
"dayjs": "^1.10.8",
"handlebars": "^4.7.6",
"lodash.clonedeep": "^4.5.0",
"vm2": "^3.9.19"
"lodash.clonedeep": "^4.5.0"
},
"devDependencies": {
"@rollup/plugin-commonjs": "^17.1.0",
"@rollup/plugin-json": "^4.1.0",
"concurrently": "^8.2.2",
"doctrine": "^3.0.0",
"jest": "29.7.0",
"marked": "^4.0.10",

View File

@ -0,0 +1,11 @@
class JsErrorTimeout extends Error {
code = "ERR_SCRIPT_EXECUTION_TIMEOUT"
constructor() {
super()
}
}
module.exports = {
JsErrorTimeout,
}

View File

@ -4,7 +4,7 @@ const { LITERAL_MARKER } = require("../helpers/constants")
const { getJsHelperList } = require("./list")
// The method of executing JS scripts depends on the bundle being built.
// This setter is used in the entrypoint (either index.cjs or index.mjs).
// This setter is used in the entrypoint (either index.js or index.mjs).
let runJS
module.exports.setJSRunner = runner => (runJS = runner)
@ -42,7 +42,7 @@ module.exports.processJS = (handlebars, context) => {
try {
// Wrap JS in a function and immediately invoke it.
// This is required to allow the final `return` statement to be valid.
const js = `function run(){${atob(handlebars)}};run();`
const js = `(function(){${atob(handlebars)}})();`
// Our $ context function gets a value from context.
// We clone the context to avoid mutation in the binding affecting real

View File

@ -1,29 +1,42 @@
const externalHandlebars = require("./external")
const helperList = require("@budibase/handlebars-helpers")
const { date, duration } = require("./date")
let helpers = undefined
// https://github.com/evanw/esbuild/issues/56
const externalCollections = {
math: require("@budibase/handlebars-helpers/lib/math"),
array: require("@budibase/handlebars-helpers/lib/array"),
number: require("@budibase/handlebars-helpers/lib/number"),
url: require("@budibase/handlebars-helpers/lib/url"),
string: require("@budibase/handlebars-helpers/lib/string"),
comparison: require("@budibase/handlebars-helpers/lib/comparison"),
object: require("@budibase/handlebars-helpers/lib/object"),
regex: require("@budibase/handlebars-helpers/lib/regex"),
uuid: require("@budibase/handlebars-helpers/lib/uuid"),
}
const helpersToRemoveForJs = ["sortBy"]
module.exports.helpersToRemoveForJs = helpersToRemoveForJs
const addedHelpers = {
date: date,
duration: duration,
}
let helpers = undefined
module.exports.getJsHelperList = () => {
if (helpers) {
return helpers
}
helpers = {}
let constructed = []
for (let collection of externalHandlebars.externalCollections) {
constructed.push(helperList[collection]())
}
for (let collection of constructed) {
for (let collection of Object.values(externalCollections)) {
for (let [key, func] of Object.entries(collection)) {
// Handlebars injects the hbs options into the helpers by default. We add an empty {} as the last parameter to simulate this
helpers[key] = (...props) => func(...props, {})
}
}
for (let key of Object.keys(externalHandlebars.addedHelpers)) {
helpers[key] = externalHandlebars.addedHelpers[key]
for (let key of Object.keys(addedHelpers)) {
helpers[key] = addedHelpers[key]
}
for (const toRemove of helpersToRemoveForJs) {

View File

@ -1,38 +0,0 @@
const templates = require("./index.js")
/**
* CJS entrypoint for rollup
*/
module.exports.isValid = templates.isValid
module.exports.makePropSafe = templates.makePropSafe
module.exports.getManifest = templates.getManifest
module.exports.isJSBinding = templates.isJSBinding
module.exports.encodeJSBinding = templates.encodeJSBinding
module.exports.decodeJSBinding = templates.decodeJSBinding
module.exports.processStringSync = templates.processStringSync
module.exports.processObjectSync = templates.processObjectSync
module.exports.processString = templates.processString
module.exports.processObject = templates.processObject
module.exports.doesContainStrings = templates.doesContainStrings
module.exports.doesContainString = templates.doesContainString
module.exports.disableEscaping = templates.disableEscaping
module.exports.findHBSBlocks = templates.findHBSBlocks
module.exports.convertToJS = templates.convertToJS
module.exports.setJSRunner = templates.setJSRunner
module.exports.FIND_ANY_HBS_REGEX = templates.FIND_ANY_HBS_REGEX
module.exports.helpersToRemoveForJs = templates.helpersToRemoveForJs
if (!process.env.NO_JS) {
const { VM } = require("vm2")
const { setJSRunner } = require("./helpers/javascript")
/**
* Use vm2 to run JS scripts in a node env
*/
setJSRunner((js, context) => {
const vm = new VM({
sandbox: context,
timeout: 1000,
})
return vm.run(js)
})
}

Some files were not shown because too many files have changed in this diff