Merge branch 'master' of github.com:Budibase/budibase into views-openapi

Author: mike12345567
Date: 2024-11-20 15:02:06 +00:00
Commit: 2da873c6a1
44 changed files with 787 additions and 1864 deletions

View File

@ -9,8 +9,5 @@ packages/server/client
packages/server/coverage
packages/builder/.routify
packages/sdk/sdk
packages/account-portal/packages/server/build
packages/account-portal/packages/ui/.routify
packages/account-portal/packages/ui/build
**/*.ivm.bundle.js
packages/server/build/oldClientVersions/**/**

View File

@ -64,18 +64,15 @@ jobs:
- run: yarn --frozen-lockfile
# Run build all the projects
- name: Build OSS
run: yarn build:oss
- name: Build account portal
run: yarn build:account-portal
if: ${{ env.IS_OSS_CONTRIBUTOR == 'false' }}
- name: Build
run: yarn build
# Check the types of the projects built via esbuild
- name: Check types
run: |
if ${{ env.ONLY_AFFECTED_TASKS }}; then
yarn check:types --since=${{ env.NX_BASE_BRANCH }} --ignore @budibase/account-portal-server
yarn check:types --since=${{ env.NX_BASE_BRANCH }}
else
yarn check:types --ignore @budibase/account-portal-server
yarn check:types
fi
helm-lint:
@ -117,9 +114,9 @@ jobs:
- name: Test
run: |
if ${{ env.ONLY_AFFECTED_TASKS }}; then
yarn test --ignore=@budibase/worker --ignore=@budibase/server --since=${{ env.NX_BASE_BRANCH }}
yarn test -- --ignore=@budibase/worker --ignore=@budibase/server --no-prefix --since=${{ env.NX_BASE_BRANCH }} -- --verbose --reporters=default --reporters=github-actions
else
yarn test --ignore=@budibase/worker --ignore=@budibase/server
yarn test -- --ignore=@budibase/worker --ignore=@budibase/server --no-prefix -- --verbose --reporters=default --reporters=github-actions
fi
test-worker:
@ -141,16 +138,22 @@ jobs:
- name: Test worker
run: |
if ${{ env.ONLY_AFFECTED_TASKS }}; then
node scripts/run-affected.js --task=test --scope=@budibase/worker --since=${{ env.NX_BASE_BRANCH }}
else
yarn test --scope=@budibase/worker
AFFECTED=$(yarn --silent nx show projects --affected -t test --base=${{ env.NX_BASE_BRANCH }} -p @budibase/worker)
if [ -z "$AFFECTED" ]; then
echo "No affected tests to run"
exit 0
fi
fi
cd packages/worker
yarn test --verbose --reporters=default --reporters=github-actions
test-server:
runs-on: ubuntu-latest
strategy:
matrix:
datasource: [mssql, mysql, postgres, mongodb, mariadb, oracle, none]
datasource:
[mssql, mysql, postgres, mongodb, mariadb, oracle, sqs, none]
steps:
- name: Checkout repo
uses: actions/checkout@v4
@ -199,18 +202,22 @@ jobs:
env:
DATASOURCE: ${{ matrix.datasource }}
run: |
AFFECTED=$(yarn --silent nx show projects --affected -t test --base=${{ env.NX_BASE_BRANCH }} -p @budibase/server)
if [ -n "$AFFECTED" ]; then
cd packages/server
if [ "${{ matrix.datasource }}" == "none" ]; then
yarn test --filter ./src/tests/filters/non-datasource-tests.js --passWithNoTests
else
yarn test --filter ./src/tests/filters/datasource-tests.js --passWithNoTests
if ${{ env.ONLY_AFFECTED_TASKS }}; then
AFFECTED=$(yarn --silent nx show projects --affected -t test --base=${{ env.NX_BASE_BRANCH }} -p @budibase/server)
if [ -z "$AFFECTED" ]; then
echo "No affected tests to run"
exit 0
fi
else
echo "No affected tests to run"
fi
FILTER="./src/tests/filters/datasource-tests.js"
if [ "${{ matrix.datasource }}" == "none" ]; then
FILTER="./src/tests/filters/non-datasource-tests.js"
fi
cd packages/server
yarn test --filter $FILTER --verbose --reporters=default --reporters=github-actions
check-pro-submodule:
runs-on: ubuntu-latest
if: inputs.run_as_oss != true && (github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase')
@ -270,64 +277,6 @@ jobs:
echo 'All good, the submodule had been merged and setup correctly!'
fi
check-accountportal-submodule:
runs-on: ubuntu-latest
if: inputs.run_as_oss != true && (github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase')
steps:
- name: Checkout repo and submodules
uses: actions/checkout@v4
with:
submodules: true
token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
fetch-depth: 0
- uses: dorny/paths-filter@v3
id: changes
with:
filters: |
src:
- packages/account-portal/**
- if: steps.changes.outputs.src == 'true'
name: Check account portal commit
id: get_accountportal_commits
run: |
cd packages/account-portal
accountportal_commit=$(git rev-parse HEAD)
branch="${{ github.base_ref || github.ref_name }}"
echo "Running on branch '$branch' (base_ref=${{ github.base_ref }}, ref_name=${{ github.head_ref }})"
base_commit=$(git rev-parse origin/master)
if [[ ! -z $base_commit ]]; then
echo "target_branch=$branch"
echo "target_branch=$branch" >> "$GITHUB_OUTPUT"
echo "accountportal_commit=$accountportal_commit"
echo "accountportal_commit=$accountportal_commit" >> "$GITHUB_OUTPUT"
echo "base_commit=$base_commit"
echo "base_commit=$base_commit" >> "$GITHUB_OUTPUT"
else
echo "Nothing to do - branch to branch merge."
fi
- name: Check submodule merged to base branch
if: ${{ steps.get_accountportal_commits.outputs.base_commit != '' }}
uses: actions/github-script@v7
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
script: |
const submoduleCommit = '${{ steps.get_accountportal_commits.outputs.accountportal_commit }}';
const baseCommit = '${{ steps.get_accountportal_commits.outputs.base_commit }}';
if (submoduleCommit !== baseCommit) {
console.error('Submodule commit does not match the latest commit on the "${{ steps.get_accountportal_commits.outputs.target_branch }}" branch.');
console.error('Refer to the account portal repo to merge your changes: https://github.com/Budibase/account-portal/blob/master/docs/index.md')
process.exit(1);
} else {
console.log('All good, the submodule had been merged and setup correctly!')
}
check-lockfile:
runs-on: ubuntu-latest
steps:

.gitignore (vendored, 1 changed line)
View File

@ -8,6 +8,7 @@ packages/server/build/oldClientVersions/**/*
packages/builder/src/components/deploy/clientVersions.json
packages/server/src/integrations/tests/utils/*.lock
packages/builder/vite.config.mjs.timestamp*
packages/account-portal
# Logs
logs

.gitmodules (vendored, 3 changed lines)
View File

@ -1,6 +1,3 @@
[submodule "packages/pro"]
path = packages/pro
url = git@github.com:Budibase/budibase-pro.git
[submodule "packages/account-portal"]
path = packages/account-portal
url = git@github.com:Budibase/account-portal.git

View File

@ -9,8 +9,4 @@ packages/backend-core/coverage
packages/builder/.routify
packages/sdk/sdk
packages/pro/coverage
packages/account-portal/packages/ui/build
packages/account-portal/packages/ui/.routify
packages/account-portal/packages/server/build
packages/account-portal/packages/server/coverage
**/*.ivm.bundle.js

.vscode/launch.json (vendored, 10 changed lines)
View File

@ -20,16 +20,6 @@
"args": ["${workspaceFolder}/packages/worker/src/index.ts"],
"cwd": "${workspaceFolder}/packages/worker"
},
{
"name": "Camunda Worker",
"type": "node",
"request": "launch",
"runtimeArgs": ["--nolazy", "-r", "ts-node/register/transpile-only"],
"args": [
"${workspaceFolder}/packages/account-portal/packages/server/src/v2/run.ts"
],
"cwd": "${workspaceFolder}/packages/account-portal/packages/server"
},
{
"type": "chrome",
"request": "launch",

View File

@ -423,9 +423,9 @@ core-js-pure@^3.20.2:
integrity sha512-12VZfFIu+wyVbBebyHmRTuEE/tZrB4tJToWcwAMcsp3h4+sHR+fMJWbKpYiCRWlhFBq+KNyO8rIV9rTkeVmznQ==
cross-spawn@^7.0.2:
version "7.0.3"
resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6"
integrity sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==
version "7.0.6"
resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.6.tgz#8a58fe78f00dcd70c370451759dfbfaf03e8ee9f"
integrity sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==
dependencies:
path-key "^3.1.0"
shebang-command "^2.0.0"

View File

@ -1,12 +1,7 @@
{
"$schema": "node_modules/lerna/schemas/lerna-schema.json",
"version": "3.2.3",
"version": "3.2.9",
"npmClient": "yarn",
"packages": [
"packages/*",
"!packages/account-portal",
"packages/account-portal/packages/*"
],
"concurrency": 20,
"command": {
"publish": {

View File

@ -2,7 +2,6 @@
"$schema": "./node_modules/nx/schemas/nx-schema.json",
"tasksRunnerOptions": {
"default": {
"runner": "nx-cloud",
"options": {
"cacheableOperations": ["build", "test", "check:types"]
}

View File

@ -9,6 +9,7 @@
"@types/node": "20.10.0",
"@types/proper-lockfile": "^4.1.4",
"@typescript-eslint/parser": "6.9.0",
"depcheck": "^1.4.7",
"esbuild": "^0.18.17",
"esbuild-node-externals": "^1.14.0",
"eslint": "^8.52.0",
@ -35,11 +36,10 @@
"setup": "git config submodule.recurse true && git submodule update && node ./hosting/scripts/setup.js && yarn && yarn build && yarn dev",
"build": "DISABLE_V8_COMPILE_CACHE=1 NODE_OPTIONS=--max-old-space-size=1500 lerna run build --stream",
"build:apps": "DISABLE_V8_COMPILE_CACHE=1 yarn build --scope @budibase/server --scope @budibase/worker",
"build:oss": "DISABLE_V8_COMPILE_CACHE=1 NODE_OPTIONS=--max-old-space-size=1500 lerna run build --stream --ignore @budibase/account-portal-server --ignore @budibase/account-portal-ui",
"build:cli": "yarn build --scope @budibase/cli",
"build:account-portal": "NODE_OPTIONS=--max-old-space-size=1500 lerna run build --stream --scope @budibase/account-portal-server --scope @budibase/account-portal-ui",
"build:dev": "lerna run --stream prebuild && yarn nx run-many --target=build --output-style=dynamic --watch --preserveWatchOutput",
"check:types": "lerna run --concurrency 2 check:types --ignore @budibase/account-portal-server",
"check:types": "yarn check:dependencies && lerna run --concurrency 2 check:types",
"check:dependencies": "lerna run --concurrency 2 check:dependencies",
"build:sdk": "lerna run --stream build:sdk",
"deps:circular": "madge packages/server/dist/index.js packages/worker/src/index.ts packages/backend-core/dist/src/index.js packages/cli/src/index.js --circular",
"release": "lerna publish from-package --yes --force-publish --no-git-tag-version --no-push --no-git-reset",
@ -52,15 +52,12 @@
"kill-server": "kill-port 4001 4002",
"kill-accountportal": "kill-port 3001 4003",
"kill-all": "yarn run kill-builder && yarn run kill-server && yarn kill-accountportal",
"dev": "yarn run kill-all && lerna run --parallel prebuild && lerna run --stream dev --ignore=@budibase/account-portal-ui --ignore @budibase/account-portal-server",
"dev:noserver": "yarn run kill-builder && lerna run --stream dev:stack:up --ignore @budibase/account-portal-server && lerna run --stream dev --ignore @budibase/backend-core --ignore @budibase/server --ignore @budibase/worker --ignore=@budibase/account-portal-ui --ignore @budibase/account-portal-server",
"dev": "yarn run kill-all && lerna run --parallel prebuild && lerna run --stream dev",
"dev:noserver": "yarn run kill-builder && lerna run --stream dev:stack:up && lerna run --stream dev --ignore @budibase/backend-core --ignore @budibase/server --ignore @budibase/worker",
"dev:server": "yarn run kill-server && lerna run --stream dev --scope @budibase/worker --scope @budibase/server",
"dev:accountportal": "yarn kill-accountportal && lerna run dev --stream --scope @budibase/account-portal-ui --scope @budibase/account-portal-server",
"dev:camunda": "./scripts/deploy-camunda.sh",
"dev:all": "yarn run kill-all && lerna run --stream dev",
"dev:built": "yarn run kill-all && cd packages/server && yarn dev:stack:up && cd ../../ && lerna run --stream dev:built",
"dev:docker": "./scripts/devDocker.sh",
"test": "lerna run --concurrency 1 --stream test --stream",
"test": "lerna run --concurrency 1 --stream test",
"test:containers:kill": "./scripts/killTestcontainers.sh",
"lint:eslint": "eslint packages --max-warnings=0",
"lint:prettier": "prettier --check \"packages/**/*.{js,ts,svelte}\" && prettier --write \"examples/**/*.{js,ts,svelte}\"",
@ -98,9 +95,7 @@
},
"workspaces": {
"packages": [
"packages/*",
"!packages/account-portal",
"packages/account-portal/packages/*"
"packages/*"
]
},
"resolutions": {

@ -1 +0,0 @@
Subproject commit 9bef5d1656b4f3c991447ded6d65b0eba393a140

View File

@ -2,5 +2,3 @@
!dist/**/*
dist/tsconfig.build.tsbuildinfo
!package.json
!src/**
!tests/**

View File

@ -9,6 +9,13 @@
"./tests": "./dist/tests/index.js",
"./*": "./dist/*.js"
},
"typesVersions": {
"*": {
"tests": [
"dist/tests/index.d.ts"
]
}
},
"author": "Budibase",
"license": "GPL-3.0",
"scripts": {
@ -17,6 +24,7 @@
"build": "tsc -p tsconfig.build.json --paths null && node ./scripts/build.js",
"build:dev": "yarn prebuild && tsc --build --watch --preserveWatchOutput",
"check:types": "tsc -p tsconfig.json --noEmit --paths null --target es2020",
"check:dependencies": "node ../../scripts/depcheck.js",
"test": "bash scripts/test.sh",
"test:watch": "jest --watchAll"
},
@ -36,6 +44,7 @@
"ioredis": "5.3.2",
"joi": "17.6.0",
"jsonwebtoken": "9.0.2",
"knex": "2.4.2",
"koa-passport": "^6.0.0",
"koa-pino-logger": "4.0.0",
"lodash": "4.17.21",
@ -54,9 +63,12 @@
"semver": "^7.5.4",
"tar-fs": "2.1.1",
"uuid": "^8.3.2",
"knex": "2.4.2"
"@techpass/passport-openidconnect": "0.3.3",
"google-auth-library": "^8.0.1",
"google-spreadsheet": "npm:@budibase/google-spreadsheet@4.1.5"
},
"devDependencies": {
"@jest/types": "^29.6.3",
"@shopify/jest-koa-mocks": "5.1.1",
"@swc/core": "1.3.71",
"@swc/jest": "0.2.27",
@ -64,6 +76,7 @@
"@types/cookies": "0.7.8",
"@types/jest": "29.5.5",
"@types/lodash": "4.14.200",
"@types/node": "^22.9.0",
"@types/node-fetch": "2.6.4",
"@types/pouchdb": "6.4.0",
"@types/redlock": "4.0.7",
@ -74,6 +87,7 @@
"ioredis-mock": "8.9.0",
"jest": "29.7.0",
"jest-serial-runner": "1.2.1",
"nock": "^13.5.6",
"pino-pretty": "10.0.0",
"pouchdb-adapter-memory": "7.2.2",
"testcontainers": "^10.7.2",

View File

@ -19,6 +19,12 @@ function isDev() {
return process.env.NODE_ENV !== "production"
}
function parseIntSafe(number?: string) {
if (number) {
return parseInt(number)
}
}
let LOADED = false
if (!LOADED && isDev() && !isTest()) {
require("dotenv").config()
@ -231,9 +237,7 @@ const environment = {
MIN_VERSION_WITHOUT_POWER_ROLE:
process.env.MIN_VERSION_WITHOUT_POWER_ROLE || "3.0.0",
DISABLE_CONTENT_SECURITY_POLICY: process.env.DISABLE_CONTENT_SECURITY_POLICY,
// stopgap migration strategy until we can ensure backwards compat without unsafe-inline in CSP
DISABLE_CSP_UNSAFE_INLINE_SCRIPTS:
process.env.DISABLE_CSP_UNSAFE_INLINE_SCRIPTS,
BSON_BUFFER_SIZE: parseIntSafe(process.env.BSON_BUFFER_SIZE),
}
export function setEnv(newEnvVars: Partial<typeof environment>): () => void {

View File

@ -1,5 +1,4 @@
import crypto from "crypto"
import env from "../environment"
const CSP_DIRECTIVES = {
"default-src": ["'self'"],
@ -97,10 +96,6 @@ export async function contentSecurityPolicy(ctx: any, next: any) {
`'nonce-${nonce}'`,
]
if (!env.DISABLE_CSP_UNSAFE_INLINE_SCRIPTS) {
directives["script-src"].push("'unsafe-inline'")
}
ctx.state.nonce = nonce
const cspHeader = Object.entries(directives)

View File

@ -81,6 +81,7 @@
"@spectrum-css/typography": "3.0.1",
"@spectrum-css/underlay": "2.0.9",
"@spectrum-css/vars": "3.0.1",
"atrament": "^4.3.0",
"dayjs": "^1.10.8",
"easymde": "^2.16.1",
"svelte-dnd-action": "^0.9.8",

View File

@ -1,4 +1,5 @@
<script>
import { tick } from "svelte"
import {
ModalContent,
TextArea,
@ -8,7 +9,6 @@
import { automationStore, selectedAutomation } from "stores/builder"
import AutomationBlockSetup from "../../SetupPanel/AutomationBlockSetup.svelte"
import { cloneDeep } from "lodash/fp"
import { memo } from "@budibase/frontend-core"
import { AutomationEventType } from "@budibase/types"
let failedParse = null
@ -63,8 +63,7 @@
return true
}
const memoTestData = memo(parseTestData($selectedAutomation.data.testData))
$: memoTestData.set(parseTestData($selectedAutomation.data.testData))
$: testData = testData || parseTestData($selectedAutomation.data.testData)
$: {
// clone the trigger so we're not mutating the reference
@ -83,7 +82,7 @@
$: isError =
!isTriggerValid(trigger) ||
!(trigger.schema.outputs.required || []).every(
required => $memoTestData?.[required] || required !== "row"
required => testData?.[required] || required !== "row"
)
function parseTestJSON(e) {
@ -110,11 +109,10 @@
}
const testAutomation = async () => {
// Ensure testData reactiveness is processed
await tick()
try {
await automationStore.actions.test(
$selectedAutomation.data,
$memoTestData
)
await automationStore.actions.test($selectedAutomation.data, testData)
$automationStore.showTestPanel = true
} catch (error) {
notifications.error(error)
@ -152,7 +150,7 @@
{#if selectedValues}
<div class="tab-content-padding">
<AutomationBlockSetup
testData={$memoTestData}
bind:testData
{schemaProperties}
isTestModal
block={trigger}

View File

@ -503,7 +503,15 @@
row: { "Active": true, "Order Id" : 14, ... }
})
*/
const onChange = Utils.sequential(async update => {
const onChange = async update => {
if (isTestModal) {
testData = update
}
updateAutomation(update)
}
const updateAutomation = Utils.sequential(async update => {
const request = cloneDeep(update)
// Process app trigger updates
if (isTrigger && !isTestModal) {

View File

@ -371,6 +371,7 @@
delete editableColumn.relationshipType
delete editableColumn.formulaType
delete editableColumn.constraints
delete editableColumn.responseType
// Add in defaults and initial definition
const definition = fieldDefinitions[type?.toUpperCase()]
@ -386,6 +387,7 @@
editableColumn.relationshipType = RelationshipType.MANY_TO_MANY
} else if (editableColumn.type === FieldType.FORMULA) {
editableColumn.formulaType = "dynamic"
editableColumn.responseType = field.responseType || FIELDS.STRING.type
}
}
@ -767,6 +769,25 @@
</div>
</div>
{/if}
<div class="split-label">
<div class="label-length">
<Label size="M">Response Type</Label>
</div>
<div class="input-length">
<Select
bind:value={editableColumn.responseType}
options={[
FIELDS.STRING,
FIELDS.NUMBER,
FIELDS.BOOLEAN,
FIELDS.DATETIME,
]}
getOptionLabel={option => option.name}
getOptionValue={option => option.type}
tooltip="Formulas by default will return a string - however if you need another type the response can be coerced."
/>
</div>
</div>
<div class="split-label">
<div class="label-length">
<Label size="M">Formula</Label>

View File

@ -84,8 +84,8 @@
on:mouseleave
on:click={onClick}
on:contextmenu
ondragover="return false"
ondragenter="return false"
on:dragover={e => e.preventDefault()}
on:dragenter={e => e.preventDefault()}
{id}
{style}
{draggable}

View File

@ -68,8 +68,8 @@
on:scroll
bind:this={scrollRef}
on:drop={onDrop}
ondragover="return false"
ondragenter="return false"
on:dragover={e => e.preventDefault()}
on:dragenter={e => e.preventDefault()}
>
<slot />
</div>

View File

@ -33,8 +33,7 @@
"sanitize-html": "^2.13.0",
"screenfull": "^6.0.1",
"shortid": "^2.2.15",
"svelte-spa-router": "^4.0.1",
"atrament": "^4.3.0"
"svelte-spa-router": "^4.0.1"
},
"devDependencies": {
"@rollup/plugin-alias": "^5.1.0",

View File

@ -1,5 +1,7 @@
import { makePropSafe as safe } from "@budibase/string-templates"
import { API } from "../api/index.js"
import { UILogicalOperator } from "@budibase/types"
import { OnEmptyFilter } from "@budibase/frontend-core/src/constants.js"
// Map of data types to component types for search fields inside blocks
const schemaComponentMap = {
@ -60,7 +62,11 @@ export const enrichSearchColumns = async (searchColumns, schema) => {
* @param formId the ID of the form containing the search fields
*/
export const enrichFilter = (filter, columns, formId) => {
let enrichedFilter = [...(filter || [])]
if (!columns?.length) {
return filter
}
let newFilters = []
columns?.forEach(column => {
const safePath = column.name.split(".").map(safe).join(".")
const stringType = column.type === "string" || column.type === "formula"
@ -69,7 +75,7 @@ export const enrichFilter = (filter, columns, formId) => {
// For dates, use a range of the entire day selected
if (dateType) {
enrichedFilter.push({
newFilters.push({
field: column.name,
type: column.type,
operator: "rangeLow",
@ -79,7 +85,7 @@ export const enrichFilter = (filter, columns, formId) => {
const format = "YYYY-MM-DDTHH:mm:ss.SSSZ"
let hbs = `{{ date (add (date ${binding} "x") 86399999) "${format}" }}`
hbs = `{{#if ${binding} }}${hbs}{{/if}}`
enrichedFilter.push({
newFilters.push({
field: column.name,
type: column.type,
operator: "rangeHigh",
@ -90,7 +96,7 @@ export const enrichFilter = (filter, columns, formId) => {
// For other fields, do an exact match
else {
enrichedFilter.push({
newFilters.push({
field: column.name,
type: column.type,
operator: stringType ? "string" : "equal",
@ -99,5 +105,16 @@ export const enrichFilter = (filter, columns, formId) => {
})
}
})
return enrichedFilter
return {
logicalOperator: UILogicalOperator.ALL,
onEmptyFilter: OnEmptyFilter.RETURN_ALL,
groups: [
...(filter?.groups || []),
{
logicalOperator: UILogicalOperator.ALL,
filters: newFilters,
},
],
}
}
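
For context, a minimal sketch of the new return shape (illustrative only, not part of this diff; the binding value fields are elided and formId is assumed to be in scope), given a single searchable string column:

// Hypothetical call: one string search column, no pre-existing filter
const enriched = enrichFilter(undefined, [{ name: "name", type: "string" }], formId)
// enrichFilter used to return a flat array of filters; it now returns a UI filter group:
// {
//   logicalOperator: UILogicalOperator.ALL,
//   onEmptyFilter: OnEmptyFilter.RETURN_ALL,
//   groups: [
//     {
//       logicalOperator: UILogicalOperator.ALL,
//       filters: [{ field: "name", type: "string", operator: "string", /* binding value omitted */ }],
//     },
//   ],
// }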

View File

@ -1,5 +1,21 @@
<script>
import TextCell from "./TextCell.svelte"
import DateCell from "./DateCell.svelte"
import NumberCell from "./NumberCell.svelte"
import BooleanCell from "./BooleanCell.svelte"
import { FieldType } from "@budibase/types"
export let schema
$: responseType = schema.responseType
</script>
<TextCell {...$$props} readonly />
{#if responseType === FieldType.NUMBER}
<NumberCell {...$$props} readonly />
{:else if responseType === FieldType.BOOLEAN}
<BooleanCell {...$$props} readonly />
{:else if responseType === FieldType.DATETIME}
<DateCell {...$$props} readonly />
{:else}
<TextCell {...$$props} readonly />
{/if}

View File

@ -13,6 +13,7 @@
"build": "node ./scripts/build.js",
"postbuild": "copyfiles -f ../client/dist/budibase-client.js ../client/manifest.json client && copyfiles -f ../../yarn.lock ./dist/",
"check:types": "tsc -p tsconfig.json --noEmit --paths null --target es2020",
"check:dependencies": "node ../../scripts/depcheck.js",
"build:isolated-vm-lib:snippets": "esbuild --minify --bundle src/jsRunner/bundles/snippets.ts --outfile=src/jsRunner/bundles/snippets.ivm.bundle.js --platform=node --format=iife --global-name=snippets",
"build:isolated-vm-lib:string-templates": "esbuild --minify --bundle src/jsRunner/bundles/index-helpers.ts --outfile=src/jsRunner/bundles/index-helpers.ivm.bundle.js --platform=node --format=iife --external:handlebars --global-name=helpers",
"build:isolated-vm-lib:bson": "esbuild --minify --bundle src/jsRunner/bundles/bsonPackage.ts --outfile=src/jsRunner/bundles/bson.ivm.bundle.js --platform=node --format=iife --global-name=bson",
@ -49,9 +50,11 @@
"license": "GPL-3.0",
"dependencies": {
"@apidevtools/swagger-parser": "10.0.3",
"@azure/msal-node": "^2.5.1",
"@budibase/backend-core": "0.0.0",
"@budibase/client": "0.0.0",
"@budibase/frontend-core": "0.0.0",
"@budibase/nano": "10.1.5",
"@budibase/pro": "0.0.0",
"@budibase/shared-core": "0.0.0",
"@budibase/string-templates": "0.0.0",
@ -69,6 +72,7 @@
"aws-sdk": "2.1030.0",
"bcrypt": "5.1.0",
"bcryptjs": "2.4.3",
"bson": "^6.9.0",
"buffer": "6.0.3",
"bull": "4.10.1",
"chokidar": "3.5.3",
@ -76,10 +80,12 @@
"cookies": "0.8.0",
"csvtojson": "2.0.10",
"curlconverter": "3.21.0",
"dayjs": "^1.10.8",
"dd-trace": "5.2.0",
"dotenv": "8.2.0",
"form-data": "4.0.0",
"global-agent": "3.0.0",
"google-auth-library": "^8.0.1",
"google-spreadsheet": "npm:@budibase/google-spreadsheet@4.1.5",
"ioredis": "5.3.2",
"isolated-vm": "^4.7.2",
@ -87,6 +93,7 @@
"joi": "17.6.0",
"js-yaml": "4.1.0",
"jsonschema": "1.4.0",
"jsonwebtoken": "9.0.2",
"knex": "2.4.2",
"koa": "2.13.4",
"koa-body": "4.2.0",
@ -109,10 +116,12 @@
"pouchdb-all-dbs": "1.1.1",
"pouchdb-find": "7.2.2",
"redis": "4",
"semver": "^7.5.4",
"serialize-error": "^7.0.1",
"server-destroy": "1.0.1",
"snowflake-promise": "^4.5.0",
"socket.io": "4.7.5",
"svelte": "^4.2.10",
"tar": "6.2.1",
"tmp": "0.2.3",
"to-json-schema": "0.2.5",
@ -123,6 +132,7 @@
},
"devDependencies": {
"@babel/preset-env": "7.16.11",
"@jest/types": "^29.6.3",
"@swc/core": "1.3.71",
"@swc/jest": "0.2.27",
"@types/archiver": "6.0.2",
@ -133,6 +143,7 @@
"@types/koa__router": "8.0.8",
"@types/lodash": "4.14.200",
"@types/mssql": "9.1.4",
"@types/node": "^22.9.0",
"@types/node-fetch": "2.6.4",
"@types/oracledb": "6.5.1",
"@types/pg": "8.6.6",
@ -141,8 +152,10 @@
"@types/tar": "6.1.5",
"@types/tmp": "0.2.6",
"@types/uuid": "8.3.4",
"chance": "^1.1.12",
"copyfiles": "2.4.1",
"docker-compose": "0.23.17",
"ioredis-mock": "8.9.0",
"jest": "29.7.0",
"jest-extended": "^4.0.2",
"jest-openapi": "0.14.2",
@ -158,7 +171,8 @@
"tsconfig-paths": "4.0.0",
"typescript": "5.5.2",
"update-dotenv": "1.1.1",
"yargs": "13.2.4"
"yargs": "13.2.4",
"@babel/core": "^7.22.5"
},
"nx": {
"targets": {

View File

@ -23,6 +23,7 @@ import {
} from "@budibase/types"
import { ValidQueryNameRegex, utils as JsonUtils } from "@budibase/shared-core"
import { findHBSBlocks } from "@budibase/string-templates"
import { ObjectId } from "mongodb"
const Runner = new Thread(ThreadType.QUERY, {
timeoutMs: env.QUERY_THREAD_TIMEOUT,
@ -223,6 +224,8 @@ export async function preview(
} else {
fieldMetadata = makeQuerySchema(FieldType.ARRAY, key)
}
} else if (field instanceof ObjectId) {
fieldMetadata = makeQuerySchema(FieldType.STRING, key)
} else {
fieldMetadata = makeQuerySchema(FieldType.JSON, key)
}

View File

@ -32,6 +32,7 @@ import {
JsonFieldSubType,
RowExportFormat,
RelationSchemaField,
FormulaResponseType,
} from "@budibase/types"
import { generator, mocks } from "@budibase/backend-core/tests"
import _, { merge } from "lodash"
@ -40,6 +41,7 @@ import { Knex } from "knex"
import { InternalTables } from "../../../db/utils"
import { withEnv } from "../../../environment"
import { JsTimeoutError } from "@budibase/string-templates"
import { isDate } from "../../../utilities"
jest.mock("@budibase/pro", () => ({
...jest.requireActual("@budibase/pro"),
@ -79,6 +81,10 @@ async function waitForEvent(
return await p
}
function encodeJS(binding: string) {
return `{{ js "${Buffer.from(binding).toString("base64")}"}}`
}
datasourceDescribe(
{ name: "/rows (%s)", exclude: [DatabaseName.MONGODB] },
({ config, dsProvider, isInternal, isMSSQL, isOracle }) => {
@ -3199,7 +3205,7 @@ datasourceDescribe(
describe("Formula fields", () => {
let table: Table
let otherTable: Table
let relatedRow: Row
let relatedRow: Row, mainRow: Row
beforeAll(async () => {
otherTable = await config.api.table.save(defaultTable())
@ -3227,7 +3233,7 @@ datasourceDescribe(
name: generator.word(),
description: generator.paragraph(),
})
await config.api.row.save(table._id!, {
mainRow = await config.api.row.save(table._id!, {
name: generator.word(),
description: generator.paragraph(),
tableId: table._id!,
@ -3235,6 +3241,25 @@ datasourceDescribe(
})
})
async function updateFormulaColumn(
formula: string,
opts?: { responseType?: FormulaResponseType; formulaType?: FormulaType }
) {
table = await config.api.table.save({
...table,
schema: {
...table.schema,
formula: {
name: "formula",
type: FieldType.FORMULA,
formula: formula,
responseType: opts?.responseType,
formulaType: opts?.formulaType || FormulaType.DYNAMIC,
},
},
})
}
it("should be able to search for rows containing formulas", async () => {
const { rows } = await config.api.row.search(table._id!)
expect(rows.length).toBe(1)
@ -3242,12 +3267,72 @@ datasourceDescribe(
const row = rows[0]
expect(row.formula).toBe(relatedRow.name)
})
it("should coerce - number response type", async () => {
await updateFormulaColumn(encodeJS("return 1"), {
responseType: FieldType.NUMBER,
})
const { rows } = await config.api.row.search(table._id!)
expect(rows[0].formula).toBe(1)
})
it("should coerce - boolean response type", async () => {
await updateFormulaColumn(encodeJS("return true"), {
responseType: FieldType.BOOLEAN,
})
const { rows } = await config.api.row.search(table._id!)
expect(rows[0].formula).toBe(true)
})
it("should coerce - datetime response type", async () => {
await updateFormulaColumn(encodeJS("return new Date()"), {
responseType: FieldType.DATETIME,
})
const { rows } = await config.api.row.search(table._id!)
expect(isDate(rows[0].formula)).toBe(true)
})
it("should coerce - datetime with invalid value", async () => {
await updateFormulaColumn(encodeJS("return 'a'"), {
responseType: FieldType.DATETIME,
})
const { rows } = await config.api.row.search(table._id!)
expect(rows[0].formula).toBeUndefined()
})
it("should coerce handlebars", async () => {
await updateFormulaColumn("{{ add 1 1 }}", {
responseType: FieldType.NUMBER,
})
const { rows } = await config.api.row.search(table._id!)
expect(rows[0].formula).toBe(2)
})
it("should coerce handlebars to string (default)", async () => {
await updateFormulaColumn("{{ add 1 1 }}", {
responseType: FieldType.STRING,
})
const { rows } = await config.api.row.search(table._id!)
expect(rows[0].formula).toBe("2")
})
isInternal &&
it("should coerce a static handlebars formula", async () => {
await updateFormulaColumn(encodeJS("return 1"), {
responseType: FieldType.NUMBER,
formulaType: FormulaType.STATIC,
})
// save the row to store the static value
await config.api.row.save(table._id!, mainRow)
const { rows } = await config.api.row.search(table._id!)
expect(rows[0].formula).toBe(1)
})
})
describe("Formula JS protection", () => {
it("should time out JS execution if a single cell takes too long", async () => {
await withEnv({ JS_PER_INVOCATION_TIMEOUT_MS: 40 }, async () => {
const js = Buffer.from(
const js = encodeJS(
`
let i = 0;
while (true) {
@ -3255,7 +3340,7 @@ datasourceDescribe(
}
return i;
`
).toString("base64")
)
const table = await config.api.table.save(
saveTableRequest({
@ -3267,7 +3352,7 @@ datasourceDescribe(
formula: {
name: "formula",
type: FieldType.FORMULA,
formula: `{{ js "${js}"}}`,
formula: js,
formulaType: FormulaType.DYNAMIC,
},
},
@ -3290,7 +3375,7 @@ datasourceDescribe(
JS_PER_REQUEST_TIMEOUT_MS: 80,
},
async () => {
const js = Buffer.from(
const js = encodeJS(
`
let i = 0;
while (true) {
@ -3298,7 +3383,7 @@ datasourceDescribe(
}
return i;
`
).toString("base64")
)
const table = await config.api.table.save(
saveTableRequest({
@ -3310,7 +3395,7 @@ datasourceDescribe(
formula: {
name: "formula",
type: FieldType.FORMULA,
formula: `{{ js "${js}"}}`,
formula: js,
formulaType: FormulaType.DYNAMIC,
},
},
@ -3352,7 +3437,7 @@ datasourceDescribe(
})
it("should not carry over context between formulas", async () => {
const js = Buffer.from(`return $("[text]");`).toString("base64")
const js = encodeJS(`return $("[text]");`)
const table = await config.api.table.save(
saveTableRequest({
schema: {
@ -3363,7 +3448,7 @@ datasourceDescribe(
formula: {
name: "formula",
type: FieldType.FORMULA,
formula: `{{ js "${js}"}}`,
formula: js,
formulaType: FormulaType.DYNAMIC,
},
},

View File

@ -28,6 +28,7 @@ import Koa from "koa"
import { Server } from "http"
import { AddressInfo } from "net"
import fs from "fs"
import bson from "bson"
let STARTUP_RAN = false
@ -193,6 +194,10 @@ export async function startup(
})
}
if (coreEnv.BSON_BUFFER_SIZE) {
bson.setInternalBufferSize(coreEnv.BSON_BUFFER_SIZE)
}
console.log("Initialising JS runner")
jsRunner.init()
}

View File

@ -136,21 +136,23 @@ class QueryRunner {
pagination = output.pagination
}
// transform as required
if (transformer) {
// We avoid invoking the transformer if it's trivial because there is a cost
// to passing data in and out of the isolate, especially for MongoDB where
// we have to bson serialise/deserialise the data.
const hasTransformer =
transformer != null &&
transformer.length > 0 &&
transformer.trim() !== "return data" &&
transformer.trim() !== "return data;"
if (transformer && hasTransformer) {
transformer = iifeWrapper(transformer)
let vm = new IsolatedVM()
if (datasource.source === SourceName.MONGODB) {
vm = vm.withParsingBson(rows)
}
const ctx = {
data: rows,
params: enrichedParameters,
}
if (transformer != null) {
rows = vm.withContext(ctx, () => vm.execute(transformer!))
}
const ctx = { data: rows, params: enrichedParameters }
rows = vm.withContext(ctx, () => vm.execute(transformer!))
}
// if the request fails we retry once, invalidating the cached value
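
A hedged illustration of what that triviality check treats as a no-op (example transformer strings only, not taken from this commit):

// Skipped: no isolate spin-up, no BSON serialise/deserialise round trip
const trivial = "return data"
const alsoTrivial = "  return data;  " // trimmed, trailing semicolon tolerated
// Still executed inside the IsolatedVM with { data, params } as context
const real = "return data.map(row => ({ ...row, total: row.qty * row.price }))"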

View File

@ -161,33 +161,33 @@ async function processDefaultValues(table: Table, row: Row) {
/**
* This will coerce a value to the correct types based on the type transform map
* @param row The value to coerce
* @param value The value to coerce
* @param type The type to coerce to
* @returns The coerced value
*/
export function coerce(row: any, type: string) {
export function coerce(value: unknown, type: string) {
// no coercion specified for type, skip it
if (!TYPE_TRANSFORM_MAP[type]) {
return row
return value
}
// eslint-disable-next-line no-prototype-builtins
if (TYPE_TRANSFORM_MAP[type].hasOwnProperty(row)) {
if (TYPE_TRANSFORM_MAP[type].hasOwnProperty(value)) {
// @ts-ignore
return TYPE_TRANSFORM_MAP[type][row]
return TYPE_TRANSFORM_MAP[type][value]
} else if (TYPE_TRANSFORM_MAP[type].parse) {
// @ts-ignore
return TYPE_TRANSFORM_MAP[type].parse(row)
return TYPE_TRANSFORM_MAP[type].parse(value)
}
return row
return value
}
/**
* Given an input route this function will apply all the necessary pre-processing to it, such as coercion
* of column values or adding auto-column values.
* @param user the user which is performing the input.
* @param userId the ID of the user which is performing the input.
* @param row the row which is being created/updated.
* @param table the table which the row is being saved to.
* @param source the table/view which the row is being saved to.
* @param opts some input processing options (like disabling auto-column relationships).
* @returns the row which has been prepared to be written to the DB.
*/
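
A small usage sketch of the renamed coerce signature above (illustrative, assuming TYPE_TRANSFORM_MAP registers a numeric parser for FieldType.NUMBER):

// coerce() operates on a single cell value, not a whole row, hence the parameter rename
coerce("2", FieldType.NUMBER)     // -> 2, via TYPE_TRANSFORM_MAP[type].parse
coerce("anything", "not-a-type")  // -> returned unchanged, no transform registered for the type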

View File

@ -10,11 +10,13 @@ import {
FieldType,
OperationFieldTypeEnum,
AIOperationEnum,
AIFieldMetadata,
} from "@budibase/types"
import { OperationFields } from "@budibase/shared-core"
import tracer from "dd-trace"
import { context } from "@budibase/backend-core"
import * as pro from "@budibase/pro"
import { coerce } from "./index"
interface FormulaOpts {
dynamic?: boolean
@ -67,7 +69,18 @@ export async function processFormulas<T extends Row | Row[]>(
continue
}
const responseType = schema.responseType
const isStatic = schema.formulaType === FormulaType.STATIC
const formula = schema.formula
// coerce static values
if (isStatic) {
rows.forEach(row => {
if (row[column] && responseType) {
row[column] = coerce(row[column], responseType)
}
})
}
if (
schema.formula == null ||
@ -80,12 +93,18 @@ export async function processFormulas<T extends Row | Row[]>(
for (let i = 0; i < rows.length; i++) {
let row = rows[i]
let context = contextRows ? contextRows[i] : row
let formula = schema.formula
rows[i] = {
...row,
[column]: tracer.trace("processStringSync", {}, span => {
span?.addTags({ table_id: table._id, column, static: isStatic })
return processStringSync(formula, context)
const result = processStringSync(formula, context)
try {
return responseType ? coerce(result, responseType) : result
} catch (err: any) {
// if the coercion fails, we return empty row contents
span?.addTags({ coercionError: err.message })
return undefined
}
}),
}
}
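
For illustration, how the new responseType flows through this code, using a hypothetical column definition consistent with the tests earlier in the diff:

// Hypothetical dynamic formula column coerced to a number
formula: {
  name: "total",
  type: FieldType.FORMULA,
  formulaType: FormulaType.DYNAMIC,
  formula: "{{ add 1 1 }}",
  responseType: FieldType.NUMBER, // without this the processed value stays the string "2"
}
// processFormulas coerces the handlebars result to 2; for STATIC formulas the stored
// value is coerced up front, and a failed coercion leaves the cell undefined.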
@ -117,12 +136,13 @@ export async function processAIColumns<T extends Row | Row[]>(
continue
}
const operation = schema.operation
const aiSchema: AIFieldMetadata = schema
const rowUpdates = rows.map((row, i) => {
const contextRow = contextRows ? contextRows[i] : row
// Check if the type is bindable and pass through HBS if so
const operationField =
OperationFields[schema.operation as AIOperationEnum]
const operationField = OperationFields[operation as AIOperationEnum]
for (const key in schema) {
const fieldType = operationField[key as keyof typeof operationField]
if (fieldType === OperationFieldTypeEnum.BINDABLE_TEXT) {
@ -131,7 +151,10 @@ export async function processAIColumns<T extends Row | Row[]>(
}
}
const prompt = llm.buildPromptFromAIOperation({ schema, row })
const prompt = llm.buildPromptFromAIOperation({
schema: aiSchema,
row,
})
return tracer.trace("processAIColumn", {}, async span => {
span?.addTags({ table_id: table._id, column })

View File

@ -1,6 +1,7 @@
{
"extends": "./tsconfig.build.json",
"compilerOptions": {
"lib": ["es2020", "dom"],
"composite": true,
"baseUrl": "."
},

View File

@ -0,0 +1,4 @@
*
!dist/**/*
dist/tsconfig.build.tsbuildinfo
!package.json

View File

@ -4,7 +4,7 @@
"description": "Handlebars wrapper for Budibase templating.",
"main": "dist/bundle.cjs",
"module": "dist/bundle.mjs",
"types": "src/index.ts",
"types": "dist/index.d.ts",
"license": "MPL-2.0",
"exports": {
".": {
@ -12,12 +12,8 @@
"import": "./dist/bundle.mjs"
},
"./package.json": "./package.json",
"./iife": "./src/iife.js"
"./iife": "./dist/iife.mjs"
},
"files": [
"dist",
"src"
],
"scripts": {
"build": "tsc --emitDeclarationOnly && rollup -c",
"dev": "rollup -cw",

View File

@ -10,8 +10,8 @@ import inject from "@rollup/plugin-inject"
const production = !process.env.ROLLUP_WATCH
const config = (format, outputFile) => ({
input: "src/index.ts",
const config = (input, outputFile, format) => ({
input,
output: {
sourcemap: !production,
format,
@ -42,6 +42,7 @@ const config = (format, outputFile) => ({
})
export default [
config("cjs", "./dist/bundle.cjs"),
config("esm", "./dist/bundle.mjs"),
config("src/index.ts", "./dist/bundle.cjs", "cjs"),
config("src/index.ts", "./dist/bundle.mjs", "esm"),
config("src/iife.ts", "./dist/iife.mjs", "esm"),
]

View File

@ -134,6 +134,12 @@ export const JsonTypes = [
FieldType.ARRAY,
]
export type FormulaResponseType =
| FieldType.STRING
| FieldType.NUMBER
| FieldType.BOOLEAN
| FieldType.DATETIME
export const NumericTypes = [FieldType.NUMBER, FieldType.BIGINT]
export function isNumeric(type: FieldType) {

View File

@ -1,6 +1,6 @@
// all added by grid/table when defining the
// column size, position and whether it can be viewed
import { FieldType } from "../row"
import { FieldType, FormulaResponseType } from "../row"
import {
AutoFieldSubType,
AutoReason,
@ -115,6 +115,7 @@ export interface FormulaFieldMetadata extends BaseFieldSchema {
type: FieldType.FORMULA
formula: string
formulaType?: FormulaType
responseType?: FormulaResponseType
}
export interface AIFieldMetadata extends BaseFieldSchema {

View File

@ -16,6 +16,7 @@
"build": "node ../../scripts/build.js",
"postbuild": "copyfiles -f ../../yarn.lock ./dist/",
"check:types": "tsc -p tsconfig.json --noEmit --paths null --target es2020",
"check:dependencies": "node ../../scripts/depcheck.js",
"build:dev": "yarn prebuild && tsc --build --watch --preserveWatchOutput",
"run:docker": "node dist/index.js",
"debug": "yarn build && node --expose-gc --inspect=9223 dist/index.js",
@ -41,6 +42,7 @@
"@budibase/pro": "0.0.0",
"@budibase/string-templates": "0.0.0",
"@budibase/types": "0.0.0",
"@budibase/shared-core": "0.0.0",
"@koa/router": "8.0.8",
"@techpass/passport-openidconnect": "0.3.3",
"@types/global-agent": "2.1.1",
@ -53,6 +55,8 @@
"global-agent": "3.0.0",
"ical-generator": "4.1.0",
"joi": "17.6.0",
"jsonwebtoken": "9.0.2",
"knex": "2.4.2",
"koa": "2.13.4",
"koa-body": "4.2.0",
"koa-compress": "4.0.1",
@ -69,9 +73,10 @@
"pouchdb": "7.3.0",
"pouchdb-all-dbs": "1.1.1",
"server-destroy": "1.0.1",
"knex": "2.4.2"
"uuid": "^8.3.2"
},
"devDependencies": {
"@jest/types": "^29.6.3",
"@swc/core": "1.3.71",
"@swc/jest": "0.2.27",
"@types/jest": "29.5.5",
@ -79,6 +84,7 @@
"@types/koa": "2.13.4",
"@types/koa__router": "8.0.8",
"@types/lodash": "4.14.200",
"@types/node": "^22.9.0",
"@types/node-fetch": "2.6.4",
"@types/server-destroy": "1.0.1",
"@types/supertest": "2.0.14",
@ -87,6 +93,7 @@
"nock": "^13.5.4",
"nodemon": "2.0.15",
"rimraf": "3.0.2",
"superagent": "^10.1.1",
"supertest": "6.3.3",
"timekeeper": "2.2.0",
"typescript": "5.5.2",

View File

@ -99,6 +99,7 @@ export default server.listen(parseInt(env.PORT || "4002"), async () => {
startupLog = `${startupLog} - environment: "${env.BUDIBASE_ENVIRONMENT}"`
}
console.log(startupLog)
await initPro()
await redis.clients.init()
features.init()

scripts/depcheck.js (new executable file, 29 changed lines)
View File

@ -0,0 +1,29 @@
#!/usr/bin/node
const depcheck = require("depcheck")
function filterResults(missing) {
if (missing.src) {
delete missing.src
}
return missing
}
function printMissing(missing) {
for (let [key, value] of Object.entries(filterResults(missing))) {
console.log(`Package ${key} missing in: ${value.join(", ")}`)
}
}
depcheck(process.cwd(), {
ignorePatterns: ["dist"],
skipMissing: false,
}).then(results => {
if (Object.values(filterResults(results.missing)).length > 0) {
printMissing(results.missing)
console.error("Missing packages found - stopping.")
process.exit(-1)
} else {
console.log("No missing dependencies.")
}
})

View File

@ -1,32 +0,0 @@
#!/bin/bash
yarn global add zbctl
export ZEEBE_ADDRESS='localhost:26500'
cd ../budibase-bpm
is_camunda_ready() {
if (zbctl --insecure status 2>/dev/null) | grep -q 'Healthy'; then
return 1
else
return 0
fi
}
docker-compose up -d
echo "waiting for Camunda to be ready..."
while is_camunda_ready -eq 0; do sleep 1; done
echo "deploy processes..."
for file in src/main/resources/models/*; do
zbctl deploy resource $file --insecure
done
cd ../budibase/packages/pro
yarn && yarn build
cd ../account-portal/packages/server
yarn worker:run & cd ../../../.. && yarn dev:accountportal

View File

@ -5,12 +5,12 @@ domain=$2
if [ "$enable" = "enable" ]; then
lerna run env:localdomain:enable -- "$domain"
cd packages/account-portal
cd ../account-portal
yarn env:localdomain:enable "$domain"
cd -
else
lerna run env:localdomain:disable
cd packages/account-portal
cd ../account-portal
yarn env:localdomain:disable
cd -
fi

View File

@ -1,34 +0,0 @@
/***
* Running lerna with since and scope is not working as expected.
* For example, running the command `yarn test --scope=@budibase/worker --since=master`, with changes only on `@budibase/backend-core` will not work as expected, as it does not analyse the dependencies properly. The actual `@budibase/worker` task will not be triggered.
*
* This script is using `lerna ls` to detect all the affected projects from a given commit, and if the scoped package is affected, the actual command will be executed.
*
* The current version of the script only supports a single project in the scope.
*/
const { execSync } = require("child_process")
const argv = require("yargs").demandOption(["task", "since", "scope"]).argv
const { task, since, scope } = argv
const affectedPackages = execSync(
`yarn --silent nx show projects --affected -t ${task} --base=${since} --json`,
{
encoding: "utf-8",
}
)
const packages = JSON.parse(affectedPackages)
const isAffected = packages.includes(scope)
if (isAffected) {
console.log(`${scope} is affected. Running task "${task}"`)
execSync(`yarn ${task} --scope=${scope}`, {
stdio: "inherit",
})
} else {
console.log(`${scope} is not affected. Skipping task "${task}"`)
}

yarn.lock (2002 changed lines)

File diff suppressed because it is too large.