Merge branch 'master' into fix-section-crash

This commit is contained in:
Andrew Kingston 2023-11-20 08:33:01 +00:00 committed by GitHub
commit 1f569dfb84
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
73 changed files with 1032 additions and 1149 deletions

View File

@@ -19,6 +19,7 @@
     "bundle.js"
   ],
   "extends": ["eslint:recommended"],
+  "plugins": ["import", "eslint-plugin-local-rules"],
   "overrides": [
     {
       "files": ["**/*.svelte"],
@@ -30,7 +31,6 @@
         "sourceType": "module",
         "allowImportExportEverywhere": true
       }
     },
     {
       "files": ["**/*.ts"],
@@ -42,13 +42,22 @@
         "no-case-declarations": "off",
         "no-useless-escape": "off",
         "no-undef": "off",
-        "no-prototype-builtins": "off"
+        "no-prototype-builtins": "off",
+        "local-rules/no-budibase-imports": "error"
       }
     }
   ],
   "rules": {
     "no-self-assign": "off",
-    "no-unused-vars": ["error", { "varsIgnorePattern": "^_", "argsIgnorePattern": "^_", "destructuredArrayIgnorePattern": "^_" }]
+    "no-unused-vars": [
+      "error",
+      {
+        "varsIgnorePattern": "^_",
+        "argsIgnorePattern": "^_",
+        "destructuredArrayIgnorePattern": "^_"
+      }
+    ],
+    "import/no-relative-packages": "error"
   },
   "globals": {
     "GeolocationPositionError": true

View File

@@ -19,50 +19,41 @@ env:
   PERSONAL_ACCESS_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
   NX_BASE_BRANCH: origin/${{ github.base_ref }}
   USE_NX_AFFECTED: ${{ github.event_name == 'pull_request' }}
+  IS_OSS_CONTRIBUTOR: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != 'Budibase/budibase' }}
 
 jobs:
   lint:
     runs-on: ubuntu-latest
     steps:
-      - name: Checkout repo and submodules
+      - name: Checkout repo
         uses: actions/checkout@v3
-        if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase'
         with:
-          submodules: true
+          submodules: ${{ env.IS_OSS_CONTRIBUTOR == 'false' }}
           token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
-      - name: Checkout repo only
-        uses: actions/checkout@v3
-        if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != 'Budibase/budibase'
       - name: Use Node.js 18.x
         uses: actions/setup-node@v3
         with:
           node-version: 18.x
-          cache: "yarn"
+          cache: yarn
       - run: yarn --frozen-lockfile
       - run: yarn lint
 
   build:
     runs-on: ubuntu-latest
     steps:
-      - name: Checkout repo and submodules
+      - name: Checkout repo
        uses: actions/checkout@v3
-        if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase'
         with:
-          submodules: true
+          submodules: ${{ env.IS_OSS_CONTRIBUTOR == 'false' }}
           token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
           fetch-depth: 0
-      - name: Checkout repo only
-        uses: actions/checkout@v3
-        if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != 'Budibase/budibase'
-        with:
-          fetch-depth: 0
       - name: Use Node.js 18.x
         uses: actions/setup-node@v3
         with:
           node-version: 18.x
-          cache: "yarn"
+          cache: yarn
       - run: yarn --frozen-lockfile
       # Run build all the projects
@@ -81,24 +72,18 @@ jobs:
   test-libraries:
     runs-on: ubuntu-latest
     steps:
-      - name: Checkout repo and submodules
+      - name: Checkout repo
         uses: actions/checkout@v3
-        if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase'
         with:
-          submodules: true
+          submodules: ${{ env.IS_OSS_CONTRIBUTOR == 'false' }}
           token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
           fetch-depth: 0
-      - name: Checkout repo only
-        uses: actions/checkout@v3
-        if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != 'Budibase/budibase'
-        with:
-          fetch-depth: 0
       - name: Use Node.js 18.x
         uses: actions/setup-node@v3
         with:
           node-version: 18.x
-          cache: "yarn"
+          cache: yarn
       - run: yarn --frozen-lockfile
       - name: Test
         run: |
@@ -116,24 +101,18 @@ jobs:
   test-worker:
     runs-on: ubuntu-latest
     steps:
-      - name: Checkout repo and submodules
+      - name: Checkout repo
         uses: actions/checkout@v3
-        if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase'
         with:
-          submodules: true
+          submodules: ${{ env.IS_OSS_CONTRIBUTOR == 'false' }}
           token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
           fetch-depth: 0
-      - name: Checkout repo only
-        uses: actions/checkout@v3
-        if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != 'Budibase/budibase'
-        with:
-          fetch-depth: 0
       - name: Use Node.js 18.x
         uses: actions/setup-node@v3
         with:
           node-version: 18.x
-          cache: "yarn"
+          cache: yarn
       - run: yarn --frozen-lockfile
       - name: Test worker
         run: |
@@ -152,24 +131,18 @@ jobs:
   test-server:
     runs-on: ubuntu-latest
     steps:
-      - name: Checkout repo and submodules
+      - name: Checkout repo
         uses: actions/checkout@v3
-        if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase'
         with:
-          submodules: true
+          submodules: ${{ env.IS_OSS_CONTRIBUTOR == 'false' }}
           token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
           fetch-depth: 0
-      - name: Checkout repo only
-        uses: actions/checkout@v3
-        if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != 'Budibase/budibase'
-        with:
-          fetch-depth: 0
       - name: Use Node.js 18.x
         uses: actions/setup-node@v3
         with:
           node-version: 18.x
-          cache: "yarn"
+          cache: yarn
       - run: yarn --frozen-lockfile
       - name: Test server
         run: |
@@ -200,7 +173,7 @@ jobs:
         uses: actions/setup-node@v3
         with:
           node-version: 18.x
-          cache: "yarn"
+          cache: yarn
       - run: yarn --frozen-lockfile
       - name: Test
         run: |
@@ -213,24 +186,23 @@ jobs:
   integration-test:
     runs-on: ubuntu-latest
     steps:
-      - name: Checkout repo and submodules
+      - name: Checkout repo
         uses: actions/checkout@v3
-        if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase'
         with:
-          submodules: true
+          submodules: ${{ env.IS_OSS_CONTRIBUTOR == 'false' }}
           token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
-      - name: Checkout repo only
-        uses: actions/checkout@v3
-        if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != 'Budibase/budibase'
       - name: Use Node.js 18.x
         uses: actions/setup-node@v3
         with:
           node-version: 18.x
-          cache: "yarn"
+          cache: yarn
       - run: yarn --frozen-lockfile
       - name: Build packages
         run: yarn build --scope @budibase/server --scope @budibase/worker
+      - name: Build backend-core for OSS contributor (required for pro)
+        if: ${{ env.IS_OSS_CONTRIBUTOR == 'true' }}
+        run: yarn build --scope @budibase/backend-core
       - name: Run tests
         run: |
           cd qa-core

View File

@@ -1,48 +0,0 @@
name: Budibase Deploy Production
on:
workflow_dispatch:
inputs:
version:
description: Budibase release version. For example - 1.0.0
required: false
jobs:
release:
runs-on: ubuntu-latest
steps:
- name: Fail if not a tag
run: |
if [[ $GITHUB_REF != refs/tags/* ]]; then
echo "Workflow Dispatch can only be run on tags"
exit 1
fi
- uses: actions/checkout@v2
with:
fetch-depth: 0
- name: Fail if tag is not in master
run: |
if ! git merge-base --is-ancestor ${{ github.sha }} origin/master; then
echo "Tag is not in master. This pipeline can only execute tags that are present on the master branch"
exit 1
fi
- name: Get the latest budibase release version
id: version
run: |
if [ -z "${{ github.event.inputs.version }}" ]; then
release_version=$(cat lerna.json | jq -r '.version')
else
release_version=${{ github.event.inputs.version }}
fi
echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV
- uses: passeidireto/trigger-external-workflow-action@main
env:
PAYLOAD_VERSION: ${{ env.RELEASE_VERSION }}
with:
repository: budibase/budibase-deploys
event: budicloud-prod-deploy
github_pat: ${{ secrets.GH_ACCESS_TOKEN }}

View File

@@ -1,178 +0,0 @@
name: Budibase Release
concurrency:
group: release
cancel-in-progress: false
on:
push:
tags:
- "[0-9]+.[0-9]+.[0-9]+"
# Exclude all pre-releases
- "!*[0-9]+.[0-9]+.[0-9]+-*"
env:
# Posthog token used by ui at build time
POSTHOG_TOKEN: phc_bIjZL7oh2GEUd2vqvTBH8WvrX0fWTFQMs6H5KQxiUxU
INTERCOM_TOKEN: ${{ secrets.INTERCOM_TOKEN }}
PERSONAL_ACCESS_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
jobs:
release-images:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
with:
submodules: true
token: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
fetch-depth: 0
- name: Fail if tag is not in master
run: |
if ! git merge-base --is-ancestor ${{ github.sha }} origin/master; then
echo "Tag is not in master. This pipeline can only execute tags that are present on the master branch"
exit 1
fi
- uses: actions/setup-node@v1
with:
node-version: 18.x
cache: yarn
- run: yarn install --frozen-lockfile
- name: Update versions
run: ./scripts/updateVersions.sh
- run: yarn lint
- run: yarn build
- run: yarn build:sdk
- name: Publish budibase packages to NPM
env:
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
run: |
# setup the username and email. I tend to use 'GitHub Actions Bot' with no email by default
git config --global user.name "Budibase Release Bot"
git config --global user.email "<>"
git submodule foreach git commit -a -m 'Release process'
git commit -a -m 'Release process'
echo //registry.npmjs.org/:_authToken=${NPM_TOKEN} >> .npmrc
yarn release
- name: "Get Current tag"
id: currenttag
run: |
version=$(./scripts/getCurrentVersion.sh)
echo "Using tag $version"
echo "version=$version" >> "$GITHUB_OUTPUT"
- name: Setup Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v1
- name: Docker login
run: |
docker login -u $DOCKER_USER -p $DOCKER_PASSWORD
env:
DOCKER_USER: ${{ secrets.DOCKER_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKER_API_KEY }}
- name: Build worker docker
uses: docker/build-push-action@v5
with:
context: .
push: true
platforms: linux/amd64,linux/arm64
build-args: |
BUDIBASE_VERSION=${{ env.BUDIBASE_VERSION }}
tags: ${{ env.IMAGE_NAME }}:${{ env.IMAGE_TAG }}
file: ./packages/worker/Dockerfile.v2
cache-from: type=registry,ref=${{ env.IMAGE_NAME }}:latest
cache-to: type=inline
env:
IMAGE_NAME: budibase/worker
IMAGE_TAG: ${{ steps.currenttag.outputs.version }}
BUDIBASE_VERSION: ${{ steps.currenttag.outputs.version }}
- name: Build server docker
uses: docker/build-push-action@v5
with:
context: .
push: true
platforms: linux/amd64,linux/arm64
build-args: |
BUDIBASE_VERSION=${{ env.BUDIBASE_VERSION }}
tags: ${{ env.IMAGE_NAME }}:${{ env.IMAGE_TAG }}
file: ./packages/server/Dockerfile.v2
cache-from: type=registry,ref=${{ env.IMAGE_NAME }}:latest
cache-to: type=inline
env:
IMAGE_NAME: budibase/apps
IMAGE_TAG: ${{ steps.currenttag.outputs.version }}
BUDIBASE_VERSION: ${{ steps.currenttag.outputs.version }}
- name: Build proxy docker
uses: docker/build-push-action@v5
with:
context: ./hosting/proxy
push: true
platforms: linux/amd64,linux/arm64
tags: ${{ env.IMAGE_NAME }}:${{ env.IMAGE_TAG }}
file: ./hosting/proxy/Dockerfile
cache-from: type=registry,ref=${{ env.IMAGE_NAME }}:latest
cache-to: type=inline
env:
IMAGE_NAME: budibase/proxy
IMAGE_TAG: ${{ steps.currenttag.outputs.version }}
release-helm-chart:
needs: [release-images]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Setup Helm
uses: azure/setup-helm@v1
id: helm-install
- name: Get the latest budibase release version
id: version
run: |
release_version=$(cat lerna.json | jq -r '.version')
echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV
# due to helm repo index issue: https://github.com/helm/helm/issues/7363
# we need to create new package in a different dir, merge the index and move the package back
- name: Build and release helm chart
run: |
git config user.name "Budibase Helm Bot"
git config user.email "<>"
git reset --hard
git fetch
mkdir sync
echo "Packaging chart to sync dir"
helm package charts/budibase --version 0.0.0-master --app-version "$RELEASE_VERSION" --destination sync
echo "Packaging successful"
git checkout gh-pages
echo "Indexing helm repo"
helm repo index --merge docs/index.yaml sync
mv -f sync/* docs
rm -rf sync
echo "Pushing new helm release"
git add -A
git commit -m "Helm Release: ${{ env.RELEASE_VERSION }}"
git push
trigger-deploy-to-qa-env:
needs: [release-helm-chart]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: peter-evans/repository-dispatch@v2
with:
repository: budibase/budibase-deploys
event-type: budicloud-qa-deploy
token: ${{ secrets.GH_ACCESS_TOKEN }}
client-payload: |-
{
"VERSION": "${{ github.ref_name }}",
"REF_NAME": "${{ github.ref_name}}"
}

View File

@@ -1,125 +0,0 @@
name: Budibase Release Selfhost
on:
workflow_dispatch:
jobs:
release:
runs-on: ubuntu-latest
steps:
- name: Fail if not a tag
run: |
if [[ $GITHUB_REF != refs/tags/* ]]; then
echo "Workflow Dispatch can only be run on tags"
exit 1
fi
- uses: actions/checkout@v2
with:
submodules: true
token: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
fetch-depth: 0
- name: Fail if tag is not in master
run: |
if ! git merge-base --is-ancestor ${{ github.sha }} origin/master; then
echo "Tag is not in master. This pipeline can only execute tags that are present on the master branch"
exit 1
fi
- name: Use Node.js 18.x
uses: actions/setup-node@v1
with:
node-version: 18.x
- name: Get the latest budibase release version
id: version
run: |
release_version=$(cat lerna.json | jq -r '.version')
echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV
- name: Tag and release Docker images (Self Host)
run: |
docker login -u $DOCKER_USER -p $DOCKER_PASSWORD
release_tag=${{ env.RELEASE_VERSION }}
# Pull apps and worker images
docker pull budibase/apps:$release_tag
docker pull budibase/worker:$release_tag
docker pull budibase/proxy:$release_tag
# Tag apps and worker images
docker tag budibase/apps:$release_tag budibase/apps:$SELFHOST_TAG
docker tag budibase/worker:$release_tag budibase/worker:$SELFHOST_TAG
docker tag budibase/proxy:$release_tag budibase/proxy:$SELFHOST_TAG
# Push images
docker push budibase/apps:$SELFHOST_TAG
docker push budibase/worker:$SELFHOST_TAG
docker push budibase/proxy:$SELFHOST_TAG
env:
DOCKER_USER: ${{ secrets.DOCKER_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKER_API_KEY }}
SELFHOST_TAG: latest
- name: Bootstrap and build (CLI)
run: |
yarn
yarn build
- name: Build OpenAPI spec
run: |
pushd packages/server
yarn
yarn specs
popd
- name: Setup Helm
uses: azure/setup-helm@v1
id: helm-install
# due to helm repo index issue: https://github.com/helm/helm/issues/7363
# we need to create new package in a different dir, merge the index and move the package back
- name: Build and release helm chart
run: |
git config user.name "Budibase Helm Bot"
git config user.email "<>"
git reset --hard
git fetch
mkdir sync
echo "Packaging chart to sync dir"
helm package charts/budibase --version "$RELEASE_VERSION" --app-version "$RELEASE_VERSION" --destination sync
echo "Packaging successful"
git checkout gh-pages
echo "Indexing helm repo"
helm repo index --merge docs/index.yaml sync
mv -f sync/* docs
rm -rf sync
echo "Pushing new helm release"
git add -A
git commit -m "Helm Release: ${{ env.RELEASE_VERSION }}"
git push
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Perform Github Release
uses: softprops/action-gh-release@v1
with:
name: ${{ env.RELEASE_VERSION }}
tag_name: ${{ env.RELEASE_VERSION }}
generate_release_notes: true
files: |
packages/cli/build/cli-win.exe
packages/cli/build/cli-linux
packages/cli/build/cli-macos
packages/server/specs/openapi.yaml
packages/server/specs/openapi.json
- name: Discord Webhook Action
uses: tsickert/discord-webhook@v4.0.0
with:
webhook-url: ${{ secrets.PROD_DEPLOY_WEBHOOK_URL }}
content: "Self Host Deployment Complete: ${{ env.RELEASE_VERSION }} deployed to Self Host."
embed-title: ${{ env.RELEASE_VERSION }}

View File

@@ -1,86 +0,0 @@
name: Deploy Budibase Single Container Image to DockerHub
on:
workflow_dispatch:
env:
CI: true
PERSONAL_ACCESS_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
REGISTRY_URL: registry.hub.docker.com
jobs:
build:
name: "build"
runs-on: ubuntu-latest
strategy:
matrix:
node-version: [18.x]
steps:
- name: Maximize build space
uses: easimon/maximize-build-space@master
with:
root-reserve-mb: 30000
swap-size-mb: 1024
remove-android: "true"
remove-dotnet: "true"
- name: Fail if not a tag
run: |
if [[ $GITHUB_REF != refs/tags/* ]]; then
echo "Workflow Dispatch can only be run on tags"
exit 1
fi
- name: "Checkout"
uses: actions/checkout@v2
with:
submodules: true
token: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
- name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@v1
with:
node-version: ${{ matrix.node-version }}
- name: Setup QEMU
uses: docker/setup-qemu-action@v1
- name: Setup Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v1
- name: Run Yarn
run: yarn
- name: Update versions
run: ./scripts/updateVersions.sh
- name: Run Yarn Build
run: yarn build
- name: Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_API_KEY }}
- name: Get the latest release version
id: version
run: |
release_version=$(cat lerna.json | jq -r '.version')
echo $release_version
echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV
- name: Tag and release Budibase service docker image
uses: docker/build-push-action@v2
with:
context: .
push: true
platforms: linux/amd64,linux/arm64
build-args: BUDIBASE_VERSION=${{ env.BUDIBASE_VERSION }}
tags: budibase/budibase,budibase/budibase:${{ env.RELEASE_VERSION }}
file: ./hosting/single/Dockerfile.v2
env:
BUDIBASE_VERSION: ${{ env.RELEASE_VERSION }}
- name: Tag and release Budibase Azure App Service docker image
uses: docker/build-push-action@v2
with:
context: .
push: true
platforms: linux/amd64
build-args: |
TARGETBUILD=aas
BUDIBASE_VERSION=${{ env.BUDIBASE_VERSION }}
tags: budibase/budibase-aas,budibase/budibase-aas:${{ env.RELEASE_VERSION }}
file: ./hosting/single/Dockerfile.v2
env:
BUDIBASE_VERSION: ${{ env.RELEASE_VERSION }}

View File

@@ -0,0 +1,21 @@
module.exports = {
"no-budibase-imports": {
create: function (context) {
return {
ImportDeclaration(node) {
const importPath = node.source.value
if (
/^@budibase\/[^/]+\/.*$/.test(importPath) &&
importPath !== "@budibase/backend-core/tests"
) {
context.report({
node,
message: `Importing from @budibase is not allowed, except for @budibase/backend-core/tests.`,
})
}
},
}
},
},
}
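A quick sketch of what the new local rule flags and what it leaves alone; the import paths below are illustrative examples, not taken from the repo:

// illustrative imports only - paths are examples
import { DatabaseImpl } from "@budibase/backend-core/src/db" // reported: deep import into a @budibase package
import { structures } from "@budibase/backend-core/tests" // allowed: the one explicitly exempted path
import { db } from "@budibase/backend-core" // allowed: top-level package entry point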

View File

@@ -1,5 +1,5 @@
 {
-  "version": "2.13.5",
+  "version": "2.13.10",
   "npmClient": "yarn",
   "packages": [
     "packages/*"

View File

@@ -2,11 +2,17 @@
   "name": "root",
   "private": true,
   "devDependencies": {
+    "@babel/core": "^7.22.5",
+    "@babel/eslint-parser": "^7.22.5",
+    "@babel/preset-env": "^7.22.5",
     "@esbuild-plugins/tsconfig-paths": "^0.1.2",
     "@typescript-eslint/parser": "6.7.2",
     "esbuild": "^0.18.17",
     "esbuild-node-externals": "^1.8.0",
     "eslint": "^8.44.0",
+    "eslint-plugin-import": "^2.29.0",
+    "eslint-plugin-local-rules": "^2.0.0",
+    "eslint-plugin-svelte": "^2.32.2",
     "husky": "^8.0.3",
     "kill-port": "^1.6.1",
     "lerna": "7.1.1",
@@ -17,12 +23,8 @@
     "prettier": "2.8.8",
     "prettier-plugin-svelte": "^2.3.0",
     "svelte": "3.49.0",
-    "typescript": "5.2.2",
-    "@babel/core": "^7.22.5",
-    "@babel/eslint-parser": "^7.22.5",
-    "@babel/preset-env": "^7.22.5",
-    "eslint-plugin-svelte": "^2.32.2",
-    "svelte-eslint-parser": "^0.32.0"
+    "svelte-eslint-parser": "^0.32.0",
+    "typescript": "5.2.2"
   },
   "scripts": {
     "preinstall": "node scripts/syncProPackage.js",

View File

@@ -19,7 +19,7 @@ async function populateFromDB(appId: string) {
   return doWithDB(
     appId,
     (db: Database) => {
-      return db.get(DocumentType.APP_METADATA)
+      return db.get<App>(DocumentType.APP_METADATA)
     },
     { skip_setup: true }
   )

View File

@@ -1,6 +1,6 @@
-const BaseCache = require("./base")
+import BaseCache from "./base"
 
-const GENERIC = new BaseCache.default()
+const GENERIC = new BaseCache()
 
 export enum CacheKey {
   CHECKLIST = "checklist",
@@ -19,6 +19,7 @@ export enum TTL {
 }
 
 function performExport(funcName: string) {
+  // @ts-ignore
   return (...args: any) => GENERIC[funcName](...args)
 }

View File

@@ -2,4 +2,6 @@ export * as generic from "./generic"
 export * as user from "./user"
 export * as app from "./appMetadata"
 export * as writethrough from "./writethrough"
+export * as invite from "./invite"
+export * as passwordReset from "./passwordReset"
 export * from "./generic"

View File

@@ -0,0 +1,86 @@
import * as utils from "../utils"
import { Duration, DurationType } from "../utils"
import env from "../environment"
import { getTenantId } from "../context"
import * as redis from "../redis/init"
const TTL_SECONDS = Duration.fromDays(7).toSeconds()
interface Invite {
email: string
info: any
}
interface InviteWithCode extends Invite {
code: string
}
/**
* Given an invite code and invite body, allow the update an existing/valid invite in redis
* @param code The invite code for an invite in redis
* @param value The body of the updated user invitation
*/
export async function updateCode(code: string, value: Invite) {
const client = await redis.getInviteClient()
await client.store(code, value, TTL_SECONDS)
}
/**
* Generates an invitation code and writes it to redis - which can later be checked for user creation.
* @param email the email address which the code is being sent to (for use later).
* @param info Information to be carried along with the invitation.
* @return returns the code that was stored to redis.
*/
export async function createCode(email: string, info: any): Promise<string> {
const code = utils.newid()
const client = await redis.getInviteClient()
await client.store(code, { email, info }, TTL_SECONDS)
return code
}
/**
* Checks that the provided invite code is valid - will return the email address of user that was invited.
* @param code the invite code that was provided as part of the link.
* @return If the code is valid then an email address will be returned.
*/
export async function getCode(code: string): Promise<Invite> {
const client = await redis.getInviteClient()
const value = (await client.get(code)) as Invite | undefined
if (!value) {
throw "Invitation is not valid or has expired, please request a new one."
}
return value
}
export async function deleteCode(code: string) {
const client = await redis.getInviteClient()
await client.delete(code)
}
/**
Get all currently available user invitations for the current tenant.
**/
export async function getInviteCodes(): Promise<InviteWithCode[]> {
const client = await redis.getInviteClient()
const invites: { key: string; value: Invite }[] = await client.scan()
const results: InviteWithCode[] = invites.map(invite => {
return {
...invite.value,
code: invite.key,
}
})
if (!env.MULTI_TENANCY) {
return results
}
const tenantId = getTenantId()
return results.filter(invite => tenantId === invite.info.tenantId)
}
export async function getExistingInvites(
emails: string[]
): Promise<InviteWithCode[]> {
return (await getInviteCodes()).filter(invite =>
emails.includes(invite.email)
)
}
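A minimal usage sketch of the new invite cache, assuming it is consumed through the re-exports added to the cache index earlier in this diff; the calling code, email, and tenant info are illustrative:

// illustrative calling code - only createCode/getCode/deleteCode come from the module above
const code = await invite.createCode("new.user@example.com", { tenantId: "default" })
// ...the code is embedded in the invite link that gets emailed to the user...
const { email, info } = await invite.getCode(code) // throws if unknown or older than 7 days
await invite.deleteCode(code) // consume the invite once the user has been created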

View File

@@ -0,0 +1,38 @@
import * as redis from "../redis/init"
import * as utils from "../utils"
import { Duration, DurationType } from "../utils"
const TTL_SECONDS = Duration.fromHours(1).toSeconds()
interface PasswordReset {
userId: string
info: any
}
/**
* Given a user ID this will store a code (that is returned) for an hour in redis.
* The user can then return this code for resetting their password (through their reset link).
* @param userId the ID of the user which is to be reset.
* @param info Info about the user/the reset process.
* @return returns the code that was stored to redis.
*/
export async function createCode(userId: string, info: any): Promise<string> {
const code = utils.newid()
const client = await redis.getPasswordResetClient()
await client.store(code, { userId, info }, TTL_SECONDS)
return code
}
/**
* Given a reset code this will lookup to redis, check if the code is valid.
* @param code The code provided via the email link.
* @return returns the user ID if it is found
*/
export async function getCode(code: string): Promise<PasswordReset> {
const client = await redis.getPasswordResetClient()
const value = (await client.get(code)) as PasswordReset | undefined
if (!value) {
throw "Provided information is not valid, cannot reset password - please try again."
}
return value
}
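The password reset cache is used the same way as the invite cache; a hedged sketch of the calling flow (only createCode/getCode come from the file above, the rest is illustrative):

// illustrative calling code
const code = await passwordReset.createCode(user._id, { requestedFrom: "settings" }) // embed in the reset email link
// ...later, when the link is followed...
const { userId } = await passwordReset.getCode(code) // throws if the code is invalid or older than 1 hour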

View File

@@ -4,7 +4,7 @@ import { ContextMap } from "./types"
 export default class Context {
   static storage = new AsyncLocalStorage<ContextMap>()
 
-  static run(context: ContextMap, func: any) {
+  static run<T>(context: ContextMap, func: () => T) {
     return Context.storage.run(context, () => func())
   }

View File

@@ -98,17 +98,17 @@ function updateContext(updates: ContextMap): ContextMap {
   return context
 }
 
-async function newContext(updates: ContextMap, task: any) {
+async function newContext<T>(updates: ContextMap, task: () => T) {
   // see if there already is a context setup
   let context: ContextMap = updateContext(updates)
   return Context.run(context, task)
 }
 
-export async function doInAutomationContext(params: {
+export async function doInAutomationContext<T>(params: {
   appId: string
   automationId: string
-  task: any
-}): Promise<any> {
+  task: () => T
+}): Promise<T> {
   const tenantId = getTenantIDFromAppID(params.appId)
   return newContext(
     {
@@ -144,10 +144,10 @@ export async function doInTenant<T>(
   return newContext(updates, task)
 }
 
-export async function doInAppContext(
+export async function doInAppContext<T>(
   appId: string | null,
-  task: any
-): Promise<any> {
+  task: () => T
+): Promise<T> {
   if (!appId && !env.isTest()) {
     throw new Error("appId is required")
   }
@@ -165,10 +165,10 @@ export async function doInAppContext(
   return newContext(updates, task)
 }
 
-export async function doInIdentityContext(
+export async function doInIdentityContext<T>(
   identity: IdentityContext,
-  task: any
-): Promise<any> {
+  task: () => T
+): Promise<T> {
   if (!identity) {
     throw new Error("identity is required")
   }
@@ -276,6 +276,9 @@ export function getAuditLogsDB(): Database {
  */
 export function getAppDB(opts?: any): Database {
   const appId = getAppId()
+  if (!appId) {
+    throw new Error("Unable to retrieve app DB - no app ID.")
+  }
   return getDB(appId, opts)
 }
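The practical effect of the new generics is that callers get the task's return type back instead of any. A small sketch; appId, App and DocumentType are assumed from elsewhere in the codebase:

// sketch only - appId, App and DocumentType are assumed names
const app = await context.doInAppContext(appId, async () => {
  const db = context.getAppDB() // now throws a descriptive error if no app ID is in context
  return await db.get<App>(DocumentType.APP_METADATA)
})
// app is inferred as App rather than any, because task is typed as () => T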

View File

@@ -10,6 +10,7 @@ import {
   DatabaseDeleteIndexOpts,
   Document,
   isDocument,
+  RowResponse,
 } from "@budibase/types"
 import { getCouchInfo } from "./connections"
 import { directCouchUrlCall } from "./utils"
@@ -48,10 +49,7 @@ export class DatabaseImpl implements Database {
   private readonly couchInfo = getCouchInfo()
 
-  constructor(dbName?: string, opts?: DatabaseOpts, connection?: string) {
-    if (dbName == null) {
-      throw new Error("Database name cannot be undefined.")
-    }
+  constructor(dbName: string, opts?: DatabaseOpts, connection?: string) {
     this.name = dbName
     this.pouchOpts = opts || {}
     if (connection) {
@@ -112,7 +110,7 @@ export class DatabaseImpl implements Database {
     }
   }
 
-  async get<T>(id?: string): Promise<T | any> {
+  async get<T extends Document>(id?: string): Promise<T> {
     const db = await this.checkSetup()
     if (!id) {
       throw new Error("Unable to get doc without a valid _id.")
@@ -120,6 +118,35 @@
     return this.updateOutput(() => db.get(id))
   }
 
+  async getMultiple<T extends Document>(
+    ids: string[],
+    opts?: { allowMissing?: boolean }
+  ): Promise<T[]> {
+    // get unique
+    ids = [...new Set(ids)]
+    const response = await this.allDocs<T>({
+      keys: ids,
+      include_docs: true,
+    })
+    const rowUnavailable = (row: RowResponse<T>) => {
+      // row is deleted - key lookup can return this
+      if (row.doc == null || ("deleted" in row.value && row.value.deleted)) {
+        return true
+      }
+      return row.error === "not_found"
+    }
+
+    const rows = response.rows.filter(row => !rowUnavailable(row))
+    const someMissing = rows.length !== response.rows.length
+    // some were filtered out - means some missing
+    if (!opts?.allowMissing && someMissing) {
+      const missing = response.rows.filter(row => rowUnavailable(row))
+      const missingIds = missing.map(row => row.key).join(", ")
+      throw new Error(`Unable to get documents: ${missingIds}`)
+    }
+    return rows.map(row => row.doc!)
+  }
+
   async remove(idOrDoc: string | Document, rev?: string) {
     const db = await this.checkSetup()
     let _id: string
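A short sketch of how the new getMultiple behaves in practice (the document IDs are examples):

// sketch only - IDs are illustrative
const db = context.getAppDB()
// duplicate IDs are de-duplicated, and deleted/missing rows are filtered out of the result
const rows = await db.getMultiple<Row>(["ro_abc", "ro_def", "ro_def"], { allowMissing: true })
// without allowMissing, any filtered-out ID makes the call throw "Unable to get documents: <missing ids>"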

View File

@@ -1,10 +1,7 @@
-import env from "../environment"
 import { directCouchQuery, DatabaseImpl } from "./couch"
-import { CouchFindOptions, Database } from "@budibase/types"
+import { CouchFindOptions, Database, DatabaseOpts } from "@budibase/types"
 
-const dbList = new Set()
-
-export function getDB(dbName?: string, opts?: any): Database {
+export function getDB(dbName: string, opts?: DatabaseOpts): Database {
   return new DatabaseImpl(dbName, opts)
 }
@@ -14,7 +11,7 @@ export function getDB(dbName?: string, opts?: any): Database {
 export async function doWithDB<T>(
   dbName: string,
   cb: (db: Database) => Promise<T>,
-  opts = {}
+  opts?: DatabaseOpts
 ) {
   const db = getDB(dbName, opts)
   // need this to be async so that we can correctly close DB after all
@@ -22,13 +19,6 @@
   return await cb(db)
 }
 
-export function allDbs() {
-  if (!env.isTest()) {
-    throw new Error("Cannot be used outside test environment.")
-  }
-  return [...dbList]
-}
-
 export async function directCouchAllDbs(queryString?: string) {
   let couchPath = "/_all_dbs"
   if (queryString) {

View File

@@ -32,6 +32,7 @@ export * as blacklist from "./blacklist"
 export * as docUpdates from "./docUpdates"
 export * from "./utils/Duration"
 export { SearchParams } from "./db"
+export * as docIds from "./docIds"
 // Add context to tenancy for backwards compatibility
 // only do this for external usages to prevent internal
 // circular dependencies
@@ -50,6 +51,7 @@ export * from "./constants"
 // expose package init function
 import * as db from "./db"
 export const init = (opts: any = {}) => {
   db.init(opts.db)
 }

View File

@@ -7,15 +7,19 @@ let userClient: Client,
   cacheClient: Client,
   writethroughClient: Client,
   lockClient: Client,
-  socketClient: Client
+  socketClient: Client,
+  inviteClient: Client,
+  passwordResetClient: Client
 
-async function init() {
+export async function init() {
   userClient = await new Client(utils.Databases.USER_CACHE).init()
   sessionClient = await new Client(utils.Databases.SESSIONS).init()
   appClient = await new Client(utils.Databases.APP_METADATA).init()
   cacheClient = await new Client(utils.Databases.GENERIC_CACHE).init()
   lockClient = await new Client(utils.Databases.LOCKS).init()
   writethroughClient = await new Client(utils.Databases.WRITE_THROUGH).init()
+  inviteClient = await new Client(utils.Databases.INVITATIONS).init()
+  passwordResetClient = await new Client(utils.Databases.PW_RESETS).init()
   socketClient = await new Client(
     utils.Databases.SOCKET_IO,
     utils.SelectableDatabase.SOCKET_IO
@@ -29,6 +33,8 @@ export async function shutdown() {
   if (cacheClient) await cacheClient.finish()
   if (writethroughClient) await writethroughClient.finish()
   if (lockClient) await lockClient.finish()
+  if (inviteClient) await inviteClient.finish()
+  if (passwordResetClient) await passwordResetClient.finish()
   if (socketClient) await socketClient.finish()
 }
@@ -84,3 +90,17 @@ export async function getSocketClient() {
   }
   return socketClient
 }
+
+export async function getInviteClient() {
+  if (!inviteClient) {
+    await init()
+  }
+  return inviteClient
+}
+
+export async function getPasswordResetClient() {
+  if (!passwordResetClient) {
+    await init()
+  }
+  return passwordResetClient
+}

View File

@@ -28,7 +28,6 @@ const DEFAULT_SELECT_DB = SelectableDatabase.DEFAULT
 // for testing just generate the client once
 let CLOSED = false
 let CLIENTS: { [key: number]: any } = {}
-0
 let CONNECTED = false
 
 // mock redis always connected

View File

@@ -303,7 +303,7 @@ export class UserDB {
   static async bulkCreate(
     newUsersRequested: User[],
-    groups: string[]
+    groups?: string[]
   ): Promise<BulkUserCreated> {
     const tenantId = getTenantId()
@@ -328,7 +328,7 @@
         })
         continue
       }
-      newUser.userGroups = groups
+      newUser.userGroups = groups || []
       newUsers.push(newUser)
       if (isCreator(newUser)) {
         newCreators.push(newUser)

View File

@@ -6,6 +6,7 @@ import {
 } from "@budibase/types"
 import * as dbUtils from "../db"
 import { ViewName } from "../constants"
+import { getExistingInvites } from "../cache/invite"
 
 /**
  * Apply a system-wide search on emails:
@@ -26,6 +27,9 @@ export async function searchExistingEmails(emails: string[]) {
   const existingAccounts = await getExistingAccounts(emails)
   matchedEmails.push(...existingAccounts.map(account => account.email))
 
+  const invitedEmails = await getExistingInvites(emails)
+  matchedEmails.push(...invitedEmails.map(invite => invite.email))
+
   return [...new Set(matchedEmails.map(email => email.toLowerCase()))]
 }

View File

@@ -28,6 +28,9 @@ export class Duration {
       toMs: () => {
         return Duration.convert(from, DurationType.MILLISECONDS, duration)
       },
+      toSeconds: () => {
+        return Duration.convert(from, DurationType.SECONDS, duration)
+      },
     }
   }
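The new toSeconds conversion is what the invite and password reset caches above use for their redis TTLs, for example:

const inviteTtl = Duration.fromDays(7).toSeconds() // 604800, the invite cache TTL_SECONDS
const resetTtl = Duration.fromHours(1).toSeconds() // 3600, the password reset TTL_SECONDS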

View File

@@ -12,7 +12,7 @@ import { generator } from "./generator"
 import { tenant } from "."
 
 export const newEmail = () => {
-  return `${uuid()}@test.com`
+  return `${uuid()}@example.com`
 }
 
 export const user = (userProps?: Partial<Omit<User, "userId">>): User => {

View File

@@ -33,6 +33,10 @@
       part1: PrettyRelationshipDefinitions.MANY,
       part2: PrettyRelationshipDefinitions.ONE,
     },
+    [RelationshipType.ONE_TO_MANY]: {
+      part1: PrettyRelationshipDefinitions.ONE,
+      part2: PrettyRelationshipDefinitions.MANY,
+    },
   }
   let relationshipOpts1 = Object.values(PrettyRelationshipDefinitions)
   let relationshipOpts2 = Object.values(PrettyRelationshipDefinitions)
@@ -58,7 +62,7 @@
   let fromPrimary, fromForeign, fromColumn, toColumn
   let throughId, throughToKey, throughFromKey
-  let isManyToMany, isManyToOne, relationshipType
+  let relationshipType
   let hasValidated = false
 
   $: fromId = null
@@ -85,8 +89,9 @@
   $: valid =
     getErrorCount(errors) === 0 && allRequiredAttributesSet(relationshipType)
   $: isManyToMany = relationshipType === RelationshipType.MANY_TO_MANY
-  $: isManyToOne = relationshipType === RelationshipType.MANY_TO_ONE
+  $: isManyToOne =
+    relationshipType === RelationshipType.MANY_TO_ONE ||
+    relationshipType === RelationshipType.ONE_TO_MANY
 
   function getTable(id) {
     return plusTables.find(table => table._id === id)
   }

View File

@@ -53,7 +53,7 @@
   selected={isViewActive(view, $isActive, $views, $viewsV2)}
   on:click={() => {
     if (view.version === 2) {
-      $goto(`./view/v2/${view.id}`)
+      $goto(`./view/v2/${encodeURIComponent(view.id)}`)
     } else {
       $goto(`./view/v1/${encodeURIComponent(name)}`)
     }

View File

@@ -11,6 +11,7 @@
   export let componentBindings
   export let bindings
   export let parseSettings
+  export let disabled
 
   const draggable = getContext("draggable")
   const dispatch = createEventDispatcher()

View File

@@ -1,26 +0,0 @@
<script>
import { DrawerContent, Drawer, Button, Icon } from "@budibase/bbui"
import ValidationDrawer from "components/design/settings/controls/ValidationEditor/ValidationDrawer.svelte"
export let column
export let type
let drawer
</script>
<Icon name="Settings" hoverable size="S" on:click={drawer.show} />
<Drawer bind:this={drawer} title="Field Validation">
<svelte:fragment slot="description">
"{column.name}" field validation
</svelte:fragment>
<Button cta slot="buttons" on:click={drawer.hide}>Save</Button>
<DrawerContent slot="body">
<div class="container">
<ValidationDrawer
slot="body"
bind:rules={column.validation}
fieldName={column.name}
{type}
/>
</div>
</DrawerContent>
</Drawer>

View File

@@ -1,202 +0,0 @@
<script>
import {
Button,
Icon,
DrawerContent,
Layout,
Select,
Label,
Body,
Input,
} from "@budibase/bbui"
import { flip } from "svelte/animate"
import { dndzone } from "svelte-dnd-action"
import { generate } from "shortid"
import CellEditor from "./CellEditor.svelte"
export let columns = []
export let options = []
export let schema = {}
const flipDurationMs = 150
let dragDisabled = true
$: unselectedColumns = getUnselectedColumns(options, columns)
$: columns.forEach(column => {
if (!column.id) {
column.id = generate()
}
})
const getUnselectedColumns = (allColumns, selectedColumns) => {
let optionsObj = {}
allColumns.forEach(option => {
optionsObj[option] = true
})
selectedColumns?.forEach(column => {
delete optionsObj[column.name]
})
return Object.keys(optionsObj)
}
const getRemainingColumnOptions = selectedColumn => {
if (!selectedColumn || unselectedColumns.includes(selectedColumn)) {
return unselectedColumns
}
return [selectedColumn, ...unselectedColumns]
}
const addColumn = () => {
columns = [...columns, {}]
}
const removeColumn = id => {
columns = columns.filter(column => column.id !== id)
}
const updateColumnOrder = e => {
columns = e.detail.items
}
const handleFinalize = e => {
updateColumnOrder(e)
dragDisabled = true
}
const addAllColumns = () => {
let newColumns = columns || []
options.forEach(field => {
const fieldSchema = schema[field]
const hasCol = columns && columns.findIndex(x => x.name === field) !== -1
if (!fieldSchema?.autocolumn && !hasCol) {
newColumns.push({
name: field,
displayName: field,
})
}
})
columns = newColumns
}
const reset = () => {
columns = []
}
const getFieldType = column => {
return `validation/${schema[column.name]?.type}`
}
</script>
<DrawerContent>
<div class="container">
<Layout noPadding gap="S">
{#if columns?.length}
<Layout noPadding gap="XS">
<div class="column">
<div />
<Label size="L">Column</Label>
<Label size="L">Label</Label>
<div />
<div />
</div>
<div
class="columns"
use:dndzone={{
items: columns,
flipDurationMs,
dropTargetStyle: { outline: "none" },
dragDisabled,
}}
on:finalize={handleFinalize}
on:consider={updateColumnOrder}
>
{#each columns as column (column.id)}
<div class="column" animate:flip={{ duration: flipDurationMs }}>
<div
class="handle"
aria-label="drag-handle"
style={dragDisabled ? "cursor: grab" : "cursor: grabbing"}
on:mousedown={() => (dragDisabled = false)}
>
<Icon name="DragHandle" size="XL" />
</div>
<Select
bind:value={column.name}
placeholder="Column"
options={getRemainingColumnOptions(column.name)}
on:change={e => (column.displayName = e.detail)}
/>
<Input bind:value={column.displayName} placeholder="Label" />
<CellEditor type={getFieldType(column)} bind:column />
<Icon
name="Close"
hoverable
size="S"
on:click={() => removeColumn(column.id)}
disabled={columns.length === 1}
/>
</div>
{/each}
</div>
</Layout>
{:else}
<div class="column">
<div class="wide">
<Body size="S">Add columns to be included in your form below.</Body>
</div>
</div>
{/if}
<div class="column">
<div class="buttons wide">
<Button secondary icon="Add" on:click={addColumn}>Add column</Button>
<Button secondary quiet on:click={addAllColumns}>
Add all columns
</Button>
{#if columns?.length}
<Button secondary quiet on:click={reset}>Reset columns</Button>
{/if}
</div>
</div>
</Layout>
</div>
</DrawerContent>
<style>
.container {
width: 100%;
max-width: 600px;
margin: 0 auto;
}
.columns {
display: flex;
flex-direction: column;
justify-content: flex-start;
align-items: stretch;
gap: var(--spacing-m);
}
.column {
gap: var(--spacing-l);
display: grid;
grid-template-columns: 20px 1fr 1fr 16px 16px;
align-items: center;
border-radius: var(--border-radius-s);
transition: background-color ease-in-out 130ms;
}
.column:hover {
background-color: var(--spectrum-global-color-gray-100);
}
.handle {
display: grid;
place-items: center;
}
.wide {
grid-column: 2 / -1;
}
.buttons {
display: flex;
flex-direction: row;
justify-content: flex-start;
align-items: center;
gap: var(--spacing-m);
}
</style>

View File

@@ -1,4 +1,5 @@
 <script>
+  import { Toggle } from "@budibase/bbui"
   import { cloneDeep, isEqual } from "lodash/fp"
   import {
     getDatasourceForProvider,
@@ -25,6 +26,8 @@
   let sanitisedValue
   let unconfigured
+  let selectAll = true
 
   $: bindings = getBindableProperties($selectedScreen, componentInstance._id)
   $: actionType = componentInstance.actionType
   let componentBindings = []
@@ -145,16 +148,31 @@
     dispatch("change", getValidColumns(parentFieldsUpdated, options))
   }
 
-  const listUpdated = e => {
-    const parsedColumns = getValidColumns(e.detail, options)
+  const listUpdated = columns => {
+    const parsedColumns = getValidColumns(columns, options)
     dispatch("change", parsedColumns)
   }
 </script>
 
 <div class="field-configuration">
+  <div class="toggle-all">
+    <span />
+    <Toggle
+      on:change={() => {
+        let update = fieldList.map(field => ({
+          ...field,
+          active: selectAll,
+        }))
+        listUpdated(update)
+      }}
+      text=""
+      bind:value={selectAll}
+      thin
+    />
+  </div>
   {#if fieldList?.length}
     <DraggableList
-      on:change={listUpdated}
+      on:change={e => listUpdated(e.detail)}
       on:itemChange={processItemUpdate}
       items={fieldList}
       listItemKey={"_id"}
@@ -171,4 +189,21 @@
   .field-configuration :global(.spectrum-ActionButton) {
     width: 100%;
   }
+  .toggle-all {
+    display: flex;
+    justify-content: space-between;
+  }
+  .toggle-all :global(.spectrum-Switch) {
+    margin-right: 0px;
+    padding-right: calc(var(--spacing-s) - 1px);
+    min-height: unset;
+  }
+  .toggle-all :global(.spectrum-Switch .spectrum-Switch-switch) {
+    margin-top: 0px;
+  }
+  .toggle-all span {
+    color: var(--spectrum-global-color-gray-700);
+    font-size: 12px;
+    margin-left: calc(var(--spacing-s) - 1px);
+  }
 </style>

View File

@@ -404,7 +404,7 @@
     datasource = $datasources.list.find(ds => ds._id === query?.datasourceId)
     const datasourceUrl = datasource?.config.url
     const qs = query?.fields.queryString
-    breakQs = restUtils.breakQueryString(qs)
+    breakQs = restUtils.breakQueryString(encodeURI(qs))
     breakQs = runtimeToReadableMap(mergedBindings, breakQs)
 
     const path = query.fields.path
@@ -652,7 +652,7 @@
   <div class="bottom">
     <Layout paddingY="S" gap="S">
       <Divider />
-      {#if !response && Object.keys(schema).length === 0}
+      {#if !response && Object.keys(schema || {}).length === 0}
         <Heading size="M">Response</Heading>
         <div class="placeholder">
           <div class="placeholder-internal">

View File

@@ -1,12 +0,0 @@
{
"globals": {
"emit": true,
"key": true
},
"env": {
"node": true
},
"extends": ["eslint:recommended"],
"rules": {
}
}

View File

@@ -94,7 +94,7 @@
   .align--right {
     text-align: right;
   }
-  .align-justify {
+  .align--justify {
     text-align: justify;
   }
 </style>

View File

@@ -1,8 +1,7 @@
 <script>
   import Field from "./Field.svelte"
-  import { CoreDropzone, ProgressCircle } from "@budibase/bbui"
+  import { CoreDropzone, ProgressCircle, Helpers } from "@budibase/bbui"
   import { getContext, onMount, onDestroy } from "svelte"
-  import { cloneDeep } from "../../../../../bbui/src/helpers"
 
   export let datasourceId
   export let bucket
@@ -100,7 +99,7 @@
   const handleChange = e => {
     localFiles = e.detail
-    let files = cloneDeep(e.detail) || []
+    let files = Helpers.cloneDeep(e.detail) || []
     // remove URL as it contains the full base64 image data
     files.forEach(file => {
       if (file.type?.startsWith("image")) {

View File

@@ -5,7 +5,7 @@ export const buildViewV2Endpoints = API => ({
    */
   fetchDefinition: async viewId => {
     return await API.get({
-      url: `/api/v2/views/${viewId}`,
+      url: `/api/v2/views/${encodeURIComponent(viewId)}`,
     })
   },
   /**
@@ -24,7 +24,7 @@ export const buildViewV2Endpoints = API => ({
    */
   update: async view => {
     return await API.put({
-      url: `/api/v2/views/${view.id}`,
+      url: `/api/v2/views/${encodeURIComponent(view.id)}`,
       body: view,
     })
   },
@@ -50,7 +50,7 @@ export const buildViewV2Endpoints = API => ({
     sortType,
   }) => {
     return await API.post({
-      url: `/api/v2/views/${viewId}/search`,
+      url: `/api/v2/views/${encodeURIComponent(viewId)}/search`,
       body: {
         query,
         paginate,
@@ -67,6 +67,8 @@ export const buildViewV2Endpoints = API => ({
    * @param viewId the id of the view
    */
   delete: async viewId => {
-    return await API.delete({ url: `/api/v2/views/${viewId}` })
+    return await API.delete({
+      url: `/api/v2/views/${encodeURIComponent(viewId)}`,
+    })
   },
 })
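Why the IDs are encoded before being interpolated into the path: any reserved URL characters in a view ID would otherwise corrupt the route. A sketch with a deliberately hypothetical ID:

// hypothetical ID containing reserved characters, purely to illustrate the failure mode
const viewId = "view_ta_users/v2#1"
const bad = `/api/v2/views/${viewId}` // -> /api/v2/views/view_ta_users/v2#1 (extra path segment + fragment)
const good = `/api/v2/views/${encodeURIComponent(viewId)}` // -> /api/v2/views/view_ta_users%2Fv2%231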

@@ -1 +1 @@
-Subproject commit ad9a0085bee0c4f3184acd86cadd872ea9917e88
+Subproject commit 2cf6f28380d3ab22128b8a889d622fd5adfa31fc

View File

@@ -337,7 +337,7 @@ export async function destroy(ctx: UserCtx) {
   if (datasource.type === dbCore.BUDIBASE_DATASOURCE_TYPE) {
     await destroyInternalTablesBySourceId(datasourceId)
   } else {
-    const queries = await db.allDocs(getQueryParams(datasourceId, null))
+    const queries = await db.allDocs(getQueryParams(datasourceId))
     await db.bulkDocs(
       queries.rows.map((row: any) => ({
         _id: row.id,

View File

@@ -1,9 +1,5 @@
 import * as linkRows from "../../../db/linkedRows"
-import {
-  generateRowID,
-  getMultiIDParams,
-  InternalTables,
-} from "../../../db/utils"
+import { generateRowID, InternalTables } from "../../../db/utils"
 import * as userController from "../user"
 import {
   cleanupAttachments,
@@ -240,8 +236,10 @@ export async function fetchEnrichedRow(ctx: UserCtx) {
   const linkVals = links as LinkDocumentValue[]
 
   // look up the actual rows based on the ids
-  const params = getMultiIDParams(linkVals.map(linkVal => linkVal.id))
-  let linkedRows = (await db.allDocs<Row>(params)).rows.map(row => row.doc!)
+  let linkedRows = await db.getMultiple<Row>(
+    linkVals.map(linkVal => linkVal.id),
+    { allowMissing: true }
+  )
 
   // get the linked tables
   const linkTableIds = getLinkedTableIDs(table as Table)

View File

@@ -1,21 +1,9 @@
 import { InternalTables } from "../../../db/utils"
 import * as userController from "../user"
 import { context } from "@budibase/backend-core"
-import {
-  Ctx,
-  FieldType,
-  ManyToOneRelationshipFieldMetadata,
-  OneToManyRelationshipFieldMetadata,
-  Row,
-  SearchFilters,
-  Table,
-  UserCtx,
-} from "@budibase/types"
-import { FieldTypes, NoEmptyFilterStrings } from "../../../constants"
-import sdk from "../../../sdk"
+import { Ctx, Row, UserCtx } from "@budibase/types"
 import validateJs from "validate.js"
-import { cloneDeep } from "lodash/fp"
 
 validateJs.extend(validateJs.validators.datetime, {
   parse: function (value: string) {

View File

@@ -94,7 +94,7 @@ export async function externalTrigger(
   automation: Automation,
   params: { fields: Record<string, any>; timeout?: number },
   { getResponses }: { getResponses?: boolean } = {}
-) {
+): Promise<any> {
   if (
     automation.definition != null &&
     automation.definition.trigger != null &&

View File

@@ -8,7 +8,7 @@ import {
   getLinkedTable,
 } from "./linkUtils"
 import flatten from "lodash/flatten"
-import { getMultiIDParams, USER_METDATA_PREFIX } from "../utils"
+import { USER_METDATA_PREFIX } from "../utils"
 import partition from "lodash/partition"
 import { getGlobalUsersFromMetadata } from "../../utilities/global"
 import { processFormulas } from "../../utilities/rowProcessor"
@@ -79,9 +79,7 @@ async function getFullLinkedDocs(links: LinkDocumentValue[]) {
   const db = context.getAppDB()
   const linkedRowIds = links.map(link => link.id)
   const uniqueRowIds = [...new Set(linkedRowIds)]
-  let dbRows = (await db.allDocs<Row>(getMultiIDParams(uniqueRowIds))).rows.map(
-    row => row.doc!
-  )
+  let dbRows = await db.getMultiple<Row>(uniqueRowIds, { allowMissing: true })
   // convert the unique db rows back to a full list of linked rows
   const linked = linkedRowIds
     .map(id => dbRows.find(row => row && row._id === id))


@ -6,6 +6,7 @@ import {
RelationshipFieldMetadata, RelationshipFieldMetadata,
VirtualDocumentType, VirtualDocumentType,
INTERNAL_TABLE_SOURCE_ID, INTERNAL_TABLE_SOURCE_ID,
DatabaseQueryOpts,
} from "@budibase/types" } from "@budibase/types"
import { FieldTypes } from "../constants" import { FieldTypes } from "../constants"
export { DocumentType, VirtualDocumentType } from "@budibase/types" export { DocumentType, VirtualDocumentType } from "@budibase/types"
@ -229,7 +230,10 @@ export function getAutomationMetadataParams(otherProps: any = {}) {
/** /**
* Gets parameters for retrieving a query, this is a utility function for the getDocParams function. * Gets parameters for retrieving a query, this is a utility function for the getDocParams function.
*/ */
export function getQueryParams(datasourceId?: Optional, otherProps: any = {}) { export function getQueryParams(
datasourceId?: Optional,
otherProps: Partial<DatabaseQueryOpts> = {}
) {
if (datasourceId == null) { if (datasourceId == null) {
return getDocParams(DocumentType.QUERY, null, otherProps) return getDocParams(DocumentType.QUERY, null, otherProps)
} }
@ -256,7 +260,7 @@ export function generateMetadataID(type: string, entityId: string) {
export function getMetadataParams( export function getMetadataParams(
type: string, type: string,
entityId?: Optional, entityId?: Optional,
otherProps: any = {} otherProps: Partial<DatabaseQueryOpts> = {}
) { ) {
let docId = `${type}${SEPARATOR}` let docId = `${type}${SEPARATOR}`
if (entityId != null) { if (entityId != null) {
@ -269,7 +273,9 @@ export function generateMemoryViewID(viewName: string) {
return `${DocumentType.MEM_VIEW}${SEPARATOR}${viewName}` return `${DocumentType.MEM_VIEW}${SEPARATOR}${viewName}`
} }
export function getMemoryViewParams(otherProps: any = {}) { export function getMemoryViewParams(
otherProps: Partial<DatabaseQueryOpts> = {}
) {
return getDocParams(DocumentType.MEM_VIEW, null, otherProps) return getDocParams(DocumentType.MEM_VIEW, null, otherProps)
} }
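The otherProps parameters above change from any to Partial<DatabaseQueryOpts>, so callers get type checking on the extra query options they pass through. A small sketch of a caller, assuming DatabaseQueryOpts exposes the usual allDocs options such as include_docs; the import path mirrors this file but the snippet is illustrative only:

import { context } from "@budibase/backend-core"
import { DatabaseQueryOpts } from "@budibase/types"
import { getQueryParams } from "../../../db/utils"

async function fetchQueries(datasourceId?: string) {
  const db = context.getAppDB()
  // the options object is now checked against Partial<DatabaseQueryOpts> instead of being `any`
  const opts: Partial<DatabaseQueryOpts> = { include_docs: true }
  const params = getQueryParams(datasourceId, opts)
  return (await db.allDocs(params)).rows.map(row => row.doc!)
}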
@ -277,16 +283,6 @@ export function generatePluginID(name: string) {
return `${DocumentType.PLUGIN}${SEPARATOR}${name}` return `${DocumentType.PLUGIN}${SEPARATOR}${name}`
} }
/**
* This can be used with the db.allDocs to get a list of IDs
*/
export function getMultiIDParams(ids: string[]) {
return {
keys: ids,
include_docs: true,
}
}
/** /**
* Generates a new view ID. * Generates a new view ID.
* @returns The new view ID which the view doc can be stored under. * @returns The new view ID which the view doc can be stored under.


@ -165,10 +165,22 @@ class RedisIntegration {
// commands split line by line // commands split line by line
const commands = query.json.trim().split("\n") const commands = query.json.trim().split("\n")
let pipelineCommands = [] let pipelineCommands = []
let tokenised
// process each command separately // process each command separately
for (let command of commands) { for (let command of commands) {
const tokenised = command.trim().split(" ") const valueToken = command.trim().match(/".*"/)
if (valueToken?.[0]) {
tokenised = [
...command
.substring(0, command.indexOf(valueToken[0]) - 1)
.trim()
.split(" "),
valueToken?.[0],
]
} else {
tokenised = command.trim().split(" ")
}
// Pipeline only accepts lower case commands // Pipeline only accepts lower case commands
tokenised[0] = tokenised[0].toLowerCase() tokenised[0] = tokenised[0].toLowerCase()
pipelineCommands.push(tokenised) pipelineCommands.push(tokenised)
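The parser change above keeps a double-quoted phrase as a single token instead of splitting it on every space, so values containing spaces survive into the pipeline. The same logic as a standalone sketch (the function name is made up; only the first quoted value per command is handled, matching the regex above, and the integration lowercases the command word afterwards):

function tokeniseRedisCommand(command: string): string[] {
  const valueToken = command.trim().match(/".*"/)
  if (valueToken?.[0]) {
    // split everything before the quoted value on spaces, keep the quoted value whole
    return [
      ...command
        .substring(0, command.indexOf(valueToken[0]) - 1)
        .trim()
        .split(" "),
      valueToken[0],
    ]
  }
  return command.trim().split(" ")
}

// tokeniseRedisCommand('SET foo "What a wonderful world!"')
// -> ["SET", "foo", '"What a wonderful world!"']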


@ -85,4 +85,21 @@ describe("Redis Integration", () => {
["get", "foo"], ["get", "foo"],
]) ])
}) })
it("calls the pipeline method with double quoted phrase values", async () => {
const body = {
json: 'SET foo "What a wonderful world!"\nGET foo',
}
// ioredis-mock doesn't support pipelines
config.integration.client.pipeline = jest.fn(() => ({
exec: jest.fn(() => [[]]),
}))
await config.integration.command(body)
expect(config.integration.client.pipeline).toHaveBeenCalledWith([
["set", "foo", '"What a wonderful world!"'],
["get", "foo"],
])
})
}) })


@ -9,11 +9,11 @@ function mockWorker() {
return { return {
_id: "us_uuid1", _id: "us_uuid1",
roles: { roles: {
"app_test": "BASIC", app_test: "BASIC",
}, },
roleId: "BASIC", roleId: "BASIC",
} }
} },
})) }))
} }
@ -109,7 +109,7 @@ class TestConfiguration {
path: "", path: "",
cookies: { cookies: {
set: jest.fn(), set: jest.fn(),
} },
} }
} }


@ -1,5 +1,5 @@
import { context } from "@budibase/backend-core" import { context, docIds } from "@budibase/backend-core"
import { isTableId } from "@budibase/backend-core/src/docIds"
import { import {
DatabaseQueryOpts, DatabaseQueryOpts,
LinkDocument, LinkDocument,
@ -8,7 +8,7 @@ import {
import { ViewName, getQueryIndex } from "../../../../src/db/utils" import { ViewName, getQueryIndex } from "../../../../src/db/utils"
export async function fetch(tableId: string): Promise<LinkDocumentValue[]> { export async function fetch(tableId: string): Promise<LinkDocumentValue[]> {
if (!isTableId(tableId)) { if (!docIds.isTableId(tableId)) {
throw new Error(`Invalid tableId: ${tableId}`) throw new Error(`Invalid tableId: ${tableId}`)
} }
@ -24,7 +24,7 @@ export async function fetch(tableId: string): Promise<LinkDocumentValue[]> {
export async function fetchWithDocument( export async function fetchWithDocument(
tableId: string tableId: string
): Promise<LinkDocument[]> { ): Promise<LinkDocument[]> {
if (!isTableId(tableId)) { if (!docIds.isTableId(tableId)) {
throw new Error(`Invalid tableId: ${tableId}`) throw new Error(`Invalid tableId: ${tableId}`)
} }


@ -1,5 +1,5 @@
import { context } from "@budibase/backend-core" import { context } from "@budibase/backend-core"
import { getMultiIDParams, getTableParams } from "../../../db/utils" import { getTableParams } from "../../../db/utils"
import { import {
breakExternalTableId, breakExternalTableId,
isExternalTableID, isExternalTableID,
@ -17,6 +17,9 @@ import datasources from "../datasources"
import sdk from "../../../sdk" import sdk from "../../../sdk"
export function processTable(table: Table): Table { export function processTable(table: Table): Table {
if (!table) {
return table
}
if (table._id && isExternalTableID(table._id)) { if (table._id && isExternalTableID(table._id)) {
return { return {
...table, ...table,
@ -73,6 +76,9 @@ export async function getExternalTable(
tableName: string tableName: string
): Promise<Table> { ): Promise<Table> {
const entities = await getExternalTablesInDatasource(datasourceId) const entities = await getExternalTablesInDatasource(datasourceId)
if (!entities[tableName]) {
throw new Error(`Unable to find table named "${tableName}"`)
}
return processTable(entities[tableName]) return processTable(entities[tableName])
} }
@ -124,10 +130,10 @@ export async function getTables(tableIds: string[]): Promise<Table[]> {
} }
if (internalTableIds.length) { if (internalTableIds.length) {
const db = context.getAppDB() const db = context.getAppDB()
const internalTableDocs = await db.allDocs<Table>( const internalTables = await db.getMultiple<Table>(internalTableIds, {
getMultiIDParams(internalTableIds) allowMissing: true,
) })
tables = tables.concat(internalTableDocs.rows.map(row => row.doc!)) tables = tables.concat(internalTables)
} }
return processTables(tables) return processTables(tables)
} }

View File

@ -17,7 +17,6 @@ import sdk from "../../../sdk"
import { isExternalTableID } from "../../../integrations/utils" import { isExternalTableID } from "../../../integrations/utils"
import { EventType, updateLinks } from "../../../db/linkedRows" import { EventType, updateLinks } from "../../../db/linkedRows"
import { cloneDeep } from "lodash" import { cloneDeep } from "lodash"
import { isInternalColumnName } from "@budibase/backend-core/src/db"
export interface MigrationResult { export interface MigrationResult {
tablesUpdated: Table[] tablesUpdated: Table[]
@ -36,7 +35,7 @@ export async function migrate(
throw new BadRequestError(`Column name cannot be empty`) throw new BadRequestError(`Column name cannot be empty`)
} }
if (isInternalColumnName(newColumn.name)) { if (dbCore.isInternalColumnName(newColumn.name)) {
throw new BadRequestError(`Column name cannot be a reserved column name`) throw new BadRequestError(`Column name cannot be a reserved column name`)
} }


@ -0,0 +1,39 @@
import TestConfig from "../../tests/utilities/TestConfiguration"
import { basicTable } from "../../tests/utilities/structures"
import { Table } from "@budibase/types"
import sdk from "../"
describe("tables", () => {
const config = new TestConfig()
let table: Table
beforeAll(async () => {
await config.init()
table = await config.api.table.create(basicTable())
})
describe("getTables", () => {
it("should be able to retrieve tables", async () => {
await config.doInContext(config.appId, async () => {
const tables = await sdk.tables.getTables([table._id!])
expect(tables.length).toBe(1)
expect(tables[0]._id).toBe(table._id)
expect(tables[0].name).toBe(table.name)
})
})
it("shouldn't fail when retrieving tables that don't exist", async () => {
await config.doInContext(config.appId, async () => {
const tables = await sdk.tables.getTables(["unknown"])
expect(tables.length).toBe(0)
})
})
it("should de-duplicate the IDs", async () => {
await config.doInContext(config.appId, async () => {
const tables = await sdk.tables.getTables([table._id!, table._id!])
expect(tables.length).toBe(1)
})
})
})
})


@ -510,13 +510,14 @@ class TestConfiguration {
// create dev app // create dev app
// clear any old app // clear any old app
this.appId = null this.appId = null
await context.doInAppContext(null, async () => { this.app = await context.doInAppContext(null, async () => {
this.app = await this._req( const app = await this._req(
{ name: appName }, { name: appName },
null, null,
controllers.app.create controllers.app.create
) )
this.appId = this.app?.appId! this.appId = app.appId!
return app
}) })
return await context.doInAppContext(this.appId, async () => { return await context.doInAppContext(this.appId, async () => {
// create production app // create production app
@ -525,7 +526,7 @@ class TestConfiguration {
this.allApps.push(this.prodApp) this.allApps.push(this.prodApp)
this.allApps.push(this.app) this.allApps.push(this.app)
return this.app return this.app!
}) })
} }
@ -537,7 +538,7 @@ class TestConfiguration {
return context.doInAppContext(prodAppId, async () => { return context.doInAppContext(prodAppId, async () => {
const db = context.getProdAppDB() const db = context.getProdAppDB()
return await db.get(dbCore.DocumentType.APP_METADATA) return await db.get<App>(dbCore.DocumentType.APP_METADATA)
}) })
} }


@ -241,7 +241,7 @@ class Orchestrator {
}) })
} }
async execute() { async execute(): Promise<any> {
// this will retrieve from context created at start of thread // this will retrieve from context created at start of thread
this._context.env = await sdkUtils.getEnvironmentVariables() this._context.env = await sdkUtils.getEnvironmentVariables()
let automation = this._automation let automation = this._automation


@ -1,4 +1,4 @@
import { getMultiIDParams, getGlobalIDFromUserMetadataID } from "../db/utils" import { getGlobalIDFromUserMetadataID } from "../db/utils"
import { import {
roles, roles,
db as dbCore, db as dbCore,
@ -96,9 +96,7 @@ export async function getRawGlobalUsers(userIds?: string[]): Promise<User[]> {
const db = tenancy.getGlobalDB() const db = tenancy.getGlobalDB()
let globalUsers: User[] let globalUsers: User[]
if (userIds) { if (userIds) {
globalUsers = (await db.allDocs<User>(getMultiIDParams(userIds))).rows.map( globalUsers = await db.getMultiple<User>(userIds, { allowMissing: true })
row => row.doc!
)
} else { } else {
globalUsers = ( globalUsers = (
await db.allDocs<User>( await db.allDocs<User>(


@ -1,12 +0,0 @@
{
"globals": {
"emit": true,
"key": true
},
"env": {
"node": true
},
"extends": ["eslint:recommended"],
"rules": {
}
}


@ -10,6 +10,7 @@ export interface SaveUserResponse {
export interface UserDetails { export interface UserDetails {
_id: string _id: string
email: string email: string
password?: string
} }
export interface BulkUserRequest { export interface BulkUserRequest {
@ -49,6 +50,7 @@ export type InviteUsersRequest = InviteUserRequest[]
export interface InviteUsersResponse { export interface InviteUsersResponse {
successful: { email: string }[] successful: { email: string }[]
unsuccessful: { email: string; reason: string }[] unsuccessful: { email: string; reason: string }[]
created?: boolean
} }
export interface SearchUsersRequest { export interface SearchUsersRequest {


@ -122,7 +122,11 @@ export interface Database {
exists(): Promise<boolean> exists(): Promise<boolean>
checkSetup(): Promise<Nano.DocumentScope<any>> checkSetup(): Promise<Nano.DocumentScope<any>>
get<T>(id?: string): Promise<T> get<T extends Document>(id?: string): Promise<T>
getMultiple<T extends Document>(
ids: string[],
opts?: { allowMissing?: boolean }
): Promise<T[]>
remove( remove(
id: string | Document, id: string | Document,
rev?: string rev?: string
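getMultiple is the bulk-fetch method added to the Database interface here; its implementation is not part of this diff. A plausible sketch of the contract in terms of allDocs (the same keys/include_docs query the removed getMultiIDParams helper used to build), to be read as an illustration rather than the actual backend-core code:

import { Database, Document } from "@budibase/types"

async function getMultipleSketch<T extends Document>(
  db: Database,
  ids: string[],
  opts?: { allowMissing?: boolean }
): Promise<T[]> {
  const response = await db.allDocs<T>({ keys: ids, include_docs: true })
  // rows for unknown IDs come back without a doc, so they are filtered out
  const found = response.rows.filter(row => row.doc).map(row => row.doc!)
  if (!opts?.allowMissing && found.length !== ids.length) {
    throw new Error("One or more documents could not be found")
  }
  return found
}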


@ -7,7 +7,9 @@ export enum PlanType {
/** @deprecated */ /** @deprecated */
PREMIUM = "premium", PREMIUM = "premium",
PREMIUM_PLUS = "premium_plus", PREMIUM_PLUS = "premium_plus",
/** @deprecated */
BUSINESS = "business", BUSINESS = "business",
ENTERPRISE_BASIC = "enterprise_basic",
ENTERPRISE = "enterprise", ENTERPRISE = "enterprise",
} }


@ -1,8 +1,3 @@
import {
checkInviteCode,
getInviteCodes,
updateInviteCode,
} from "../../../utilities/redis"
import * as userSdk from "../../../sdk/users" import * as userSdk from "../../../sdk/users"
import env from "../../../environment" import env from "../../../environment"
import { import {
@ -16,6 +11,7 @@ import {
Ctx, Ctx,
InviteUserRequest, InviteUserRequest,
InviteUsersRequest, InviteUsersRequest,
InviteUsersResponse,
MigrationType, MigrationType,
SaveUserResponse, SaveUserResponse,
SearchUsersRequest, SearchUsersRequest,
@ -249,59 +245,35 @@ export const tenantUserLookup = async (ctx: any) => {
/* /*
Encapsulate the app user onboarding flows here. Encapsulate the app user onboarding flows here.
*/ */
export const onboardUsers = async (ctx: Ctx<InviteUsersRequest>) => { export const onboardUsers = async (
const request = ctx.request.body ctx: Ctx<InviteUsersRequest, InviteUsersResponse>
const isBulkCreate = "create" in request ) => {
if (await isEmailConfigured()) {
const emailConfigured = await isEmailConfigured() await inviteMultiple(ctx)
return
let onboardingResponse
if (isBulkCreate) {
// @ts-ignore
const { users, groups, roles } = request.create
const assignUsers = users.map((user: User) => (user.roles = roles))
onboardingResponse = await userSdk.db.bulkCreate(assignUsers, groups)
ctx.body = onboardingResponse
} else if (emailConfigured) {
onboardingResponse = await inviteMultiple(ctx)
} else if (!emailConfigured) {
const inviteRequest = ctx.request.body
let createdPasswords: any = {}
const users: User[] = inviteRequest.map(invite => {
let password = Math.random().toString(36).substring(2, 22)
// Temp password to be passed to the user.
createdPasswords[invite.email] = password
return {
email: invite.email,
password,
forceResetPassword: true,
roles: invite.userInfo.apps,
admin: invite.userInfo.admin,
builder: invite.userInfo.builder,
tenantId: tenancy.getTenantId(),
}
})
let bulkCreateReponse = await userSdk.db.bulkCreate(users, [])
// Apply temporary credentials
ctx.body = {
...bulkCreateReponse,
successful: bulkCreateReponse?.successful.map(user => {
return {
...user,
password: createdPasswords[user.email],
}
}),
created: true,
}
} else {
ctx.throw(400, "User onboarding failed")
} }
let createdPasswords: Record<string, string> = {}
const users: User[] = ctx.request.body.map(invite => {
let password = Math.random().toString(36).substring(2, 22)
createdPasswords[invite.email] = password
return {
email: invite.email,
password,
forceResetPassword: true,
roles: invite.userInfo.apps,
admin: invite.userInfo.admin,
builder: invite.userInfo.builder,
tenantId: tenancy.getTenantId(),
}
})
let resp = await userSdk.db.bulkCreate(users)
for (const user of resp.successful) {
user.password = createdPasswords[user.email]
}
ctx.body = { ...resp, created: true }
} }
export const invite = async (ctx: Ctx<InviteUserRequest>) => { export const invite = async (ctx: Ctx<InviteUserRequest>) => {
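The rewritten onboardUsers above now has two branches: when SMTP is configured it simply defers to inviteMultiple, otherwise it bulk-creates the users immediately with generated temporary passwords and hands those passwords back in the response. A sketch of the response body from the no-email branch, using the fields added to UserDetails and InviteUsersResponse earlier in this diff; all values below are illustrative:

// shape of ctx.body from the no-SMTP branch; the stored users also get forceResetPassword: true
const exampleOnboardResponse = {
  successful: [
    { _id: "us_<generated>", email: "new.user@example.com", password: "<temporary password>" },
  ],
  unsuccessful: [] as { email: string; reason: string }[],
  created: true,
}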
@ -328,18 +300,18 @@ export const invite = async (ctx: Ctx<InviteUserRequest>) => {
} }
export const inviteMultiple = async (ctx: Ctx<InviteUsersRequest>) => { export const inviteMultiple = async (ctx: Ctx<InviteUsersRequest>) => {
const request = ctx.request.body ctx.body = await userSdk.invite(ctx.request.body)
ctx.body = await userSdk.invite(request)
} }
export const checkInvite = async (ctx: any) => { export const checkInvite = async (ctx: any) => {
const { code } = ctx.params const { code } = ctx.params
let invite let invite
try { try {
invite = await checkInviteCode(code, false) invite = await cache.invite.getCode(code)
} catch (e) { } catch (e) {
console.warn("Error getting invite from code", e) console.warn("Error getting invite from code", e)
ctx.throw(400, "There was a problem with the invite") ctx.throw(400, "There was a problem with the invite")
return
} }
ctx.body = { ctx.body = {
email: invite.email, email: invite.email,
@ -347,14 +319,12 @@ export const checkInvite = async (ctx: any) => {
} }
export const getUserInvites = async (ctx: any) => { export const getUserInvites = async (ctx: any) => {
let invites
try { try {
// Restricted to the currently authenticated tenant // Restricted to the currently authenticated tenant
invites = await getInviteCodes() ctx.body = await cache.invite.getInviteCodes()
} catch (e) { } catch (e) {
ctx.throw(400, "There was a problem fetching invites") ctx.throw(400, "There was a problem fetching invites")
} }
ctx.body = invites
} }
export const updateInvite = async (ctx: any) => { export const updateInvite = async (ctx: any) => {
@ -365,12 +335,10 @@ export const updateInvite = async (ctx: any) => {
let invite let invite
try { try {
invite = await checkInviteCode(code, false) invite = await cache.invite.getCode(code)
if (!invite) {
throw new Error("The invite could not be retrieved")
}
} catch (e) { } catch (e) {
ctx.throw(400, "There was a problem with the invite") ctx.throw(400, "There was a problem with the invite")
return
} }
let updated = { let updated = {
@ -395,7 +363,7 @@ export const updateInvite = async (ctx: any) => {
} }
} }
await updateInviteCode(code, updated) await cache.invite.updateCode(code, updated)
ctx.body = { ...invite } ctx.body = { ...invite }
} }
@ -405,7 +373,8 @@ export const inviteAccept = async (
const { inviteCode, password, firstName, lastName } = ctx.request.body const { inviteCode, password, firstName, lastName } = ctx.request.body
try { try {
// info is an extension of the user object that was stored by global // info is an extension of the user object that was stored by global
const { email, info }: any = await checkInviteCode(inviteCode) const { email, info }: any = await cache.invite.getCode(inviteCode)
await cache.invite.deleteCode(inviteCode)
const user = await tenancy.doInTenant(info.tenantId, async () => { const user = await tenancy.doInTenant(info.tenantId, async () => {
let request: any = { let request: any = {
firstName, firstName,


@ -1,11 +1,12 @@
import { InviteUsersResponse, User } from "@budibase/types" import { InviteUsersResponse, User } from "@budibase/types"
jest.mock("nodemailer")
import { TestConfiguration, mocks, structures } from "../../../../tests" import { TestConfiguration, mocks, structures } from "../../../../tests"
const sendMailMock = mocks.email.mock()
import { events, tenancy, accounts as _accounts } from "@budibase/backend-core" import { events, tenancy, accounts as _accounts } from "@budibase/backend-core"
import * as userSdk from "../../../../sdk/users" import * as userSdk from "../../../../sdk/users"
jest.mock("nodemailer")
const sendMailMock = mocks.email.mock()
const accounts = jest.mocked(_accounts) const accounts = jest.mocked(_accounts)
describe("/api/global/users", () => { describe("/api/global/users", () => {
@ -54,6 +55,24 @@ describe("/api/global/users", () => {
expect(events.user.invited).toBeCalledTimes(0) expect(events.user.invited).toBeCalledTimes(0)
}) })
it("should not invite the same user twice", async () => {
const email = structures.users.newEmail()
await config.api.users.sendUserInvite(sendMailMock, email)
jest.clearAllMocks()
const { code, res } = await config.api.users.sendUserInvite(
sendMailMock,
email,
400
)
expect(res.body.message).toBe(`Unavailable`)
expect(sendMailMock).toHaveBeenCalledTimes(0)
expect(code).toBeUndefined()
expect(events.user.invited).toBeCalledTimes(0)
})
it("should be able to create new user from invite", async () => { it("should be able to create new user from invite", async () => {
const email = structures.users.newEmail() const email = structures.users.newEmail()
const { code } = await config.api.users.sendUserInvite( const { code } = await config.api.users.sendUserInvite(
@ -101,6 +120,23 @@ describe("/api/global/users", () => {
expect(sendMailMock).toHaveBeenCalledTimes(0) expect(sendMailMock).toHaveBeenCalledTimes(0)
expect(events.user.invited).toBeCalledTimes(0) expect(events.user.invited).toBeCalledTimes(0)
}) })
it("should not be able to generate an invitation for user that has already been invited", async () => {
const email = structures.users.newEmail()
await config.api.users.sendUserInvite(sendMailMock, email)
jest.clearAllMocks()
const request = [{ email: email, userInfo: {} }]
const res = await config.api.users.sendMultiUserInvite(request)
const body = res.body as InviteUsersResponse
expect(body.successful.length).toBe(0)
expect(body.unsuccessful.length).toBe(1)
expect(body.unsuccessful[0].reason).toBe("Unavailable")
expect(sendMailMock).toHaveBeenCalledTimes(0)
expect(events.user.invited).toBeCalledTimes(0)
})
}) })
describe("POST /api/global/users/bulk", () => { describe("POST /api/global/users/bulk", () => {
@ -633,4 +669,25 @@ describe("/api/global/users", () => {
expect(response.body.message).toBe("Unable to delete self.") expect(response.body.message).toBe("Unable to delete self.")
}) })
}) })
describe("POST /api/global/users/onboard", () => {
it("should successfully onboard a user", async () => {
const response = await config.api.users.onboardUser([
{ email: structures.users.newEmail(), userInfo: {} },
])
expect(response.successful.length).toBe(1)
expect(response.unsuccessful.length).toBe(0)
})
it("should not onboard a user who has been invited", async () => {
const email = structures.users.newEmail()
await config.api.users.sendUserInvite(sendMailMock, email)
const response = await config.api.users.onboardUser([
{ email, userInfo: {} },
])
expect(response.successful.length).toBe(0)
expect(response.unsuccessful.length).toBe(1)
})
})
}) })


@ -16,13 +16,13 @@ import {
queue, queue,
env as coreEnv, env as coreEnv,
timers, timers,
redis,
} from "@budibase/backend-core" } from "@budibase/backend-core"
db.init() db.init()
import Koa from "koa" import Koa from "koa"
import koaBody from "koa-body" import koaBody from "koa-body"
import http from "http" import http from "http"
import api from "./api" import api from "./api"
import * as redis from "./utilities/redis"
const koaSession = require("koa-session") const koaSession = require("koa-session")
import { userAgent } from "koa-useragent" import { userAgent } from "koa-useragent"
@ -72,8 +72,8 @@ server.on("close", async () => {
shuttingDown = true shuttingDown = true
console.log("Server Closed") console.log("Server Closed")
timers.cleanup() timers.cleanup()
await redis.shutdown() events.shutdown()
await events.shutdown() await redis.clients.shutdown()
await queue.shutdown() await queue.shutdown()
if (!env.isTest()) { if (!env.isTest()) {
process.exit(errCode) process.exit(errCode)
@ -88,7 +88,7 @@ const shutdown = () => {
export default server.listen(parseInt(env.PORT || "4002"), async () => { export default server.listen(parseInt(env.PORT || "4002"), async () => {
console.log(`Worker running on ${JSON.stringify(server.address())}`) console.log(`Worker running on ${JSON.stringify(server.address())}`)
await initPro() await initPro()
await redis.init() await redis.clients.init()
// configure events to use the pro audit log write // configure events to use the pro audit log write
// can't integrate directly into backend-core due to cyclic issues // can't integrate directly into backend-core due to cyclic issues
await events.processors.init(proSdk.auditLogs.write) await events.processors.init(proSdk.auditLogs.write)


@ -6,12 +6,12 @@ import {
sessions, sessions,
tenancy, tenancy,
utils as coreUtils, utils as coreUtils,
cache,
} from "@budibase/backend-core" } from "@budibase/backend-core"
import { PlatformLogoutOpts, User } from "@budibase/types" import { PlatformLogoutOpts, User } from "@budibase/types"
import jwt from "jsonwebtoken" import jwt from "jsonwebtoken"
import * as userSdk from "../users" import * as userSdk from "../users"
import * as emails from "../../utilities/email" import * as emails from "../../utilities/email"
import * as redis from "../../utilities/redis"
import { EmailTemplatePurpose } from "../../constants" import { EmailTemplatePurpose } from "../../constants"
// LOGIN / LOGOUT // LOGIN / LOGOUT
@ -73,7 +73,7 @@ export const reset = async (email: string) => {
* Perform the user password update if the provided reset code is valid. * Perform the user password update if the provided reset code is valid.
*/ */
export const resetUpdate = async (resetCode: string, password: string) => { export const resetUpdate = async (resetCode: string, password: string) => {
const { userId } = await redis.checkResetPasswordCode(resetCode) const { userId } = await cache.passwordReset.getCode(resetCode)
let user = await userSdk.db.getUser(userId) let user = await userSdk.db.getUser(userId)
user.password = password user.password = password


@ -1,5 +1,9 @@
import { events, tenancy, users as usersCore } from "@budibase/backend-core" import { events, tenancy, users as usersCore } from "@budibase/backend-core"
import { InviteUsersRequest, InviteUsersResponse } from "@budibase/types" import {
InviteUserRequest,
InviteUsersRequest,
InviteUsersResponse,
} from "@budibase/types"
import { sendEmail } from "../../utilities/email" import { sendEmail } from "../../utilities/email"
import { EmailTemplatePurpose } from "../../constants" import { EmailTemplatePurpose } from "../../constants"
@ -14,11 +18,13 @@ export async function invite(
const matchedEmails = await usersCore.searchExistingEmails( const matchedEmails = await usersCore.searchExistingEmails(
users.map(u => u.email) users.map(u => u.email)
) )
const newUsers = [] const newUsers: InviteUserRequest[] = []
// separate duplicates from new users // separate duplicates from new users
for (let user of users) { for (let user of users) {
if (matchedEmails.includes(user.email)) { if (matchedEmails.includes(user.email)) {
// This "Unavailable" is load bearing. The tests and frontend both check for it
// specifically
response.unsuccessful.push({ email: user.email, reason: "Unavailable" }) response.unsuccessful.push({ email: user.email, reason: "Unavailable" })
} else { } else {
newUsers.push(user) newUsers.push(user)


@ -5,6 +5,7 @@ import {
User, User,
CreateAdminUserRequest, CreateAdminUserRequest,
SearchQuery, SearchQuery,
InviteUsersResponse,
} from "@budibase/types" } from "@budibase/types"
import structures from "../structures" import structures from "../structures"
import { generator } from "@budibase/backend-core/tests" import { generator } from "@budibase/backend-core/tests"
@ -176,4 +177,24 @@ export class UserAPI extends TestAPI {
.expect("Content-Type", /json/) .expect("Content-Type", /json/)
.expect(200) .expect(200)
} }
onboardUser = async (
req: InviteUsersRequest
): Promise<InviteUsersResponse> => {
const resp = await this.request
.post(`/api/global/users/onboard`)
.send(req)
.set(this.config.defaultHeaders())
.expect("Content-Type", /json/)
if (resp.status !== 200) {
throw new Error(
`request failed with status ${resp.status} and body ${JSON.stringify(
resp.body
)}`
)
}
return resp.body as InviteUsersResponse
}
} }


@ -3,9 +3,8 @@ import { EmailTemplatePurpose, TemplateType } from "../constants"
import { getTemplateByPurpose, EmailTemplates } from "../constants/templates" import { getTemplateByPurpose, EmailTemplates } from "../constants/templates"
import { getSettingsTemplateContext } from "./templates" import { getSettingsTemplateContext } from "./templates"
import { processString } from "@budibase/string-templates" import { processString } from "@budibase/string-templates"
import { getResetPasswordCode, getInviteCode } from "./redis"
import { User, SendEmailOpts, SMTPInnerConfig } from "@budibase/types" import { User, SendEmailOpts, SMTPInnerConfig } from "@budibase/types"
import { configs } from "@budibase/backend-core" import { configs, cache } from "@budibase/backend-core"
import ical from "ical-generator" import ical from "ical-generator"
const nodemailer = require("nodemailer") const nodemailer = require("nodemailer")
@ -61,9 +60,9 @@ async function getLinkCode(
) { ) {
switch (purpose) { switch (purpose) {
case EmailTemplatePurpose.PASSWORD_RECOVERY: case EmailTemplatePurpose.PASSWORD_RECOVERY:
return getResetPasswordCode(user._id!, info) return cache.passwordReset.createCode(user._id!, info)
case EmailTemplatePurpose.INVITATION: case EmailTemplatePurpose.INVITATION:
return getInviteCode(email, info) return cache.invite.createCode(email, info)
default: default:
return null return null
} }


@ -1,150 +0,0 @@
import { redis, utils, tenancy } from "@budibase/backend-core"
import env from "../environment"
function getExpirySecondsForDB(db: string) {
switch (db) {
case redis.utils.Databases.PW_RESETS:
// a hour
return 3600
case redis.utils.Databases.INVITATIONS:
// a week
return 604800
}
}
let pwResetClient: any, invitationClient: any
function getClient(db: string) {
switch (db) {
case redis.utils.Databases.PW_RESETS:
return pwResetClient
case redis.utils.Databases.INVITATIONS:
return invitationClient
}
}
async function writeACode(db: string, value: any) {
const client = await getClient(db)
const code = utils.newid()
await client.store(code, value, getExpirySecondsForDB(db))
return code
}
async function updateACode(db: string, code: string, value: any) {
const client = await getClient(db)
await client.store(code, value, getExpirySecondsForDB(db))
}
/**
* Given an invite code and invite body, allow the update an existing/valid invite in redis
* @param inviteCode The invite code for an invite in redis
* @param value The body of the updated user invitation
*/
export async function updateInviteCode(inviteCode: string, value: string) {
await updateACode(redis.utils.Databases.INVITATIONS, inviteCode, value)
}
async function getACode(db: string, code: string, deleteCode = true) {
const client = await getClient(db)
const value = await client.get(code)
if (!value) {
throw new Error("Invalid code.")
}
if (deleteCode) {
await client.delete(code)
}
return value
}
export async function init() {
pwResetClient = new redis.Client(redis.utils.Databases.PW_RESETS)
invitationClient = new redis.Client(redis.utils.Databases.INVITATIONS)
await pwResetClient.init()
await invitationClient.init()
}
/**
* make sure redis connection is closed.
*/
export async function shutdown() {
if (pwResetClient) await pwResetClient.finish()
if (invitationClient) await invitationClient.finish()
// shutdown core clients
await redis.clients.shutdown()
console.log("Redis shutdown")
}
/**
* Given a user ID this will store a code (that is returned) for an hour in redis.
* The user can then return this code for resetting their password (through their reset link).
* @param userId the ID of the user which is to be reset.
* @param info Info about the user/the reset process.
* @return returns the code that was stored to redis.
*/
export async function getResetPasswordCode(userId: string, info: any) {
return writeACode(redis.utils.Databases.PW_RESETS, { userId, info })
}
/**
* Given a reset code this will lookup to redis, check if the code is valid and delete if required.
* @param resetCode The code provided via the email link.
* @param deleteCode If the code is used/finished with this will delete it - defaults to true.
* @return returns the user ID if it is found
*/
export async function checkResetPasswordCode(
resetCode: string,
deleteCode = true
) {
try {
return getACode(redis.utils.Databases.PW_RESETS, resetCode, deleteCode)
} catch (err) {
throw "Provided information is not valid, cannot reset password - please try again."
}
}
/**
* Generates an invitation code and writes it to redis - which can later be checked for user creation.
* @param email the email address which the code is being sent to (for use later).
* @param info Information to be carried along with the invitation.
* @return returns the code that was stored to redis.
*/
export async function getInviteCode(email: string, info: any) {
return writeACode(redis.utils.Databases.INVITATIONS, { email, info })
}
/**
* Checks that the provided invite code is valid - will return the email address of user that was invited.
* @param inviteCode the invite code that was provided as part of the link.
* @param deleteCode whether or not the code should be deleted after retrieval - defaults to true.
* @return If the code is valid then an email address will be returned.
*/
export async function checkInviteCode(
inviteCode: string,
deleteCode: boolean = true
) {
try {
return getACode(redis.utils.Databases.INVITATIONS, inviteCode, deleteCode)
} catch (err) {
throw "Invitation is not valid or has expired, please request a new one."
}
}
/**
Get all currently available user invitations for the current tenant.
**/
export async function getInviteCodes() {
const client = await getClient(redis.utils.Databases.INVITATIONS)
const invites: any[] = await client.scan()
const results = invites.map(invite => {
return {
...invite.value,
code: invite.key,
}
})
if (!env.MULTI_TENANCY) {
return results
}
const tenantId = tenancy.getTenantId()
return results.filter(invite => tenantId === invite.info.tenantId)
}
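The worker-local redis helpers deleted above are replaced by the backend-core cache and redis clients used throughout this diff; expiry of reset and invite codes now lives in backend-core rather than here. A rough mapping of the old calls to the new ones, based only on the call sites visible in this diff:

import { cache, redis } from "@budibase/backend-core"

// getResetPasswordCode(userId, info)  -> cache.passwordReset.createCode(userId, info)
// checkResetPasswordCode(code)        -> cache.passwordReset.getCode(code)
// getInviteCode(email, info)          -> cache.invite.createCode(email, info)
// checkInviteCode(code, false)        -> cache.invite.getCode(code)
// checkInviteCode(code)               -> cache.invite.getCode(code) followed by cache.invite.deleteCode(code)
// updateInviteCode(code, value)       -> cache.invite.updateCode(code, value)
// getInviteCodes()                    -> cache.invite.getInviteCodes()
// init() / shutdown()                 -> redis.clients.init() / redis.clients.shutdown()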


@ -1,8 +1,4 @@
import { import { CreateRowParams, Row, SearchInputParams } from "../../../types"
CreateRowParams,
Row,
SearchInputParams,
} from "@budibase/server/api/controllers/public/mapping/types"
import { HeadersInit, Response } from "node-fetch" import { HeadersInit, Response } from "node-fetch"
import BudibasePublicAPIClient from "../BudibasePublicAPIClient" import BudibasePublicAPIClient from "../BudibasePublicAPIClient"
import * as fixtures from "../../fixtures" import * as fixtures from "../../fixtures"


@ -1,8 +1,4 @@
import { import { Table, SearchInputParams, CreateTableParams } from "../../../types"
Table,
SearchInputParams,
CreateTableParams,
} from "@budibase/server/api/controllers/public/mapping/types"
import { HeadersInit, Response } from "node-fetch" import { HeadersInit, Response } from "node-fetch"
import { generateTable } from "../../fixtures/tables" import { generateTable } from "../../fixtures/tables"
import BudibasePublicAPIClient from "../BudibasePublicAPIClient" import BudibasePublicAPIClient from "../BudibasePublicAPIClient"


@ -1,8 +1,4 @@
import { import { CreateUserParams, SearchInputParams, User } from "../../../types"
CreateUserParams,
SearchInputParams,
User,
} from "@budibase/server/api/controllers/public/mapping/types"
import { Response } from "node-fetch" import { Response } from "node-fetch"
import BudibasePublicAPIClient from "../BudibasePublicAPIClient" import BudibasePublicAPIClient from "../BudibasePublicAPIClient"
import * as fixtures from "../../fixtures" import * as fixtures from "../../fixtures"


@ -1,7 +1,4 @@
import { import { CreateUserParams, User } from "../../types"
CreateUserParams,
User,
} from "@budibase/server/api/controllers/public/mapping/types"
import { generator } from "../../shared" import { generator } from "../../shared"
export const generateUser = ( export const generateUser = (


@ -1,4 +1,4 @@
import { Application } from "@budibase/server/api/controllers/public/mapping/types" import { Application } from "../types"
import { Layout } from "@budibase/types" import { Layout } from "@budibase/types"
import { Screen } from "@budibase/types" import { Screen } from "@budibase/types"
// Create type for getAppPackage response // Create type for getAppPackage response


@ -983,10 +983,10 @@
expect "^29.0.0" expect "^29.0.0"
pretty-format "^29.0.0" pretty-format "^29.0.0"
"@types/node-fetch@2.6.2": "@types/node-fetch@2.6.4":
version "2.6.2" version "2.6.4"
resolved "https://registry.npmjs.org/@types/node-fetch/-/node-fetch-2.6.2.tgz" resolved "https://registry.yarnpkg.com/@types/node-fetch/-/node-fetch-2.6.4.tgz#1bc3a26de814f6bf466b25aeb1473fa1afe6a660"
integrity sha512-DHqhlq5jeESLy19TYhLakJ07kNumXWjcDdxXsLUMJZ6ue8VZJj4kLPQVE/2mdHh3xZziNF1xppu5lwmS53HR+A== integrity sha512-1ZX9fcN4Rvkvgv4E6PAY5WXUFWFcRWxZa3EW83UjycOB9ljJCedb2CupIP4RZMEwF/M3eTcCihbBRgwtGbg5Rg==
dependencies: dependencies:
"@types/node" "*" "@types/node" "*"
form-data "^3.0.0" form-data "^3.0.0"
@ -3587,18 +3587,18 @@ node-duration@^1.0.4:
resolved "https://registry.npmjs.org/node-duration/-/node-duration-1.0.4.tgz" resolved "https://registry.npmjs.org/node-duration/-/node-duration-1.0.4.tgz"
integrity sha512-eUXYNSY7DL53vqfTosggWkvyIW3bhAcqBDIlolgNYlZhianXTrCL50rlUJWD1eRqkIxMppXTfiFbp+9SjpPrgA== integrity sha512-eUXYNSY7DL53vqfTosggWkvyIW3bhAcqBDIlolgNYlZhianXTrCL50rlUJWD1eRqkIxMppXTfiFbp+9SjpPrgA==
node-fetch@2, node-fetch@2.6.7, node-fetch@^2.6.7: node-fetch@2.6.0:
version "2.6.0"
resolved "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.0.tgz"
integrity sha512-8dG4H5ujfvFiqDmVu9fQ5bOHUC15JMjMY/Zumv26oOvvVJjM67KF8koCWIabKQ1GJIa9r2mMZscBq/TbdOcmNA==
node-fetch@2.6.7, node-fetch@^2.6.7:
version "2.6.7" version "2.6.7"
resolved "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.7.tgz" resolved "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.7.tgz"
integrity sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ== integrity sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==
dependencies: dependencies:
whatwg-url "^5.0.0" whatwg-url "^5.0.0"
node-fetch@2.6.0:
version "2.6.0"
resolved "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.0.tgz"
integrity sha512-8dG4H5ujfvFiqDmVu9fQ5bOHUC15JMjMY/Zumv26oOvvVJjM67KF8koCWIabKQ1GJIa9r2mMZscBq/TbdOcmNA==
node-gyp-build-optional-packages@5.0.7: node-gyp-build-optional-packages@5.0.7:
version "5.0.7" version "5.0.7"
resolved "https://registry.npmjs.org/node-gyp-build-optional-packages/-/node-gyp-build-optional-packages-5.0.7.tgz" resolved "https://registry.npmjs.org/node-gyp-build-optional-packages/-/node-gyp-build-optional-packages-5.0.7.tgz"
@ -4893,10 +4893,10 @@ type-is@^1.6.16, type-is@^1.6.18:
media-typer "0.3.0" media-typer "0.3.0"
mime-types "~2.1.24" mime-types "~2.1.24"
typescript@4.7.3: typescript@5.2.2:
version "4.7.3" version "5.2.2"
resolved "https://registry.npmjs.org/typescript/-/typescript-4.7.3.tgz" resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.2.2.tgz#5ebb5e5a5b75f085f22bc3f8460fba308310fa78"
integrity sha512-WOkT3XYvrpXx4vMMqlD+8R8R37fZkjyLGlxavMc4iB8lrl8L0DeTcHbYgw/v0N/z9wAFsgBhcsF0ruoySS22mA== integrity sha512-mI4WrpHsbCIcwT9cF4FZvr80QUeKvsUsUvKDoR+X/7XHQH98xYD8YHZg7ANtz2GtZt/CBq2QJ0thkGJMHfqc1w==
uid2@0.0.x: uid2@0.0.x:
version "0.0.4" version "0.0.4"


@ -5,14 +5,7 @@ const { execSync } = require("child_process")
let version = "0.0.0" let version = "0.0.0"
const localPro = fs.existsSync("packages/pro/src") const localPro = fs.existsSync("packages/pro/src")
if (!localPro) { if (!localPro) {
const branchName = execSync("git rev-parse --abbrev-ref HEAD") version = "latest"
.toString()
.trim()
if (branchName === "master") {
version = "latest"
} else {
version = "develop"
}
} }
// Get the list of workspaces with mismatched dependencies // Get the list of workspaces with mismatched dependencies

yarn.lock (417 changed lines)

@ -6799,6 +6799,14 @@ array-back@^6.2.0, array-back@^6.2.2:
resolved "https://registry.yarnpkg.com/array-back/-/array-back-6.2.2.tgz#f567d99e9af88a6d3d2f9dfcc21db6f9ba9fd157" resolved "https://registry.yarnpkg.com/array-back/-/array-back-6.2.2.tgz#f567d99e9af88a6d3d2f9dfcc21db6f9ba9fd157"
integrity sha512-gUAZ7HPyb4SJczXAMUXMGAvI976JoK3qEx9v1FTmeYuJj0IBiaKttG1ydtGKdkfqWkIkouke7nG8ufGy77+Cvw== integrity sha512-gUAZ7HPyb4SJczXAMUXMGAvI976JoK3qEx9v1FTmeYuJj0IBiaKttG1ydtGKdkfqWkIkouke7nG8ufGy77+Cvw==
array-buffer-byte-length@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/array-buffer-byte-length/-/array-buffer-byte-length-1.0.0.tgz#fabe8bc193fea865f317fe7807085ee0dee5aead"
integrity sha512-LPuwb2P+NrQw3XhxGc36+XSvuBPopovXYTR9Ew++Du9Yb/bx5AzBfrIsBoj0EZUifjQU+sHL21sseZ3jerWO/A==
dependencies:
call-bind "^1.0.2"
is-array-buffer "^3.0.1"
array-differ@^3.0.0: array-differ@^3.0.0:
version "3.0.0" version "3.0.0"
resolved "https://registry.yarnpkg.com/array-differ/-/array-differ-3.0.0.tgz#3cbb3d0f316810eafcc47624734237d6aee4ae6b" resolved "https://registry.yarnpkg.com/array-differ/-/array-differ-3.0.0.tgz#3cbb3d0f316810eafcc47624734237d6aee4ae6b"
@ -6809,6 +6817,17 @@ array-ify@^1.0.0:
resolved "https://registry.yarnpkg.com/array-ify/-/array-ify-1.0.0.tgz#9e528762b4a9066ad163a6962a364418e9626ece" resolved "https://registry.yarnpkg.com/array-ify/-/array-ify-1.0.0.tgz#9e528762b4a9066ad163a6962a364418e9626ece"
integrity sha512-c5AMf34bKdvPhQ7tBGhqkgKNUzMr4WUs+WDtC2ZUGOUncbxKMTvqxYctiseW3+L4bA8ec+GcZ6/A/FW4m8ukng== integrity sha512-c5AMf34bKdvPhQ7tBGhqkgKNUzMr4WUs+WDtC2ZUGOUncbxKMTvqxYctiseW3+L4bA8ec+GcZ6/A/FW4m8ukng==
array-includes@^3.1.7:
version "3.1.7"
resolved "https://registry.yarnpkg.com/array-includes/-/array-includes-3.1.7.tgz#8cd2e01b26f7a3086cbc87271593fe921c62abda"
integrity sha512-dlcsNBIiWhPkHdOEEKnehA+RNUWDc4UqFtnIXU4uuYDPtA4LDkr7qip2p0VvFAEXNDr0yWZ9PJyIRiGjRLQzwQ==
dependencies:
call-bind "^1.0.2"
define-properties "^1.2.0"
es-abstract "^1.22.1"
get-intrinsic "^1.2.1"
is-string "^1.0.7"
array-sort@^1.0.0: array-sort@^1.0.0:
version "1.0.0" version "1.0.0"
resolved "https://registry.yarnpkg.com/array-sort/-/array-sort-1.0.0.tgz#e4c05356453f56f53512a7d1d6123f2c54c0a88a" resolved "https://registry.yarnpkg.com/array-sort/-/array-sort-1.0.0.tgz#e4c05356453f56f53512a7d1d6123f2c54c0a88a"
@ -6833,6 +6852,50 @@ array-unique@^0.3.2:
resolved "https://registry.yarnpkg.com/array-unique/-/array-unique-0.3.2.tgz#a894b75d4bc4f6cd679ef3244a9fd8f46ae2d428" resolved "https://registry.yarnpkg.com/array-unique/-/array-unique-0.3.2.tgz#a894b75d4bc4f6cd679ef3244a9fd8f46ae2d428"
integrity sha512-SleRWjh9JUud2wH1hPs9rZBZ33H6T9HOiL0uwGnGx9FpE6wKGyfWugmbkEOIs6qWrZhg0LWeLziLrEwQJhs5mQ== integrity sha512-SleRWjh9JUud2wH1hPs9rZBZ33H6T9HOiL0uwGnGx9FpE6wKGyfWugmbkEOIs6qWrZhg0LWeLziLrEwQJhs5mQ==
array.prototype.findlastindex@^1.2.3:
version "1.2.3"
resolved "https://registry.yarnpkg.com/array.prototype.findlastindex/-/array.prototype.findlastindex-1.2.3.tgz#b37598438f97b579166940814e2c0493a4f50207"
integrity sha512-LzLoiOMAxvy+Gd3BAq3B7VeIgPdo+Q8hthvKtXybMvRV0jrXfJM/t8mw7nNlpEcVlVUnCnM2KSX4XU5HmpodOA==
dependencies:
call-bind "^1.0.2"
define-properties "^1.2.0"
es-abstract "^1.22.1"
es-shim-unscopables "^1.0.0"
get-intrinsic "^1.2.1"
array.prototype.flat@^1.3.2:
version "1.3.2"
resolved "https://registry.yarnpkg.com/array.prototype.flat/-/array.prototype.flat-1.3.2.tgz#1476217df8cff17d72ee8f3ba06738db5b387d18"
integrity sha512-djYB+Zx2vLewY8RWlNCUdHjDXs2XOgm602S9E7P/UpHgfeHL00cRiIF+IN/G/aUJ7kGPb6yO/ErDI5V2s8iycA==
dependencies:
call-bind "^1.0.2"
define-properties "^1.2.0"
es-abstract "^1.22.1"
es-shim-unscopables "^1.0.0"
array.prototype.flatmap@^1.3.2:
version "1.3.2"
resolved "https://registry.yarnpkg.com/array.prototype.flatmap/-/array.prototype.flatmap-1.3.2.tgz#c9a7c6831db8e719d6ce639190146c24bbd3e527"
integrity sha512-Ewyx0c9PmpcsByhSW4r+9zDU7sGjFc86qf/kKtuSCRdhfbk0SNLLkaT5qvcHnRGgc5NP/ly/y+qkXkqONX54CQ==
dependencies:
call-bind "^1.0.2"
define-properties "^1.2.0"
es-abstract "^1.22.1"
es-shim-unscopables "^1.0.0"
arraybuffer.prototype.slice@^1.0.2:
version "1.0.2"
resolved "https://registry.yarnpkg.com/arraybuffer.prototype.slice/-/arraybuffer.prototype.slice-1.0.2.tgz#98bd561953e3e74bb34938e77647179dfe6e9f12"
integrity sha512-yMBKppFur/fbHu9/6USUe03bZ4knMYiwFBcyiaXB8Go0qNehwX6inYPzK9U0NeQvGxKthcmHcaR8P5MStSRBAw==
dependencies:
array-buffer-byte-length "^1.0.0"
call-bind "^1.0.2"
define-properties "^1.2.0"
es-abstract "^1.22.1"
get-intrinsic "^1.2.1"
is-array-buffer "^3.0.2"
is-shared-array-buffer "^1.0.2"
arrify@^1.0.1: arrify@^1.0.1:
version "1.0.1" version "1.0.1"
resolved "https://registry.yarnpkg.com/arrify/-/arrify-1.0.1.tgz#898508da2226f380df904728456849c1501a4b0d" resolved "https://registry.yarnpkg.com/arrify/-/arrify-1.0.1.tgz#898508da2226f380df904728456849c1501a4b0d"
@ -7727,6 +7790,15 @@ call-bind@^1.0.0, call-bind@^1.0.2:
function-bind "^1.1.1" function-bind "^1.1.1"
get-intrinsic "^1.0.2" get-intrinsic "^1.0.2"
call-bind@^1.0.4, call-bind@^1.0.5:
version "1.0.5"
resolved "https://registry.yarnpkg.com/call-bind/-/call-bind-1.0.5.tgz#6fa2b7845ce0ea49bf4d8b9ef64727a2c2e2e513"
integrity sha512-C3nQxfFZxFRVoJoGKKI8y3MOEo129NQ+FgQ08iye+Mk4zNZZGdjfs06bVTr+DBSlA66Q2VEcMki/cUCP4SercQ==
dependencies:
function-bind "^1.1.2"
get-intrinsic "^1.2.1"
set-function-length "^1.1.1"
call-me-maybe@^1.0.1: call-me-maybe@^1.0.1:
version "1.0.1" version "1.0.1"
resolved "https://registry.yarnpkg.com/call-me-maybe/-/call-me-maybe-1.0.1.tgz#26d208ea89e37b5cbde60250a15f031c16a4d66b" resolved "https://registry.yarnpkg.com/call-me-maybe/-/call-me-maybe-1.0.1.tgz#26d208ea89e37b5cbde60250a15f031c16a4d66b"
@ -9087,6 +9159,15 @@ deferred-leveldown@~5.3.0:
abstract-leveldown "~6.2.1" abstract-leveldown "~6.2.1"
inherits "^2.0.3" inherits "^2.0.3"
define-data-property@^1.0.1, define-data-property@^1.1.1:
version "1.1.1"
resolved "https://registry.yarnpkg.com/define-data-property/-/define-data-property-1.1.1.tgz#c35f7cd0ab09883480d12ac5cb213715587800b3"
integrity sha512-E7uGkTzkk1d0ByLeSc6ZsFS79Axg+m1P/VsgYsxHgiuc3tFSj+MjMIwe90FC4lOAZzNBdY7kkO2P2wKdsQ1vgQ==
dependencies:
get-intrinsic "^1.2.1"
gopd "^1.0.1"
has-property-descriptors "^1.0.0"
define-lazy-prop@^2.0.0: define-lazy-prop@^2.0.0:
version "2.0.0" version "2.0.0"
resolved "https://registry.yarnpkg.com/define-lazy-prop/-/define-lazy-prop-2.0.0.tgz#3f7ae421129bcaaac9bc74905c98a0009ec9ee7f" resolved "https://registry.yarnpkg.com/define-lazy-prop/-/define-lazy-prop-2.0.0.tgz#3f7ae421129bcaaac9bc74905c98a0009ec9ee7f"
@ -9100,6 +9181,15 @@ define-properties@^1.1.3, define-properties@^1.1.4:
has-property-descriptors "^1.0.0" has-property-descriptors "^1.0.0"
object-keys "^1.1.1" object-keys "^1.1.1"
define-properties@^1.2.0:
version "1.2.1"
resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.2.1.tgz#10781cc616eb951a80a034bafcaa7377f6af2b6c"
integrity sha512-8QmQKqEASLd5nx0U1B1okLElbUuuttJ/AnYmRXbbbGDWh6uS208EjD4Xqq/I9wK7u0v6O08XhTWnt5XtEbR6Dg==
dependencies:
define-data-property "^1.0.1"
has-property-descriptors "^1.0.0"
object-keys "^1.1.1"
define-property@^0.2.5: define-property@^0.2.5:
version "0.2.5" version "0.2.5"
resolved "https://registry.yarnpkg.com/define-property/-/define-property-0.2.5.tgz#c35b1ef918ec3c990f9a5bc57be04aacec5c8116" resolved "https://registry.yarnpkg.com/define-property/-/define-property-0.2.5.tgz#c35b1ef918ec3c990f9a5bc57be04aacec5c8116"
@ -9465,6 +9555,13 @@ doctrine@3.0.0, doctrine@^3.0.0:
dependencies: dependencies:
esutils "^2.0.2" esutils "^2.0.2"
doctrine@^2.1.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-2.1.0.tgz#5cd01fc101621b42c4cd7f5d1a66243716d3f39d"
integrity sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==
dependencies:
esutils "^2.0.2"
dom-accessibility-api@^0.5.6, dom-accessibility-api@^0.5.9: dom-accessibility-api@^0.5.6, dom-accessibility-api@^0.5.9:
version "0.5.16" version "0.5.16"
resolved "https://registry.yarnpkg.com/dom-accessibility-api/-/dom-accessibility-api-0.5.16.tgz#5a7429e6066eb3664d911e33fb0e45de8eb08453" resolved "https://registry.yarnpkg.com/dom-accessibility-api/-/dom-accessibility-api-0.5.16.tgz#5a7429e6066eb3664d911e33fb0e45de8eb08453"
@ -9890,6 +9987,51 @@ es-abstract@^1.17.5, es-abstract@^1.19.0, es-abstract@^1.20.4:
unbox-primitive "^1.0.2" unbox-primitive "^1.0.2"
which-typed-array "^1.1.9" which-typed-array "^1.1.9"
es-abstract@^1.22.1:
version "1.22.3"
resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.22.3.tgz#48e79f5573198de6dee3589195727f4f74bc4f32"
integrity sha512-eiiY8HQeYfYH2Con2berK+To6GrK2RxbPawDkGq4UiCQQfZHb6wX9qQqkbpPqaxQFcl8d9QzZqo0tGE0VcrdwA==
dependencies:
array-buffer-byte-length "^1.0.0"
arraybuffer.prototype.slice "^1.0.2"
available-typed-arrays "^1.0.5"
call-bind "^1.0.5"
es-set-tostringtag "^2.0.1"
es-to-primitive "^1.2.1"
function.prototype.name "^1.1.6"
get-intrinsic "^1.2.2"
get-symbol-description "^1.0.0"
globalthis "^1.0.3"
gopd "^1.0.1"
has-property-descriptors "^1.0.0"
has-proto "^1.0.1"
has-symbols "^1.0.3"
hasown "^2.0.0"
internal-slot "^1.0.5"
is-array-buffer "^3.0.2"
is-callable "^1.2.7"
is-negative-zero "^2.0.2"
is-regex "^1.1.4"
is-shared-array-buffer "^1.0.2"
is-string "^1.0.7"
is-typed-array "^1.1.12"
is-weakref "^1.0.2"
object-inspect "^1.13.1"
object-keys "^1.1.1"
object.assign "^4.1.4"
regexp.prototype.flags "^1.5.1"
safe-array-concat "^1.0.1"
safe-regex-test "^1.0.0"
string.prototype.trim "^1.2.8"
string.prototype.trimend "^1.0.7"
string.prototype.trimstart "^1.0.7"
typed-array-buffer "^1.0.0"
typed-array-byte-length "^1.0.0"
typed-array-byte-offset "^1.0.0"
typed-array-length "^1.0.4"
unbox-primitive "^1.0.2"
which-typed-array "^1.1.13"
es-aggregate-error@^1.0.8: es-aggregate-error@^1.0.8:
version "1.0.9" version "1.0.9"
resolved "https://registry.yarnpkg.com/es-aggregate-error/-/es-aggregate-error-1.0.9.tgz#b50925cdf78c8a634bd766704f6f7825902be3d9" resolved "https://registry.yarnpkg.com/es-aggregate-error/-/es-aggregate-error-1.0.9.tgz#b50925cdf78c8a634bd766704f6f7825902be3d9"
@ -9932,6 +10074,13 @@ es-set-tostringtag@^2.0.1:
has "^1.0.3" has "^1.0.3"
has-tostringtag "^1.0.0" has-tostringtag "^1.0.0"
es-shim-unscopables@^1.0.0:
version "1.0.2"
resolved "https://registry.yarnpkg.com/es-shim-unscopables/-/es-shim-unscopables-1.0.2.tgz#1f6942e71ecc7835ed1c8a83006d8771a63a3763"
integrity sha512-J3yBRXCzDu4ULnQwxyToo/OjdMx6akgVC7K6few0a7F/0wLtmKKN7I73AH5T2836UuXRqN7Qg+IIUw/+YJksRw==
dependencies:
hasown "^2.0.0"
es-to-primitive@^1.2.1: es-to-primitive@^1.2.1:
version "1.2.1" version "1.2.1"
resolved "https://registry.yarnpkg.com/es-to-primitive/-/es-to-primitive-1.2.1.tgz#e55cd4c9cdc188bcefb03b366c736323fc5c898a" resolved "https://registry.yarnpkg.com/es-to-primitive/-/es-to-primitive-1.2.1.tgz#e55cd4c9cdc188bcefb03b366c736323fc5c898a"
@ -10141,6 +10290,50 @@ escodegen@^2.0.0:
optionalDependencies: optionalDependencies:
source-map "~0.6.1" source-map "~0.6.1"
eslint-import-resolver-node@^0.3.9:
version "0.3.9"
resolved "https://registry.yarnpkg.com/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.9.tgz#d4eaac52b8a2e7c3cd1903eb00f7e053356118ac"
integrity sha512-WFj2isz22JahUv+B788TlO3N6zL3nNJGU8CcZbPZvVEkBPaJdCV4vy5wyghty5ROFbCRnm132v8BScu5/1BQ8g==
dependencies:
debug "^3.2.7"
is-core-module "^2.13.0"
resolve "^1.22.4"
eslint-module-utils@^2.8.0:
version "2.8.0"
resolved "https://registry.yarnpkg.com/eslint-module-utils/-/eslint-module-utils-2.8.0.tgz#e439fee65fc33f6bba630ff621efc38ec0375c49"
integrity sha512-aWajIYfsqCKRDgUfjEXNN/JlrzauMuSEy5sbd7WXbtW3EH6A6MpwEh42c7qD+MqQo9QMJ6fWLAeIJynx0g6OAw==
dependencies:
debug "^3.2.7"
eslint-plugin-import@^2.29.0:
version "2.29.0"
resolved "https://registry.yarnpkg.com/eslint-plugin-import/-/eslint-plugin-import-2.29.0.tgz#8133232e4329ee344f2f612885ac3073b0b7e155"
integrity sha512-QPOO5NO6Odv5lpoTkddtutccQjysJuFxoPS7fAHO+9m9udNHvTCPSAMW9zGAYj8lAIdr40I8yPCdUYrncXtrwg==
dependencies:
array-includes "^3.1.7"
array.prototype.findlastindex "^1.2.3"
array.prototype.flat "^1.3.2"
array.prototype.flatmap "^1.3.2"
debug "^3.2.7"
doctrine "^2.1.0"
eslint-import-resolver-node "^0.3.9"
eslint-module-utils "^2.8.0"
hasown "^2.0.0"
is-core-module "^2.13.1"
is-glob "^4.0.3"
minimatch "^3.1.2"
object.fromentries "^2.0.7"
object.groupby "^1.0.1"
object.values "^1.1.7"
semver "^6.3.1"
tsconfig-paths "^3.14.2"
eslint-plugin-local-rules@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/eslint-plugin-local-rules/-/eslint-plugin-local-rules-2.0.0.tgz#cda95d7616cc0e2609d76c347c187ca2be1e252e"
integrity sha512-sWueme0kUcP0JC1+6OBDQ9edBDVFJR92WJHSRbhiRExlenMEuUisdaVBPR+ItFBFXo2Pdw6FD2UfGZWkz8e93g==
eslint-plugin-svelte@^2.32.2: eslint-plugin-svelte@^2.32.2:
version "2.32.2" version "2.32.2"
resolved "https://registry.yarnpkg.com/eslint-plugin-svelte/-/eslint-plugin-svelte-2.32.2.tgz#d8f1352b55967445ee8d57aaee55f99712696a30" resolved "https://registry.yarnpkg.com/eslint-plugin-svelte/-/eslint-plugin-svelte-2.32.2.tgz#d8f1352b55967445ee8d57aaee55f99712696a30"
@ -11068,6 +11261,11 @@ function-bind@^1.1.1:
resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d"
integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==
function-bind@^1.1.2:
version "1.1.2"
resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.2.tgz#2c02d864d97f3ea6c8830c464cbd11ab6eab7a1c"
integrity sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==
function.prototype.name@^1.1.5: function.prototype.name@^1.1.5:
version "1.1.5" version "1.1.5"
resolved "https://registry.yarnpkg.com/function.prototype.name/-/function.prototype.name-1.1.5.tgz#cce0505fe1ffb80503e6f9e46cc64e46a12a9621" resolved "https://registry.yarnpkg.com/function.prototype.name/-/function.prototype.name-1.1.5.tgz#cce0505fe1ffb80503e6f9e46cc64e46a12a9621"
@ -11078,6 +11276,16 @@ function.prototype.name@^1.1.5:
es-abstract "^1.19.0" es-abstract "^1.19.0"
functions-have-names "^1.2.2" functions-have-names "^1.2.2"
function.prototype.name@^1.1.6:
version "1.1.6"
resolved "https://registry.yarnpkg.com/function.prototype.name/-/function.prototype.name-1.1.6.tgz#cdf315b7d90ee77a4c6ee216c3c3362da07533fd"
integrity sha512-Z5kx79swU5P27WEayXM1tBi5Ze/lbIyiNgU3qyXUOf9b2rgXYyF9Dy9Cx+IQv/Lc8WCG6L82zwUPpSS9hGehIg==
dependencies:
call-bind "^1.0.2"
define-properties "^1.2.0"
es-abstract "^1.22.1"
functions-have-names "^1.2.3"
functional-red-black-tree@^1.0.1: functional-red-black-tree@^1.0.1:
version "1.0.1" version "1.0.1"
resolved "https://registry.yarnpkg.com/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz#1b0ab3bd553b2a0d6399d29c0e3ea0b252078327" resolved "https://registry.yarnpkg.com/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz#1b0ab3bd553b2a0d6399d29c0e3ea0b252078327"
@ -11234,6 +11442,16 @@ get-intrinsic@^1.0.2, get-intrinsic@^1.1.1, get-intrinsic@^1.1.3, get-intrinsic@
has "^1.0.3" has "^1.0.3"
has-symbols "^1.0.3" has-symbols "^1.0.3"
get-intrinsic@^1.2.1, get-intrinsic@^1.2.2:
version "1.2.2"
resolved "https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.2.2.tgz#281b7622971123e1ef4b3c90fd7539306da93f3b"
integrity sha512-0gSo4ml/0j98Y3lngkFEot/zhiCeWsbYIlZ+uZOVgzLyLaUw7wxUL+nCTP0XJvJg1AXulJRI3UJi8GsbDuxdGA==
dependencies:
function-bind "^1.1.2"
has-proto "^1.0.1"
has-symbols "^1.0.3"
hasown "^2.0.0"
get-object@^0.2.0: get-object@^0.2.0:
version "0.2.0" version "0.2.0"
resolved "https://registry.yarnpkg.com/get-object/-/get-object-0.2.0.tgz#d92ff7d5190c64530cda0543dac63a3d47fe8c0c" resolved "https://registry.yarnpkg.com/get-object/-/get-object-0.2.0.tgz#d92ff7d5190c64530cda0543dac63a3d47fe8c0c"
@ -11937,6 +12155,13 @@ hash.js@^1.0.0, hash.js@^1.0.3:
inherits "^2.0.3" inherits "^2.0.3"
minimalistic-assert "^1.0.1" minimalistic-assert "^1.0.1"
hasown@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/hasown/-/hasown-2.0.0.tgz#f4c513d454a57b7c7e1650778de226b11700546c"
integrity sha512-vUptKVTpIJhcczKBbgnS+RtcuYMB8+oNzPK2/Hp3hanz8JmpATdmmgLgSaadVREkDm+e2giHwY3ZRkyjSIDDFA==
dependencies:
function-bind "^1.1.2"
help-me@^4.0.1:
version "4.2.0"
resolved "https://registry.yarnpkg.com/help-me/-/help-me-4.2.0.tgz#50712bfd799ff1854ae1d312c36eafcea85b0563"
@@ -12407,6 +12632,15 @@ internal-slot@^1.0.4:
has "^1.0.3"
side-channel "^1.0.4"
internal-slot@^1.0.5:
version "1.0.6"
resolved "https://registry.yarnpkg.com/internal-slot/-/internal-slot-1.0.6.tgz#37e756098c4911c5e912b8edbf71ed3aa116f930"
integrity sha512-Xj6dv+PsbtwyPpEflsejS+oIZxmMlV44zAhG479uYu89MsjcYOhCFnNyKrkJrihbsiasQyY0afoCl/9BLR65bg==
dependencies:
get-intrinsic "^1.2.2"
hasown "^2.0.0"
side-channel "^1.0.4"
interpret@^2.2.0:
version "2.2.0"
resolved "https://registry.yarnpkg.com/interpret/-/interpret-2.2.0.tgz#1a78a0b5965c40a5416d007ad6f50ad27c417df9"
@@ -12508,7 +12742,7 @@ is-arguments@^1.1.1:
call-bind "^1.0.2"
has-tostringtag "^1.0.0"
is-array-buffer@^3.0.1: is-array-buffer@^3.0.1, is-array-buffer@^3.0.2:
version "3.0.2" version "3.0.2"
resolved "https://registry.yarnpkg.com/is-array-buffer/-/is-array-buffer-3.0.2.tgz#f2653ced8412081638ecb0ebbd0c41c6e0aecbbe" resolved "https://registry.yarnpkg.com/is-array-buffer/-/is-array-buffer-3.0.2.tgz#f2653ced8412081638ecb0ebbd0c41c6e0aecbbe"
integrity sha512-y+FyyR/w8vfIRq4eQcM1EYgSTnmHXPqaF+IgzgraytCFq5Xh8lllDVmAZolPJiZttZLeFSINPYMaEJ7/vWUa1w== integrity sha512-y+FyyR/w8vfIRq4eQcM1EYgSTnmHXPqaF+IgzgraytCFq5Xh8lllDVmAZolPJiZttZLeFSINPYMaEJ7/vWUa1w==
@ -12585,6 +12819,13 @@ is-core-module@2.9.0, is-core-module@^2.5.0, is-core-module@^2.8.1, is-core-modu
dependencies: dependencies:
has "^1.0.3" has "^1.0.3"
is-core-module@^2.13.0, is-core-module@^2.13.1:
version "2.13.1"
resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.13.1.tgz#ad0d7532c6fea9da1ebdc82742d74525c6273384"
integrity sha512-hHrIjvZsftOsvKSn2TRYl63zvxsgE0K+0mYMoH6gD4omR5IWB2KynivBQczo3+wF1cCkjzvptnI9Q0sPU66ilw==
dependencies:
hasown "^2.0.0"
is-data-descriptor@^0.1.4:
version "0.1.4"
resolved "https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-0.1.4.tgz#0b5ee648388e2c860282e793f1856fec3f301b56"
@@ -12935,6 +13176,13 @@ is-typed-array@^1.1.10, is-typed-array@^1.1.9:
gopd "^1.0.1"
has-tostringtag "^1.0.0"
is-typed-array@^1.1.12:
version "1.1.12"
resolved "https://registry.yarnpkg.com/is-typed-array/-/is-typed-array-1.1.12.tgz#d0bab5686ef4a76f7a73097b95470ab199c57d4a"
integrity sha512-Z14TF2JNG8Lss5/HMqt0//T9JeHXttXy5pH/DBU4vi98ozO2btxzq9MwYDZYnKwU8nRsz/+GVFVRDq3DkVuSPg==
dependencies:
which-typed-array "^1.1.11"
is-typedarray@^1.0.0, is-typedarray@~1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a"
@@ -13794,10 +14042,10 @@ json-stringify-safe@^5.0.1, json-stringify-safe@~5.0.1:
resolved "https://registry.yarnpkg.com/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz#1296a2d58fd45f19a0f6ce01d65701e2c735b6eb"
integrity sha512-ZClg6AaYvamvYEE82d3Iyd3vSSIjQ+odgjaTzRuO3s7toCdFKczob2i0zCh7JE8kWn17yvAWhUVxvqGwUalsRA==
json5@^1.0.1: json5@^1.0.1, json5@^1.0.2:
version "1.0.2" version "1.0.2"
resolved "https://registry.yarnpkg.com/json5/-/json5-1.0.2.tgz#63d98d60f21b313b77c4d6da18bfa69d80e1d593" resolved "https://registry.yarnpkg.com/json5/-/json5-1.0.2.tgz#63d98d60f21b313b77c4d6da18bfa69d80e1d593"
integrity "sha1-Y9mNYPIbMTt3xNbaGL+mnYDh1ZM= sha512-g1MWMLBiz8FKi1e4w0UyVL3w+iJceWAFBAaBnnGKOpNa5f8TLktkbre1+s6oICydWAm+HRUGTmI+//xv2hvXYA==" integrity sha512-g1MWMLBiz8FKi1e4w0UyVL3w+iJceWAFBAaBnnGKOpNa5f8TLktkbre1+s6oICydWAm+HRUGTmI+//xv2hvXYA==
dependencies:
minimist "^1.2.0"
@@ -16298,6 +16546,11 @@ object-inspect@^1.12.2, object-inspect@^1.9.0:
resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.12.3.tgz#ba62dffd67ee256c8c086dfae69e016cd1f198b9"
integrity sha512-geUvdk7c+eizMNUDkRpW1wJwgfOiOeHbxBR/hLXK1aT6zmVSO0jsQcs7fj6MGw89jC/cjGfLcNOrtMYtGqm81g==
object-inspect@^1.13.1:
version "1.13.1"
resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.13.1.tgz#b96c6109324ccfef6b12216a956ca4dc2ff94bc2"
integrity sha512-5qoj1RUiKOMsCCNLV1CBiPYE10sziTsnmNxkAI/rZhiD63CF7IqdFGC/XzjWjpSgLf0LxXX3bDFIh0E18f6UhQ==
object-is@^1.1.5:
version "1.1.5"
resolved "https://registry.yarnpkg.com/object-is/-/object-is-1.1.5.tgz#b9deeaa5fc7f1846a0faecdceec138e5778f53ac"
@@ -16349,6 +16602,25 @@ object.assign@^4.1.4:
has-symbols "^1.0.3"
object-keys "^1.1.1"
object.fromentries@^2.0.7:
version "2.0.7"
resolved "https://registry.yarnpkg.com/object.fromentries/-/object.fromentries-2.0.7.tgz#71e95f441e9a0ea6baf682ecaaf37fa2a8d7e616"
integrity sha512-UPbPHML6sL8PI/mOqPwsH4G6iyXcCGzLin8KvEPenOZN5lpCNBZZQ+V62vdjB1mQHrmqGQt5/OJzemUA+KJmEA==
dependencies:
call-bind "^1.0.2"
define-properties "^1.2.0"
es-abstract "^1.22.1"
object.groupby@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/object.groupby/-/object.groupby-1.0.1.tgz#d41d9f3c8d6c778d9cbac86b4ee9f5af103152ee"
integrity sha512-HqaQtqLnp/8Bn4GL16cj+CUYbnpe1bh0TtEaWvybszDG4tgxCJuRpV8VGuvNaI1fAnI4lUJzDG55MXcOH4JZcQ==
dependencies:
call-bind "^1.0.2"
define-properties "^1.2.0"
es-abstract "^1.22.1"
get-intrinsic "^1.2.1"
object.pick@^1.3.0:
version "1.3.0"
resolved "https://registry.yarnpkg.com/object.pick/-/object.pick-1.3.0.tgz#87a10ac4c1694bd2e1cbf53591a66141fb5dd747"
@@ -16356,6 +16628,15 @@ object.pick@^1.3.0:
dependencies:
isobject "^3.0.1"
object.values@^1.1.7:
version "1.1.7"
resolved "https://registry.yarnpkg.com/object.values/-/object.values-1.1.7.tgz#617ed13272e7e1071b43973aa1655d9291b8442a"
integrity sha512-aU6xnDFYT3x17e/f0IiiwlGPTy2jzMySGfUB4fq6z7CV8l85CWHDk5ErhyhpfDHhrOMwGFhSQkhMGHaIotA6Ng==
dependencies:
call-bind "^1.0.2"
define-properties "^1.2.0"
es-abstract "^1.22.1"
octal@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/octal/-/octal-1.0.0.tgz#63e7162a68efbeb9e213588d58e989d1e5c4530b"
@@ -18668,6 +18949,15 @@ regexp.prototype.flags@^1.4.3:
define-properties "^1.1.3"
functions-have-names "^1.2.2"
regexp.prototype.flags@^1.5.1:
version "1.5.1"
resolved "https://registry.yarnpkg.com/regexp.prototype.flags/-/regexp.prototype.flags-1.5.1.tgz#90ce989138db209f81492edd734183ce99f9677e"
integrity sha512-sy6TXMN+hnP/wMy+ISxg3krXx7BAtWVO4UouuCN/ziM9UEne0euamVNafDfvC83bRNr95y0V5iijeDQFUNpvrg==
dependencies:
call-bind "^1.0.2"
define-properties "^1.2.0"
set-function-name "^2.0.0"
regexparam@2.0.1:
version "2.0.1"
resolved "https://registry.yarnpkg.com/regexparam/-/regexparam-2.0.1.tgz#c912f5dae371e3798100b3c9ce22b7414d0889fa"
@@ -18879,6 +19169,15 @@ resolve@^1.10.0, resolve@^1.11.1, resolve@^1.14.2, resolve@^1.17.0, resolve@^1.1
path-parse "^1.0.7"
supports-preserve-symlinks-flag "^1.0.0"
resolve@^1.22.4:
version "1.22.8"
resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.8.tgz#b6c87a9f2aa06dfab52e3d70ac8cde321fa5a48d"
integrity sha512-oKWePCxqpd6FlLvGV1VU0x7bkPmmCNolxzjMf4NczoDnQcIWrAF+cPtZn5i6n+RfD2d9i0tzpKnG6Yk168yIyw==
dependencies:
is-core-module "^2.13.0"
path-parse "^1.0.7"
supports-preserve-symlinks-flag "^1.0.0"
responselike@1.0.2, responselike@^1.0.2:
version "1.0.2"
resolved "https://registry.yarnpkg.com/responselike/-/responselike-1.0.2.tgz#918720ef3b631c5642be068f15ade5a46f4ba1e7"
@@ -19153,6 +19452,16 @@ rxjs@^7.5.5:
dependencies:
tslib "^2.1.0"
safe-array-concat@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/safe-array-concat/-/safe-array-concat-1.0.1.tgz#91686a63ce3adbea14d61b14c99572a8ff84754c"
integrity sha512-6XbUAseYE2KtOuGueyeobCySj9L4+66Tn6KQMOPQJrAJEowYKW/YR/MGJZl7FdydUdaFu4LYyDZjxf4/Nmo23Q==
dependencies:
call-bind "^1.0.2"
get-intrinsic "^1.2.1"
has-symbols "^1.0.3"
isarray "^2.0.5"
safe-buffer@5.2.1, safe-buffer@^5.0.1, safe-buffer@^5.1.0, safe-buffer@^5.1.1, safe-buffer@^5.1.2, safe-buffer@^5.2.0, safe-buffer@~5.2.0:
version "5.2.1"
resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6"
@@ -19334,6 +19643,11 @@ semver@^6.0.0, semver@^6.1.1, semver@^6.1.2, semver@^6.2.0, semver@^6.3.0:
resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d"
integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==
semver@^6.3.1:
version "6.3.1"
resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.1.tgz#556d2ef8689146e46dcea4bfdd095f3434dffcb4"
integrity sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==
semver@^7.0.0, semver@^7.1.1, semver@^7.1.2, semver@^7.3.2, semver@^7.3.4, semver@^7.3.5, semver@^7.3.7, semver@^7.3.8, semver@^7.5.3, semver@^7.5.4:
version "7.5.4"
resolved "https://registry.yarnpkg.com/semver/-/semver-7.5.4.tgz#483986ec4ed38e1c6c48c34894a9182dbff68a6e"
@@ -19387,6 +19701,25 @@ set-blocking@^2.0.0, set-blocking@~2.0.0:
resolved "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7"
integrity sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw==
set-function-length@^1.1.1:
version "1.1.1"
resolved "https://registry.yarnpkg.com/set-function-length/-/set-function-length-1.1.1.tgz#4bc39fafb0307224a33e106a7d35ca1218d659ed"
integrity sha512-VoaqjbBJKiWtg4yRcKBQ7g7wnGnLV3M8oLvVWwOk2PdYY6PEFegR1vezXR0tw6fZGF9csVakIRjrJiy2veSBFQ==
dependencies:
define-data-property "^1.1.1"
get-intrinsic "^1.2.1"
gopd "^1.0.1"
has-property-descriptors "^1.0.0"
set-function-name@^2.0.0:
version "2.0.1"
resolved "https://registry.yarnpkg.com/set-function-name/-/set-function-name-2.0.1.tgz#12ce38b7954310b9f61faa12701620a0c882793a"
integrity sha512-tMNCiqYVkXIZgc2Hnoy2IvC/f8ezc5koaRFkCjrpWzGpCd3qbZXPzVy9MAZzK1ch/X0jvSkojys3oqJN0qCmdA==
dependencies:
define-data-property "^1.0.1"
functions-have-names "^1.2.3"
has-property-descriptors "^1.0.0"
set-value@^2.0.0, set-value@^2.0.1:
version "2.0.1"
resolved "https://registry.yarnpkg.com/set-value/-/set-value-2.0.1.tgz#a18d40530e6f07de4228c7defe4227af8cad005b"
@@ -20109,6 +20442,15 @@ string.prototype.startswith@^1.0.0:
define-properties "^1.1.3"
es-abstract "^1.17.5"
string.prototype.trim@^1.2.8:
version "1.2.8"
resolved "https://registry.yarnpkg.com/string.prototype.trim/-/string.prototype.trim-1.2.8.tgz#f9ac6f8af4bd55ddfa8895e6aea92a96395393bd"
integrity sha512-lfjY4HcixfQXOfaqCvcBuOIapyaroTXhbkfJN3gcB1OtyupngWK4sEET9Knd0cXd28kTUqu/kHoV4HKSJdnjiQ==
dependencies:
call-bind "^1.0.2"
define-properties "^1.2.0"
es-abstract "^1.22.1"
string.prototype.trimend@^1.0.6:
version "1.0.6"
resolved "https://registry.yarnpkg.com/string.prototype.trimend/-/string.prototype.trimend-1.0.6.tgz#c4a27fa026d979d79c04f17397f250a462944533"
@@ -20118,6 +20460,15 @@ string.prototype.trimend@^1.0.6:
define-properties "^1.1.4"
es-abstract "^1.20.4"
string.prototype.trimend@^1.0.7:
version "1.0.7"
resolved "https://registry.yarnpkg.com/string.prototype.trimend/-/string.prototype.trimend-1.0.7.tgz#1bb3afc5008661d73e2dc015cd4853732d6c471e"
integrity sha512-Ni79DqeB72ZFq1uH/L6zJ+DKZTkOtPIHovb3YZHQViE+HDouuU4mBrLOLDn5Dde3RF8qw5qVETEjhu9locMLvA==
dependencies:
call-bind "^1.0.2"
define-properties "^1.2.0"
es-abstract "^1.22.1"
string.prototype.trimstart@^1.0.6:
version "1.0.6"
resolved "https://registry.yarnpkg.com/string.prototype.trimstart/-/string.prototype.trimstart-1.0.6.tgz#e90ab66aa8e4007d92ef591bbf3cd422c56bdcf4"
@@ -20127,6 +20478,15 @@ string.prototype.trimstart@^1.0.6:
define-properties "^1.1.4"
es-abstract "^1.20.4"
string.prototype.trimstart@^1.0.7:
version "1.0.7"
resolved "https://registry.yarnpkg.com/string.prototype.trimstart/-/string.prototype.trimstart-1.0.7.tgz#d4cdb44b83a4737ffbac2d406e405d43d0184298"
integrity sha512-NGhtDFu3jCEm7B4Fy0DpLewdJQOZcQ0rGbwQ/+stjnrp2i+rlKeCvos9hOIeCmqwratM47OBxY7uFZzjxHXmrg==
dependencies:
call-bind "^1.0.2"
define-properties "^1.2.0"
es-abstract "^1.22.1"
string_decoder@^1.1.1:
version "1.3.0"
resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.3.0.tgz#42f114594a46cf1a8e30b0a84f56c78c3edac21e"
@@ -21079,6 +21439,16 @@ tsconfig-paths@^3.10.1:
minimist "^1.2.6"
strip-bom "^3.0.0"
tsconfig-paths@^3.14.2:
version "3.14.2"
resolved "https://registry.yarnpkg.com/tsconfig-paths/-/tsconfig-paths-3.14.2.tgz#6e32f1f79412decd261f92d633a9dc1cfa99f088"
integrity sha512-o/9iXgCYc5L/JxCHPe3Hvh8Q/2xm5Z+p18PESBU6Ff33695QnCHBEjcytY2q19ua7Mbl/DavtBOLq+oG0RCL+g==
dependencies:
"@types/json5" "^0.0.29"
json5 "^1.0.2"
minimist "^1.2.6"
strip-bom "^3.0.0"
tsconfig-paths@^4.1.2, tsconfig-paths@^4.2.0:
version "4.2.0"
resolved "https://registry.yarnpkg.com/tsconfig-paths/-/tsconfig-paths-4.2.0.tgz#ef78e19039133446d244beac0fd6a1632e2d107c"
@@ -21203,6 +21573,36 @@ type-is@^1.6.14, type-is@^1.6.16, type-is@^1.6.18:
media-typer "0.3.0"
mime-types "~2.1.24"
typed-array-buffer@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/typed-array-buffer/-/typed-array-buffer-1.0.0.tgz#18de3e7ed7974b0a729d3feecb94338d1472cd60"
integrity sha512-Y8KTSIglk9OZEr8zywiIHG/kmQ7KWyjseXs1CbSo8vC42w7hg2HgYTxSWwP0+is7bWDc1H+Fo026CpHFwm8tkw==
dependencies:
call-bind "^1.0.2"
get-intrinsic "^1.2.1"
is-typed-array "^1.1.10"
typed-array-byte-length@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/typed-array-byte-length/-/typed-array-byte-length-1.0.0.tgz#d787a24a995711611fb2b87a4052799517b230d0"
integrity sha512-Or/+kvLxNpeQ9DtSydonMxCx+9ZXOswtwJn17SNLvhptaXYDJvkFFP5zbfU/uLmvnBJlI4yrnXRxpdWH/M5tNA==
dependencies:
call-bind "^1.0.2"
for-each "^0.3.3"
has-proto "^1.0.1"
is-typed-array "^1.1.10"
typed-array-byte-offset@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/typed-array-byte-offset/-/typed-array-byte-offset-1.0.0.tgz#cbbe89b51fdef9cd6aaf07ad4707340abbc4ea0b"
integrity sha512-RD97prjEt9EL8YgAgpOkf3O4IF9lhJFr9g0htQkm0rchFp/Vx7LW5Q8fSXXub7BXAODyUQohRMyOc3faCPd0hg==
dependencies:
available-typed-arrays "^1.0.5"
call-bind "^1.0.2"
for-each "^0.3.3"
has-proto "^1.0.1"
is-typed-array "^1.1.10"
typed-array-length@^1.0.4:
version "1.0.4"
resolved "https://registry.yarnpkg.com/typed-array-length/-/typed-array-length-1.0.4.tgz#89d83785e5c4098bec72e08b319651f0eac9c1bb"
@@ -21978,6 +22378,17 @@ which-module@^2.0.0:
resolved "https://registry.yarnpkg.com/which-module/-/which-module-2.0.0.tgz#d9ef07dce77b9902b8a3a8fa4b31c3e3f7e6e87a"
integrity sha512-B+enWhmw6cjfVC7kS8Pj9pCrKSc5txArRyaYGe088shv/FGWH+0Rjx/xPgtsWfsUtS27FkP697E4DDhgrgoc0Q==
which-typed-array@^1.1.11, which-typed-array@^1.1.13:
version "1.1.13"
resolved "https://registry.yarnpkg.com/which-typed-array/-/which-typed-array-1.1.13.tgz#870cd5be06ddb616f504e7b039c4c24898184d36"
integrity sha512-P5Nra0qjSncduVPEAr7xhoF5guty49ArDTwzJ/yNuPIbZppyRxFQsRCWrocxIY+CnMVG+qfbU2FmDKyvSGClow==
dependencies:
available-typed-arrays "^1.0.5"
call-bind "^1.0.4"
for-each "^0.3.3"
gopd "^1.0.1"
has-tostringtag "^1.0.0"
which-typed-array@^1.1.9:
version "1.1.9"
resolved "https://registry.yarnpkg.com/which-typed-array/-/which-typed-array-1.1.9.tgz#307cf898025848cf995e795e8423c7f337efbde6"