Merge branch 'master' of github.com:Budibase/budibase into labday/sqs

mike12345567 2023-11-17 17:23:25 +00:00
commit 3228463749
397 changed files with 8275 additions and 4310 deletions

View File

@ -19,6 +19,7 @@
"bundle.js"
],
"extends": ["eslint:recommended"],
"plugins": ["import", "eslint-plugin-local-rules"],
"overrides": [
{
"files": ["**/*.svelte"],
@ -30,7 +31,6 @@
"sourceType": "module",
"allowImportExportEverywhere": true
}
},
{
"files": ["**/*.ts"],
@ -42,13 +42,22 @@
"no-case-declarations": "off",
"no-useless-escape": "off",
"no-undef": "off",
"no-prototype-builtins": "off"
"no-prototype-builtins": "off",
"local-rules/no-budibase-imports": "error"
}
}
],
"rules": {
"no-self-assign": "off",
"no-unused-vars": ["error", { "varsIgnorePattern": "^_", "argsIgnorePattern": "^_", "destructuredArrayIgnorePattern": "^_" }]
"no-unused-vars": [
"error",
{
"varsIgnorePattern": "^_",
"argsIgnorePattern": "^_",
"destructuredArrayIgnorePattern": "^_"
}
],
"import/no-relative-packages": "error"
},
"globals": {
"GeolocationPositionError": true

View File

@ -18,51 +18,42 @@ env:
BASE_BRANCH: ${{ github.event.pull_request.base.ref}}
PERSONAL_ACCESS_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
NX_BASE_BRANCH: origin/${{ github.base_ref }}
USE_NX_AFFECTED: ${{ github.event_name == 'pull_request' && github.base_ref != 'master'}}
USE_NX_AFFECTED: ${{ github.event_name == 'pull_request' }}
IS_OSS_CONTRIBUTOR: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != 'Budibase/budibase' }}
jobs:
lint:
runs-on: ubuntu-latest
steps:
- name: Checkout repo and submodules
- name: Checkout repo
uses: actions/checkout@v3
if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase'
with:
submodules: true
submodules: ${{ env.IS_OSS_CONTRIBUTOR == 'false' }}
token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
- name: Checkout repo only
uses: actions/checkout@v3
if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != 'Budibase/budibase'
- name: Use Node.js 18.x
uses: actions/setup-node@v3
with:
node-version: 18.x
cache: "yarn"
cache: yarn
- run: yarn --frozen-lockfile
- run: yarn lint
build:
runs-on: ubuntu-latest
steps:
- name: Checkout repo and submodules
- name: Checkout repo
uses: actions/checkout@v3
if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase'
with:
submodules: true
submodules: ${{ env.IS_OSS_CONTRIBUTOR == 'false' }}
token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
fetch-depth: 0
- name: Checkout repo only
uses: actions/checkout@v3
if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != 'Budibase/budibase'
with:
fetch-depth: 0
- name: Use Node.js 18.x
uses: actions/setup-node@v3
with:
node-version: 18.x
cache: "yarn"
cache: yarn
- run: yarn --frozen-lockfile
# Run build all the projects
@ -81,24 +72,18 @@ jobs:
test-libraries:
runs-on: ubuntu-latest
steps:
- name: Checkout repo and submodules
- name: Checkout repo
uses: actions/checkout@v3
if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase'
with:
submodules: true
submodules: ${{ env.IS_OSS_CONTRIBUTOR == 'false' }}
token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
fetch-depth: 0
- name: Checkout repo only
uses: actions/checkout@v3
if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != 'Budibase/budibase'
with:
fetch-depth: 0
- name: Use Node.js 18.x
uses: actions/setup-node@v3
with:
node-version: 18.x
cache: "yarn"
cache: yarn
- run: yarn --frozen-lockfile
- name: Test
run: |
@ -116,24 +101,18 @@ jobs:
test-worker:
runs-on: ubuntu-latest
steps:
- name: Checkout repo and submodules
- name: Checkout repo
uses: actions/checkout@v3
if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase'
with:
submodules: true
submodules: ${{ env.IS_OSS_CONTRIBUTOR == 'false' }}
token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
fetch-depth: 0
- name: Checkout repo only
uses: actions/checkout@v3
if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != 'Budibase/budibase'
with:
fetch-depth: 0
- name: Use Node.js 18.x
uses: actions/setup-node@v3
with:
node-version: 18.x
cache: "yarn"
cache: yarn
- run: yarn --frozen-lockfile
- name: Test worker
run: |
@ -152,24 +131,18 @@ jobs:
test-server:
runs-on: ubuntu-latest
steps:
- name: Checkout repo and submodules
- name: Checkout repo
uses: actions/checkout@v3
if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase'
with:
submodules: true
submodules: ${{ env.IS_OSS_CONTRIBUTOR == 'false' }}
token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
fetch-depth: 0
- name: Checkout repo only
uses: actions/checkout@v3
if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != 'Budibase/budibase'
with:
fetch-depth: 0
- name: Use Node.js 18.x
uses: actions/setup-node@v3
with:
node-version: 18.x
cache: "yarn"
cache: yarn
- run: yarn --frozen-lockfile
- name: Test server
run: |
@ -200,7 +173,7 @@ jobs:
uses: actions/setup-node@v3
with:
node-version: 18.x
cache: "yarn"
cache: yarn
- run: yarn --frozen-lockfile
- name: Test
run: |
@ -213,21 +186,17 @@ jobs:
integration-test:
runs-on: ubuntu-latest
steps:
- name: Checkout repo and submodules
- name: Checkout repo
uses: actions/checkout@v3
if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase'
with:
submodules: true
submodules: ${{ env.IS_OSS_CONTRIBUTOR == 'false' }}
token: ${{ secrets.PERSONAL_ACCESS_TOKEN || github.token }}
- name: Checkout repo only
uses: actions/checkout@v3
if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != 'Budibase/budibase'
- name: Use Node.js 18.x
uses: actions/setup-node@v3
with:
node-version: 18.x
cache: "yarn"
cache: yarn
- run: yarn --frozen-lockfile
- name: Build packages
run: yarn build --scope @budibase/server --scope @budibase/worker

View File

@ -1,48 +0,0 @@
name: Budibase Deploy Production
on:
workflow_dispatch:
inputs:
version:
description: Budibase release version. For example - 1.0.0
required: false
jobs:
release:
runs-on: ubuntu-latest
steps:
- name: Fail if not a tag
run: |
if [[ $GITHUB_REF != refs/tags/* ]]; then
echo "Workflow Dispatch can only be run on tags"
exit 1
fi
- uses: actions/checkout@v2
with:
fetch-depth: 0
- name: Fail if tag is not in master
run: |
if ! git merge-base --is-ancestor ${{ github.sha }} origin/master; then
echo "Tag is not in master. This pipeline can only execute tags that are present on the master branch"
exit 1
fi
- name: Get the latest budibase release version
id: version
run: |
if [ -z "${{ github.event.inputs.version }}" ]; then
release_version=$(cat lerna.json | jq -r '.version')
else
release_version=${{ github.event.inputs.version }}
fi
echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV
- uses: passeidireto/trigger-external-workflow-action@main
env:
PAYLOAD_VERSION: ${{ env.RELEASE_VERSION }}
with:
repository: budibase/budibase-deploys
event: budicloud-prod-deploy
github_pat: ${{ secrets.GH_ACCESS_TOKEN }}

View File

@ -4,7 +4,6 @@ on:
push:
branches:
- master
- BUDI-7641/push_v2_images_to_qa
workflow_dispatch:
jobs:
@ -12,10 +11,12 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: peter-evans/repository-dispatch@v2
env:
PAYLOAD_VERSION: ${{ github.sha }}
REF_NAME: ${{ github.ref_name}}
with:
repository: budibase/budibase-deploys
event-type: budicloud-qa-deploy
token: ${{ secrets.GH_ACCESS_TOKEN }}
client-payload: |-
{
"VERSION": "${{ github.sha }}",
"REF_NAME": "${{ github.ref_name}}"
}

View File

@ -1,130 +0,0 @@
name: Budibase Release
concurrency:
group: release
cancel-in-progress: false
on:
push:
tags:
- "[0-9]+.[0-9]+.[0-9]+"
# Exclude all pre-releases
- "!*[0-9]+.[0-9]+.[0-9]+-*"
env:
# Posthog token used by ui at build time
POSTHOG_TOKEN: phc_bIjZL7oh2GEUd2vqvTBH8WvrX0fWTFQMs6H5KQxiUxU
INTERCOM_TOKEN: ${{ secrets.INTERCOM_TOKEN }}
PERSONAL_ACCESS_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
jobs:
release-images:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
with:
submodules: true
token: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
fetch-depth: 0
- name: Fail if tag is not in master
run: |
if ! git merge-base --is-ancestor ${{ github.sha }} origin/master; then
echo "Tag is not in master. This pipeline can only execute tags that are present on the master branch"
exit 1
fi
- uses: actions/setup-node@v1
with:
node-version: 18.x
- run: yarn install --frozen-lockfile
- name: Update versions
run: ./scripts/updateVersions.sh
- run: yarn lint
- run: yarn build
- run: yarn build:sdk
- name: Publish budibase packages to NPM
env:
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
run: |
# setup the username and email. I tend to use 'GitHub Actions Bot' with no email by default
git config --global user.name "Budibase Release Bot"
git config --global user.email "<>"
git submodule foreach git commit -a -m 'Release process'
git commit -a -m 'Release process'
echo //registry.npmjs.org/:_authToken=${NPM_TOKEN} >> .npmrc
yarn release
- name: "Get Current tag"
id: currenttag
run: |
version=$(./scripts/getCurrentVersion.sh)
echo "Using tag $version"
echo "version=$version" >> "$GITHUB_OUTPUT"
- name: Build/release Docker images
run: |
docker login -u $DOCKER_USER -p $DOCKER_PASSWORD
yarn build:docker
env:
DOCKER_USER: ${{ secrets.DOCKER_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKER_API_KEY }}
BUDIBASE_RELEASE_VERSION: ${{ steps.currenttag.outputs.version }}
release-helm-chart:
needs: [release-images]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Setup Helm
uses: azure/setup-helm@v1
id: helm-install
- name: Get the latest budibase release version
id: version
run: |
release_version=$(cat lerna.json | jq -r '.version')
echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV
# due to helm repo index issue: https://github.com/helm/helm/issues/7363
# we need to create new package in a different dir, merge the index and move the package back
- name: Build and release helm chart
run: |
git config user.name "Budibase Helm Bot"
git config user.email "<>"
git reset --hard
git fetch
mkdir sync
echo "Packaging chart to sync dir"
helm package charts/budibase --version 0.0.0-master --app-version "$RELEASE_VERSION" --destination sync
echo "Packaging successful"
git checkout gh-pages
echo "Indexing helm repo"
helm repo index --merge docs/index.yaml sync
mv -f sync/* docs
rm -rf sync
echo "Pushing new helm release"
git add -A
git commit -m "Helm Release: ${{ env.RELEASE_VERSION }}"
git push
trigger-deploy-to-qa-env:
needs: [release-helm-chart]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Get the current budibase release version
id: version
run: |
release_version=$(cat lerna.json | jq -r '.version')
echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV
- uses: passeidireto/trigger-external-workflow-action@main
env:
PAYLOAD_VERSION: ${{ env.RELEASE_VERSION }}
REF_NAME: ${{ github.ref_name}}
with:
repository: budibase/budibase-deploys
event: budicloud-qa-deploy
github_pat: ${{ secrets.GH_ACCESS_TOKEN }}

View File

@ -1,125 +0,0 @@
name: Budibase Release Selfhost
on:
workflow_dispatch:
jobs:
release:
runs-on: ubuntu-latest
steps:
- name: Fail if not a tag
run: |
if [[ $GITHUB_REF != refs/tags/* ]]; then
echo "Workflow Dispatch can only be run on tags"
exit 1
fi
- uses: actions/checkout@v2
with:
submodules: true
token: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
fetch-depth: 0
- name: Fail if tag is not in master
run: |
if ! git merge-base --is-ancestor ${{ github.sha }} origin/master; then
echo "Tag is not in master. This pipeline can only execute tags that are present on the master branch"
exit 1
fi
- name: Use Node.js 18.x
uses: actions/setup-node@v1
with:
node-version: 18.x
- name: Get the latest budibase release version
id: version
run: |
release_version=$(cat lerna.json | jq -r '.version')
echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV
- name: Tag and release Docker images (Self Host)
run: |
docker login -u $DOCKER_USER -p $DOCKER_PASSWORD
release_tag=${{ env.RELEASE_VERSION }}
# Pull apps and worker images
docker pull budibase/apps:$release_tag
docker pull budibase/worker:$release_tag
docker pull budibase/proxy:$release_tag
# Tag apps and worker images
docker tag budibase/apps:$release_tag budibase/apps:$SELFHOST_TAG
docker tag budibase/worker:$release_tag budibase/worker:$SELFHOST_TAG
docker tag budibase/proxy:$release_tag budibase/proxy:$SELFHOST_TAG
# Push images
docker push budibase/apps:$SELFHOST_TAG
docker push budibase/worker:$SELFHOST_TAG
docker push budibase/proxy:$SELFHOST_TAG
env:
DOCKER_USER: ${{ secrets.DOCKER_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKER_API_KEY }}
SELFHOST_TAG: latest
- name: Bootstrap and build (CLI)
run: |
yarn
yarn build
- name: Build OpenAPI spec
run: |
pushd packages/server
yarn
yarn specs
popd
- name: Setup Helm
uses: azure/setup-helm@v1
id: helm-install
# due to helm repo index issue: https://github.com/helm/helm/issues/7363
# we need to create new package in a different dir, merge the index and move the package back
- name: Build and release helm chart
run: |
git config user.name "Budibase Helm Bot"
git config user.email "<>"
git reset --hard
git fetch
mkdir sync
echo "Packaging chart to sync dir"
helm package charts/budibase --version "$RELEASE_VERSION" --app-version "$RELEASE_VERSION" --destination sync
echo "Packaging successful"
git checkout gh-pages
echo "Indexing helm repo"
helm repo index --merge docs/index.yaml sync
mv -f sync/* docs
rm -rf sync
echo "Pushing new helm release"
git add -A
git commit -m "Helm Release: ${{ env.RELEASE_VERSION }}"
git push
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Perform Github Release
uses: softprops/action-gh-release@v1
with:
name: ${{ env.RELEASE_VERSION }}
tag_name: ${{ env.RELEASE_VERSION }}
generate_release_notes: true
files: |
packages/cli/build/cli-win.exe
packages/cli/build/cli-linux
packages/cli/build/cli-macos
packages/server/specs/openapi.yaml
packages/server/specs/openapi.json
- name: Discord Webhook Action
uses: tsickert/discord-webhook@v4.0.0
with:
webhook-url: ${{ secrets.PROD_DEPLOY_WEBHOOK_URL }}
content: "Self Host Deployment Complete: ${{ env.RELEASE_VERSION }} deployed to Self Host."
embed-title: ${{ env.RELEASE_VERSION }}

View File

@ -1,69 +0,0 @@
name: Test
on:
workflow_dispatch:
env:
CI: true
PERSONAL_ACCESS_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
REGISTRY_URL: registry.hub.docker.com
NX_CLOUD_ACCESS_TOKEN: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
jobs:
build:
name: "build"
runs-on: ubuntu-latest
strategy:
matrix:
node-version: [18.x]
steps:
- name: "Checkout"
uses: actions/checkout@v4
with:
submodules: true
token: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
- name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@v3
with:
node-version: ${{ matrix.node-version }}
cache: "yarn"
- name: Setup QEMU
uses: docker/setup-qemu-action@v3
- name: Setup Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v3
- name: Run Yarn
run: yarn
- name: Run Yarn Build
run: yarn build --scope @budibase/server --scope @budibase/worker
- name: Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_API_KEY }}
- name: Get the latest release version
id: version
run: |
release_version=$(cat lerna.json | jq -r '.version')
echo $release_version
echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV
- name: Tag and release Budibase service docker image
uses: docker/build-push-action@v5
with:
context: .
push: true
pull: true
platforms: linux/amd64,linux/arm64
tags: budibase/budibase-test:test
file: ./hosting/single/Dockerfile.v2
cache-from: type=registry,ref=budibase/budibase-test:test
cache-to: type=inline
- name: Tag and release Budibase Azure App Service docker image
uses: docker/build-push-action@v2
with:
context: .
push: true
platforms: linux/amd64
build-args: TARGETBUILD=aas
tags: budibase/budibase-test:aas
file: ./hosting/single/Dockerfile.v2

View File

@ -1,79 +0,0 @@
name: Deploy Budibase Single Container Image to DockerHub
on:
workflow_dispatch:
env:
CI: true
PERSONAL_ACCESS_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
REGISTRY_URL: registry.hub.docker.com
jobs:
build:
name: "build"
runs-on: ubuntu-latest
strategy:
matrix:
node-version: [18.x]
steps:
- name: Maximize build space
uses: easimon/maximize-build-space@master
with:
root-reserve-mb: 30000
swap-size-mb: 1024
remove-android: "true"
remove-dotnet: "true"
- name: Fail if not a tag
run: |
if [[ $GITHUB_REF != refs/tags/* ]]; then
echo "Workflow Dispatch can only be run on tags"
exit 1
fi
- name: "Checkout"
uses: actions/checkout@v2
with:
submodules: true
token: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
- name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@v1
with:
node-version: ${{ matrix.node-version }}
- name: Setup QEMU
uses: docker/setup-qemu-action@v1
- name: Setup Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v1
- name: Run Yarn
run: yarn
- name: Update versions
run: ./scripts/updateVersions.sh
- name: Run Yarn Build
run: yarn build
- name: Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_API_KEY }}
- name: Get the latest release version
id: version
run: |
release_version=$(cat lerna.json | jq -r '.version')
echo $release_version
echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV
- name: Tag and release Budibase service docker image
uses: docker/build-push-action@v2
with:
context: .
push: true
platforms: linux/amd64,linux/arm64
tags: budibase/budibase,budibase/budibase:${{ env.RELEASE_VERSION }}
file: ./hosting/single/Dockerfile
- name: Tag and release Budibase Azure App Service docker image
uses: docker/build-push-action@v2
with:
context: .
push: true
platforms: linux/amd64
build-args: TARGETBUILD=aas
tags: budibase/budibase-aas,budibase/budibase-aas:${{ env.RELEASE_VERSION }}
file: ./hosting/single/Dockerfile

View File

@ -1,4 +1,4 @@
name: Tag release
name: Release
concurrency:
group: tag-release
cancel-in-progress: false
@ -19,6 +19,8 @@ on:
jobs:
tag-release:
runs-on: ubuntu-latest
outputs:
version: ${{ steps.tag-release.outputs.version }}
steps:
- name: Fail if branch is not master
@ -33,6 +35,7 @@ jobs:
- run: cd scripts && yarn
- name: Tag release
id: tag-release
run: |
cd scripts
# setup the username and email.
@ -41,3 +44,23 @@ jobs:
BUMP_TYPE_INPUT=${{ github.event.inputs.versioning }}
BUMP_TYPE=${BUMP_TYPE_INPUT:-"patch"}
./versionCommit.sh $BUMP_TYPE
cd ..
new_version=$(./scripts/getCurrentVersion.sh)
echo "version=$new_version" >> $GITHUB_OUTPUT
trigger-release:
needs: [tag-release]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: peter-evans/repository-dispatch@v2
with:
repository: budibase/budibase-deploys
event-type: release-prod
token: ${{ secrets.GH_ACCESS_TOKEN }}
client-payload: |-
{
"TAG": "${{ needs.tag-release.outputs.version }}"
}

View File

@ -1,6 +1,6 @@
<p align="center">
<a href="https://www.budibase.com">
<img alt="Budibase" src="https://d33wubrfki0l68.cloudfront.net/aac32159d7207b5085e74a7ef67afbb7027786c5/2b1fd/img/logo/bb-emblem.svg" width="60" />
<img alt="Budibase" src="https://res.cloudinary.com/daog6scxm/image/upload/v1696515725/Branding/Assets/Symbol/RGB/Full%20Colour/Budibase_Symbol_RGB_FullColour_cbqvha_1_z5cwq2.svg" width="60" />
</a>
</p>
<h1 align="center">
@ -126,13 +126,6 @@ You can learn more about the Budibase API at the following places:
- [Build an app with Budibase and Next.js](https://budibase.com/blog/building-a-crud-app-with-budibase-and-next.js/)
<p align="center">
<img alt="Budibase data" src="https://res.cloudinary.com/daog6scxm/image/upload/v1647858558/Feb%20release/Start_building_with_Budibase_s_API_3_rhlzhv.png">
</p>
<br /><br />
<br /><br /><br />
## 🏁 Get started
Deploy Budibase self-hosted in your existing infrastructure, using Docker, Kubernetes, and Digital Ocean.

View File

@ -0,0 +1,21 @@
module.exports = {
"no-budibase-imports": {
create: function (context) {
return {
ImportDeclaration(node) {
const importPath = node.source.value
if (
/^@budibase\/[^/]+\/.*$/.test(importPath) &&
importPath !== "@budibase/backend-core/tests"
) {
context.report({
node,
message: `Importing from @budibase is not allowed, except for @budibase/backend-core/tests.`,
})
}
},
}
},
},
}
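As a rough illustration, the regex above treats imports like these as follows (the import paths and names are examples, not taken from the repo):

  // Allowed: package-root import (no deep path, so the pattern does not match)
  import { context } from "@budibase/backend-core"
  // Allowed: the single whitelisted deep import
  import { structures } from "@budibase/backend-core/tests"
  // Reported: any other deep import into a @budibase package
  import { DatabaseImpl } from "@budibase/backend-core/src/db"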

View File

@ -2,8 +2,8 @@
echo ${TARGETBUILD} > /buildtarget.txt
if [[ "${TARGETBUILD}" = "aas" ]]; then
# Azure AppService uses /home for persisent data & SSH on port 2222
DATA_DIR=/home
# Azure AppService uses /home for persistent data & SSH on port 2222
DATA_DIR="${DATA_DIR:-/home}"
WEBSITES_ENABLE_APP_SERVICE_STORAGE=true
mkdir -p $DATA_DIR/{search,minio,couch}
mkdir -p $DATA_DIR/couch/{dbs,views}

View File

@ -7,6 +7,8 @@ services:
build:
context: ..
dockerfile: packages/server/Dockerfile.v2
args:
- BUDIBASE_VERSION=0.0.0+dev-docker
container_name: build-bbapps
environment:
SELF_HOSTED: 1
@ -30,13 +32,13 @@ services:
depends_on:
- worker-service
- redis-service
# volumes:
# - /some/path/to/plugins:/plugins
worker-service:
build:
context: ..
dockerfile: packages/worker/Dockerfile.v2
args:
- BUDIBASE_VERSION=0.0.0+dev-docker
container_name: build-bbworker
environment:
SELF_HOSTED: 1

View File

@ -2,16 +2,18 @@ server {
listen 443 ssl default_server;
listen [::]:443 ssl default_server;
server_name _;
ssl_certificate /etc/letsencrypt/live/CUSTOM_DOMAIN/fullchain.pem;
ssl_certificate_key /etc/letsencrypt/live/CUSTOM_DOMAIN/privkey.pem;
include /etc/letsencrypt/options-ssl-nginx.conf;
ssl_dhparam /etc/letsencrypt/ssl-dhparams.pem;
error_log /dev/stderr warn;
access_log /dev/stdout main;
client_max_body_size 1000m;
ignore_invalid_headers off;
proxy_buffering off;
# port_in_redirect off;
ssl_certificate /etc/letsencrypt/live/CUSTOM_DOMAIN/fullchain.pem;
ssl_certificate_key /etc/letsencrypt/live/CUSTOM_DOMAIN/privkey.pem;
include /etc/letsencrypt/options-ssl-nginx.conf;
ssl_dhparam /etc/letsencrypt/ssl-dhparams.pem;
location ^~ /.well-known/acme-challenge/ {
default_type "text/plain";
root /var/www/html;
@ -47,6 +49,24 @@ server {
rewrite ^/worker/(.*)$ /$1 break;
}
location /api/backups/ {
# calls to export apps are limited
limit_req zone=ratelimit burst=20 nodelay;
# 1800s timeout for app export requests
proxy_read_timeout 1800s;
proxy_connect_timeout 1800s;
proxy_send_timeout 1800s;
proxy_http_version 1.1;
proxy_set_header Connection $connection_upgrade;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_pass http://127.0.0.1:4001;
}
location /api/ {
# calls to the API are rate limited with bursting
limit_req zone=ratelimit burst=20 nodelay;
@ -70,18 +90,49 @@ server {
rewrite ^/db/(.*)$ /$1 break;
}
location /socket/ {
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection 'upgrade';
proxy_set_header Host $host;
proxy_cache_bypass $http_upgrade;
proxy_pass http://127.0.0.1:4001;
}
location / {
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header Host $http_host;
proxy_connect_timeout 300;
proxy_http_version 1.1;
proxy_set_header Connection "";
chunked_transfer_encoding off;
proxy_pass http://127.0.0.1:9000;
}
location /files/signed/ {
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
# IMPORTANT: Signed urls will inspect the host header of the request.
# Normally a signed url will need to be generated with a specified client host in mind.
# To support dynamic hosts, e.g. some unknown self-hosted installation url,
# use a predefined host header. The host 'minio-service' is also used at the time of url signing.
proxy_set_header Host minio-service;
proxy_connect_timeout 300;
proxy_http_version 1.1;
proxy_set_header Connection "";
chunked_transfer_encoding off;
proxy_pass http://127.0.0.1:9000;
rewrite ^/files/signed/(.*)$ /$1 break;
}
client_header_timeout 60;
client_body_timeout 60;
keepalive_timeout 60;

View File

@ -57,8 +57,8 @@
--spectrum-global-color-gray-600: rgb(144,144,144);
--spectrum-global-color-gray-900: rgb(255,255,255);
--spectrum-global-color-gray-800: rgb(227,227,227);
--spectrum-global-color-static-blue-600: rgb(20,115,230);
--spectrum-global-color-static-blue-hover: rgb( 18, 103, 207);
--bb-indigo: #6E56FF;
--bb-indigo-light: #9F8FFF;
}
html, body {
@ -90,15 +90,8 @@
.info {
display: flex;
flex-direction: column;
align-items: left;
align-items: flex-start;
}
@media only screen and (max-width: 600px) {
.info {
align-items: center;
}
}
.status {
color: var(--spectrum-global-color-gray-600)
}
@ -113,13 +106,14 @@
.buttons {
display: flex;
flex-direction: row;
justify-content: flex-start;
margin-top: 15px;
}
.homeButton {
background-color: var(--spectrum-global-color-static-blue-600);
background-color: var(--bb-indigo);
}
.homeButton:hover {
background-color: var(--spectrum-global-color-static-blue-hover);
background-color: var(--bb-indigo-light);
}
.statusButton {
background-color: transparent;
@ -127,20 +121,30 @@
border: none;
}
.hero {
height: 160px;
width: 160px;
margin-right: 80px;
height: 60px;
margin: 10px 40px 10px 0;
}
.hero img {
height: 100%;
}
.content {
display: flex;
flex-direction: row;
align-items: flex-end;
align-items: center;
justify-content: center;
padding: 0 40px;
}
h1 {
margin-bottom: 10px;
}
h3 {
margin-top: 0;
}
@media only screen and (max-width: 600px) {
.content {
flex-direction: column;
align-items: flex-start;
}
}
</style>
@ -152,16 +156,15 @@
<div class="main">
<div class="content">
<div class="hero">
<img src="https://raw.githubusercontent.com/Budibase/budibase/master/packages/builder/assets/bb-space-man.svg" alt="Budibase Logo">
<img src="https://res.cloudinary.com/daog6scxm/image/upload/v1696515725/Branding/Assets/Symbol/RGB/Full%20Colour/Budibase_Symbol_RGB_FullColour_cbqvha_1_z5cwq2.svg" alt="Budibase Logo">
</div>
<div class="info">
<div>
<h4 id="status" class="status"></h4>
<h4 id="status" class="status">&nbsp;</h4>
<h1 class="title">
Houston we have a problem!
</h1>
<h3 id="message" class="message">
</h3>
<h3 id="message" class="message">&nbsp;</h3>
</div>
<div class="buttons">
<button class="homeButton" onclick=goHome()>Return home</button>

View File

@ -51,7 +51,7 @@ http {
proxy_buffering off;
set $csp_default "default-src 'self'";
set $csp_script "script-src 'self' 'unsafe-inline' 'unsafe-eval' https://*.budibase.net https://cdn.budi.live https://js.intercomcdn.com https://widget.intercom.io";
set $csp_script "script-src 'self' 'unsafe-inline' 'unsafe-eval' https://*.budibase.net https://cdn.budi.live https://js.intercomcdn.com https://widget.intercom.io https://d2l5prqdbvm3op.cloudfront.net";
set $csp_style "style-src 'self' 'unsafe-inline' https://cdn.jsdelivr.net https://fonts.googleapis.com https://rsms.me https://maxcdn.bootstrapcdn.com";
set $csp_object "object-src 'none'";
set $csp_base_uri "base-uri 'self'";

View File

@ -2,8 +2,8 @@
echo ${TARGETBUILD} > /buildtarget.txt
if [[ "${TARGETBUILD}" = "aas" ]]; then
# Azure AppService uses /home for persisent data & SSH on port 2222
DATA_DIR=/home
# Azure AppService uses /home for persistent data & SSH on port 2222
DATA_DIR="${DATA_DIR:-/home}"
WEBSITES_ENABLE_APP_SERVICE_STORAGE=true
mkdir -p $DATA_DIR/{search,minio,couch}
mkdir -p $DATA_DIR/couch/{dbs,views}

View File

@ -1,10 +0,0 @@
#!/bin/bash
if [[ $TARGETARCH == arm* ]] ;
then
echo "INSTALLING ARM64 MINIO"
wget https://dl.min.io/server/minio/release/linux-arm64/minio
else
echo "INSTALLING AMD64 MINIO"
wget https://dl.min.io/server/minio/release/linux-amd64/minio
fi
chmod +x minio

View File

@ -1,18 +0,0 @@
#!/bin/bash
tag=$1
if [[ ! "$tag" ]]; then
echo "No tag present. You must pass a tag to this script"
exit 1
fi
echo "Tagging images with tag: $tag"
docker tag proxy-service budibase/proxy:$tag
docker tag app-service budibase/apps:$tag
docker tag worker-service budibase/worker:$tag
docker push --all-tags budibase/apps
docker push --all-tags budibase/worker
docker push --all-tags budibase/proxy

View File

@ -26,6 +26,7 @@ RUN ./scripts/removeWorkspaceDependencies.sh packages/worker/package.json
# We will never want to sync pro, but the script is still required
RUN echo '' > scripts/syncProPackage.js
RUN jq 'del(.scripts.postinstall)' package.json > temp.json && mv temp.json package.json
RUN ./scripts/removeWorkspaceDependencies.sh package.json
RUN --mount=type=cache,target=/root/.yarn YARN_CACHE_FOLDER=/root/.yarn yarn install --production
# copy the actual code
@ -41,6 +42,7 @@ COPY packages/string-templates packages/string-templates
FROM budibase/couchdb as runner
ARG TARGETARCH
ENV TARGETARCH $TARGETARCH
ENV NODE_MAJOR 18
#TARGETBUILD can be set to single (for single docker image) or aas (for azure app service)
# e.g. docker build --build-arg TARGETBUILD=aas ....
ARG TARGETBUILD=single
@ -48,10 +50,10 @@ ENV TARGETBUILD $TARGETBUILD
# install base dependencies
RUN apt-get update && \
apt-get install -y --no-install-recommends software-properties-common nginx uuid-runtime redis-server
apt-get install -y --no-install-recommends software-properties-common nginx uuid-runtime redis-server libaio1
# Install postgres client for pg_dump utils
RUN apt install software-properties-common apt-transport-https gpg -y \
RUN apt install -y software-properties-common apt-transport-https ca-certificates gnupg \
&& curl -fsSl https://www.postgresql.org/media/keys/ACCC4CF8.asc | gpg --dearmor | tee /usr/share/keyrings/postgresql.gpg > /dev/null \
&& echo deb [arch=amd64,arm64,ppc64el signed-by=/usr/share/keyrings/postgresql.gpg] http://apt.postgresql.org/pub/repos/apt/ $(lsb_release -cs)-pgdg main | tee /etc/apt/sources.list.d/postgresql.list \
&& apt update -y \
@ -60,10 +62,8 @@ RUN apt install software-properties-common apt-transport-https gpg -y \
# install other dependencies, nodejs, oracle requirements, jdk8, redis, nginx
WORKDIR /nodejs
RUN curl -sL https://deb.nodesource.com/setup_18.x -o /tmp/nodesource_setup.sh && \
bash /tmp/nodesource_setup.sh && \
apt-get install -y --no-install-recommends libaio1 nodejs && \
npm install --global yarn pm2
COPY scripts/install-node.sh ./install.sh
RUN chmod +x install.sh && ./install.sh
# setup nginx
COPY hosting/single/nginx/nginx.conf /etc/nginx
@ -117,6 +117,10 @@ EXPOSE 443
EXPOSE 2222
VOLUME /data
ARG BUDIBASE_VERSION
# Ensuring the version argument is sent
RUN test -n "$BUDIBASE_VERSION"
ENV BUDIBASE_VERSION=$BUDIBASE_VERSION
HEALTHCHECK --interval=15s --timeout=15s --start-period=45s CMD "/healthcheck.sh"

View File

@ -22,7 +22,7 @@ declare -a DOCKER_VARS=("APP_PORT" "APPS_URL" "ARCHITECTURE" "BUDIBASE_ENVIRONME
# Azure App Service customisations
if [[ "${TARGETBUILD}" = "aas" ]]; then
DATA_DIR=/home
DATA_DIR="${DATA_DIR:-/home}"
WEBSITES_ENABLE_APP_SERVICE_STORAGE=true
/etc/init.d/ssh start
else

View File

@ -1,6 +1,6 @@
<p align="center">
<a href="https://www.budibase.com">
<img alt="Budibase" src="https://d33wubrfki0l68.cloudfront.net/aac32159d7207b5085e74a7ef67afbb7027786c5/2b1fd/img/logo/bb-emblem.svg" width="60" />
<img alt="Budibase" src="https://res.cloudinary.com/daog6scxm/image/upload/v1696515725/Branding/Assets/Symbol/RGB/Full%20Colour/Budibase_Symbol_RGB_FullColour_cbqvha_1_z5cwq2.svg" width="60" />
</a>
</p>
<h1 align="center">

View File

@ -1,6 +1,6 @@
<p align="center">
<a href="https://www.budibase.com">
<img alt="Budibase" src="https://d33wubrfki0l68.cloudfront.net/aac32159d7207b5085e74a7ef67afbb7027786c5/2b1fd/img/logo/bb-emblem.svg" width="60" />
<img alt="Budibase" src="https://res.cloudinary.com/daog6scxm/image/upload/v1696515725/Branding/Assets/Symbol/RGB/Full%20Colour/Budibase_Symbol_RGB_FullColour_cbqvha_1_z5cwq2.svg" width="60" />
</a>
</p>
<h1 align="center">

View File

@ -1,6 +1,6 @@
<p align="center">
<a href="https://www.budibase.com">
<img alt="Budibase" src="https://d33wubrfki0l68.cloudfront.net/aac32159d7207b5085e74a7ef67afbb7027786c5/2b1fd/img/logo/bb-emblem.svg" width="60" />
<img alt="Budibase" src="https://res.cloudinary.com/daog6scxm/image/upload/v1696515725/Branding/Assets/Symbol/RGB/Full%20Colour/Budibase_Symbol_RGB_FullColour_cbqvha_1_z5cwq2.svg" width="60" />
</a>
</p>
<h1 align="center">

View File

@ -1,6 +1,6 @@
<p align="center">
<a href="https://www.budibase.com">
<img alt="Budibase" src="https://d33wubrfki0l68.cloudfront.net/aac32159d7207b5085e74a7ef67afbb7027786c5/2b1fd/img/logo/bb-emblem.svg" width="60" />
<img alt="Budibase" src="https://res.cloudinary.com/daog6scxm/image/upload/v1696515725/Branding/Assets/Symbol/RGB/Full%20Colour/Budibase_Symbol_RGB_FullColour_cbqvha_1_z5cwq2.svg" width="60" />
</a>
</p>
<h1 align="center">

View File

@ -1,6 +1,6 @@
<p align="center">
<a href="https://www.budibase.com">
<img alt="Budibase" src="https://d33wubrfki0l68.cloudfront.net/aac32159d7207b5085e74a7ef67afbb7027786c5/2b1fd/img/logo/bb-emblem.svg" width="60" />
<img alt="Budibase" src="https://res.cloudinary.com/daog6scxm/image/upload/v1696515725/Branding/Assets/Symbol/RGB/Full%20Colour/Budibase_Symbol_RGB_FullColour_cbqvha_1_z5cwq2.svg" width="60" />
</a>
</p>
<h1 align="center">

View File

@ -1,6 +1,6 @@
<p align="center">
<a href="https://www.budibase.com">
<img alt="Budibase" src="https://d33wubrfki0l68.cloudfront.net/aac32159d7207b5085e74a7ef67afbb7027786c5/2b1fd/img/logo/bb-emblem.svg" width="60" />
<img alt="Budibase" src="https://res.cloudinary.com/daog6scxm/image/upload/v1696515725/Branding/Assets/Symbol/RGB/Full%20Colour/Budibase_Symbol_RGB_FullColour_cbqvha_1_z5cwq2.svg" width="60" />
</a>
</p>
<h1 align="center">

View File

@ -1,5 +1,5 @@
{
"version": "2.11.39",
"version": "2.13.10",
"npmClient": "yarn",
"packages": [
"packages/*"

View File

@ -2,11 +2,17 @@
"name": "root",
"private": true,
"devDependencies": {
"@babel/core": "^7.22.5",
"@babel/eslint-parser": "^7.22.5",
"@babel/preset-env": "^7.22.5",
"@esbuild-plugins/tsconfig-paths": "^0.1.2",
"@typescript-eslint/parser": "6.7.2",
"esbuild": "^0.18.17",
"esbuild-node-externals": "^1.8.0",
"eslint": "^8.44.0",
"eslint-plugin-import": "^2.29.0",
"eslint-plugin-local-rules": "^2.0.0",
"eslint-plugin-svelte": "^2.32.2",
"husky": "^8.0.3",
"kill-port": "^1.6.1",
"lerna": "7.1.1",
@ -17,23 +23,18 @@
"prettier": "2.8.8",
"prettier-plugin-svelte": "^2.3.0",
"svelte": "3.49.0",
"typescript": "5.2.2",
"@babel/core": "^7.22.5",
"@babel/eslint-parser": "^7.22.5",
"@babel/preset-env": "^7.22.5",
"eslint-plugin-svelte": "^2.32.2",
"svelte-eslint-parser": "^0.32.0"
"svelte-eslint-parser": "^0.32.0",
"typescript": "5.2.2"
},
"scripts": {
"preinstall": "node scripts/syncProPackage.js",
"setup": "git config submodule.recurse true && git submodule update && node ./hosting/scripts/setup.js && yarn && yarn build && yarn dev",
"build": "lerna run build --stream",
"build": "NODE_OPTIONS=--max-old-space-size=1500 lerna run build --stream",
"build:dev": "lerna run --stream prebuild && yarn nx run-many --target=build --output-style=dynamic --watch --preserveWatchOutput",
"check:types": "lerna run check:types",
"build:sdk": "lerna run --stream build:sdk",
"deps:circular": "madge packages/server/dist/index.js packages/worker/src/index.ts packages/backend-core/dist/src/index.js packages/cli/src/index.js --circular",
"release": "lerna publish from-package --yes --force-publish --no-git-tag-version --no-push --no-git-reset",
"release:develop": "yarn release --dist-tag develop",
"restore": "yarn run clean && yarn && yarn run build",
"nuke": "yarn run nuke:packages && yarn run nuke:docker",
"nuke:packages": "yarn run restore",
@ -55,10 +56,6 @@
"lint:fix:prettier": "prettier --write \"packages/**/*.{js,ts,svelte}\" && prettier --write \"examples/**/*.{js,ts,svelte}\" && prettier --write \"qa-core/**/*.{js,ts,svelte}\"",
"lint:fix": "yarn run lint:fix:prettier && yarn run lint:fix:eslint",
"build:specs": "lerna run --stream specs",
"build:docker": "lerna run --stream build:docker && yarn build:docker:proxy && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh $BUDIBASE_RELEASE_VERSION && cd -",
"build:docker:proxy": "docker build hosting/proxy -t proxy-service",
"build:docker:selfhost": "lerna run --stream build:docker && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh latest && cd -",
"build:docker:develop": "node scripts/pinVersions && lerna run --stream build:docker && yarn build:docker:proxy && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh develop && cd -",
"build:docker:airgap": "node hosting/scripts/airgapped/airgappedDockerBuild",
"build:docker:airgap:single": "SINGLE_IMAGE=1 node hosting/scripts/airgapped/airgappedDockerBuild",
"build:digitalocean": "cd hosting/digitalocean && ./build.sh && cd -",

View File

@ -3,6 +3,7 @@ const mockS3 = {
deleteObject: jest.fn().mockReturnThis(),
deleteObjects: jest.fn().mockReturnThis(),
createBucket: jest.fn().mockReturnThis(),
getObject: jest.fn().mockReturnThis(),
listObject: jest.fn().mockReturnThis(),
getSignedUrl: jest.fn((operation: string, params: any) => {
return `http://s3.example.com/${params.Bucket}/${params.Key}`

View File

@ -21,7 +21,7 @@
"test:watch": "jest --watchAll"
},
"dependencies": {
"@budibase/nano": "10.1.2",
"@budibase/nano": "10.1.3",
"@budibase/pouchdb-replication-stream": "1.2.10",
"@budibase/shared-core": "0.0.0",
"@budibase/types": "0.0.0",

View File

@ -19,7 +19,7 @@ async function populateFromDB(appId: string) {
return doWithDB(
appId,
(db: Database) => {
return db.get(DocumentType.APP_METADATA)
return db.get<App>(DocumentType.APP_METADATA)
},
{ skip_setup: true }
)

View File

@ -1,6 +1,6 @@
const BaseCache = require("./base")
import BaseCache from "./base"
const GENERIC = new BaseCache.default()
const GENERIC = new BaseCache()
export enum CacheKey {
CHECKLIST = "checklist",
@ -19,6 +19,7 @@ export enum TTL {
}
function performExport(funcName: string) {
// @ts-ignore
return (...args: any) => GENERIC[funcName](...args)
}

View File

@ -2,4 +2,6 @@ export * as generic from "./generic"
export * as user from "./user"
export * as app from "./appMetadata"
export * as writethrough from "./writethrough"
export * as invite from "./invite"
export * as passwordReset from "./passwordReset"
export * from "./generic"

View File

@ -0,0 +1,86 @@
import * as utils from "../utils"
import { Duration, DurationType } from "../utils"
import env from "../environment"
import { getTenantId } from "../context"
import * as redis from "../redis/init"
const TTL_SECONDS = Duration.fromDays(7).toSeconds()
interface Invite {
email: string
info: any
}
interface InviteWithCode extends Invite {
code: string
}
/**
* Given an invite code and invite body, allows updating an existing/valid invite in redis
* @param code The invite code for an invite in redis
* @param value The body of the updated user invitation
*/
export async function updateCode(code: string, value: Invite) {
const client = await redis.getInviteClient()
await client.store(code, value, TTL_SECONDS)
}
/**
* Generates an invitation code and writes it to redis - which can later be checked for user creation.
* @param email the email address which the code is being sent to (for use later).
* @param info Information to be carried along with the invitation.
* @return returns the code that was stored to redis.
*/
export async function createCode(email: string, info: any): Promise<string> {
const code = utils.newid()
const client = await redis.getInviteClient()
await client.store(code, { email, info }, TTL_SECONDS)
return code
}
/**
* Checks that the provided invite code is valid - will return the email address of the user that was invited.
* @param code the invite code that was provided as part of the link.
* @return If the code is valid then an email address will be returned.
*/
export async function getCode(code: string): Promise<Invite> {
const client = await redis.getInviteClient()
const value = (await client.get(code)) as Invite | undefined
if (!value) {
throw "Invitation is not valid or has expired, please request a new one."
}
return value
}
export async function deleteCode(code: string) {
const client = await redis.getInviteClient()
await client.delete(code)
}
/**
Get all currently available user invitations for the current tenant.
**/
export async function getInviteCodes(): Promise<InviteWithCode[]> {
const client = await redis.getInviteClient()
const invites: { key: string; value: Invite }[] = await client.scan()
const results: InviteWithCode[] = invites.map(invite => {
return {
...invite.value,
code: invite.key,
}
})
if (!env.MULTI_TENANCY) {
return results
}
const tenantId = getTenantId()
return results.filter(invite => tenantId === invite.info.tenantId)
}
export async function getExistingInvites(
emails: string[]
): Promise<InviteWithCode[]> {
return (await getInviteCodes()).filter(invite =>
emails.includes(invite.email)
)
}
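Taken together, these helpers give a simple invite lifecycle. A usage sketch, with the email address, tenant, and role values invented for illustration:

  const code = await createCode("new.user@example.com", { tenantId: "default", role: "BASIC" })
  const invite = await getCode(code)        // throws if the code has expired (7 day TTL) or never existed
  await updateCode(code, { email: invite.email, info: { ...invite.info, role: "ADMIN" } })
  await deleteCode(code)                    // consume the invite once the user has been created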

View File

@ -0,0 +1,38 @@
import * as redis from "../redis/init"
import * as utils from "../utils"
import { Duration, DurationType } from "../utils"
const TTL_SECONDS = Duration.fromHours(1).toSeconds()
interface PasswordReset {
userId: string
info: any
}
/**
* Given a user ID this will store a code (that is returned) for an hour in redis.
* The user can then return this code for resetting their password (through their reset link).
* @param userId the ID of the user which is to be reset.
* @param info Info about the user/the reset process.
* @return returns the code that was stored to redis.
*/
export async function createCode(userId: string, info: any): Promise<string> {
const code = utils.newid()
const client = await redis.getPasswordResetClient()
await client.store(code, { userId, info }, TTL_SECONDS)
return code
}
/**
* Given a reset code this will look it up in redis and check that the code is valid.
* @param code The code provided via the email link.
* @return returns the user ID if it is found
*/
export async function getCode(code: string): Promise<PasswordReset> {
const client = await redis.getPasswordResetClient()
const value = (await client.get(code)) as PasswordReset | undefined
if (!value) {
throw "Provided information is not valid, cannot reset password - please try again."
}
return value
}

View File

@ -28,7 +28,7 @@ export enum ViewName {
APP_BACKUP_BY_TRIGGER = "by_trigger",
}
export const DeprecatedViews = {
export const DeprecatedViews: Record<string, string[]> = {
[ViewName.USER_BY_EMAIL]: [
// removed due to inaccuracy in view doc filter logic
"by_email",

View File

@ -4,7 +4,7 @@ import { ContextMap } from "./types"
export default class Context {
static storage = new AsyncLocalStorage<ContextMap>()
static run(context: ContextMap, func: any) {
static run<T>(context: ContextMap, func: () => T) {
return Context.storage.run(context, () => func())
}

View File

@ -98,17 +98,17 @@ function updateContext(updates: ContextMap): ContextMap {
return context
}
async function newContext(updates: ContextMap, task: any) {
async function newContext<T>(updates: ContextMap, task: () => T) {
// see if there already is a context setup
let context: ContextMap = updateContext(updates)
return Context.run(context, task)
}
export async function doInAutomationContext(params: {
export async function doInAutomationContext<T>(params: {
appId: string
automationId: string
task: any
}): Promise<any> {
task: () => T
}): Promise<T> {
const tenantId = getTenantIDFromAppID(params.appId)
return newContext(
{
@ -144,10 +144,10 @@ export async function doInTenant<T>(
return newContext(updates, task)
}
export async function doInAppContext(
export async function doInAppContext<T>(
appId: string | null,
task: any
): Promise<any> {
task: () => T
): Promise<T> {
if (!appId && !env.isTest()) {
throw new Error("appId is required")
}
@ -165,10 +165,10 @@ export async function doInAppContext(
return newContext(updates, task)
}
export async function doInIdentityContext(
export async function doInIdentityContext<T>(
identity: IdentityContext,
task: any
): Promise<any> {
task: () => T
): Promise<T> {
if (!identity) {
throw new Error("identity is required")
}
@ -276,6 +276,9 @@ export function getAuditLogsDB(): Database {
*/
export function getAppDB(opts?: any): Database {
const appId = getAppId()
if (!appId) {
throw new Error("Unable to retrieve app DB - no app ID.")
}
return getDB(appId, opts)
}
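With the generics above, the callback's return type now flows back to the caller instead of collapsing to any. A sketch, assuming an appId variable and the App type from @budibase/types:

  const app = await doInAppContext(appId, async () => {
    const db = getAppDB()                   // now throws early if no app ID is in context
    return await db.get<App>(DocumentType.APP_METADATA)
  })
  // `app` is typed as App rather than any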

View File

@ -8,3 +8,7 @@ export const CONSTANT_INTERNAL_ROW_COLS = [
] as const
export const CONSTANT_EXTERNAL_ROW_COLS = ["_id", "_rev", "tableId"] as const
export function isInternalColumnName(name: string): boolean {
return (CONSTANT_INTERNAL_ROW_COLS as readonly string[]).includes(name)
}
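A quick sketch of the new guard, assuming "_id" is one of the internal row columns in the constant above and "firstName" stands in for any user-defined column:

  isInternalColumnName("_id")        // true  - reserved internal column
  isInternalColumnName("firstName")  // false - ordinary user-defined column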

View File

@ -10,6 +10,7 @@ import {
DatabaseDeleteIndexOpts,
Document,
isDocument,
RowResponse,
} from "@budibase/types"
import { getCouchInfo } from "./connections"
import { directCouchUrlCall } from "./utils"
@ -49,10 +50,7 @@ export class DatabaseImpl implements Database {
private readonly couchInfo = getCouchInfo()
constructor(dbName?: string, opts?: DatabaseOpts, connection?: string) {
if (dbName == null) {
throw new Error("Database name cannot be undefined.")
}
constructor(dbName: string, opts?: DatabaseOpts, connection?: string) {
this.name = dbName
this.pouchOpts = opts || {}
if (connection) {
@ -113,7 +111,7 @@ export class DatabaseImpl implements Database {
}
}
async get<T>(id?: string): Promise<T | any> {
async get<T extends Document>(id?: string): Promise<T> {
const db = await this.checkSetup()
if (!id) {
throw new Error("Unable to get doc without a valid _id.")
@ -121,6 +119,35 @@ export class DatabaseImpl implements Database {
return this.updateOutput(() => db.get(id))
}
async getMultiple<T extends Document>(
ids: string[],
opts?: { allowMissing?: boolean }
): Promise<T[]> {
// get unique
ids = [...new Set(ids)]
const response = await this.allDocs<T>({
keys: ids,
include_docs: true,
})
const rowUnavailable = (row: RowResponse<T>) => {
// row is deleted - key lookup can return this
if (row.doc == null || ("deleted" in row.value && row.value.deleted)) {
return true
}
return row.error === "not_found"
}
const rows = response.rows.filter(row => !rowUnavailable(row))
const someMissing = rows.length !== response.rows.length
// some were filtered out - means some missing
if (!opts?.allowMissing && someMissing) {
const missing = response.rows.filter(row => rowUnavailable(row))
const missingIds = missing.map(row => row.key).join(", ")
throw new Error(`Unable to get documents: ${missingIds}`)
}
return rows.map(row => row.doc!)
}
async remove(idOrDoc: string | Document, rev?: string) {
const db = await this.checkSetup()
let _id: string
@ -176,7 +203,9 @@ export class DatabaseImpl implements Database {
return this.updateOutput(() => db.bulk({ docs: documents }))
}
async allDocs<T>(params: DatabaseQueryOpts): Promise<AllDocsResponse<T>> {
async allDocs<T extends Document>(
params: DatabaseQueryOpts
): Promise<AllDocsResponse<T>> {
const db = await this.checkSetup()
return this.updateOutput(() => db.list(params))
}
@ -196,7 +225,7 @@ export class DatabaseImpl implements Database {
return (await response.json()) as T
}
async query<T>(
async query<T extends Document>(
viewName: string,
params: DatabaseQueryOpts
): Promise<AllDocsResponse<T>> {
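A usage sketch for the new getMultiple, assuming `db` is a DatabaseImpl for an app database and `appIds` is a list of document IDs:

  // Duplicate IDs are collapsed; deleted or missing docs are filtered out
  const apps = await db.getMultiple<App>(appIds, { allowMissing: true })
  // Without allowMissing, any unavailable ID raises:
  //   Error: "Unable to get documents: <missing ids>"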

View File

@ -1,10 +1,7 @@
import env from "../environment"
import { directCouchQuery, DatabaseImpl } from "./couch"
import { CouchFindOptions, Database } from "@budibase/types"
import { CouchFindOptions, Database, DatabaseOpts } from "@budibase/types"
const dbList = new Set()
export function getDB(dbName?: string, opts?: any): Database {
export function getDB(dbName: string, opts?: DatabaseOpts): Database {
return new DatabaseImpl(dbName, opts)
}
@ -14,7 +11,7 @@ export function getDB(dbName?: string, opts?: any): Database {
export async function doWithDB<T>(
dbName: string,
cb: (db: Database) => Promise<T>,
opts = {}
opts?: DatabaseOpts
) {
const db = getDB(dbName, opts)
// need this to be async so that we can correctly close DB after all
@ -22,13 +19,6 @@ export async function doWithDB<T>(
return await cb(db)
}
export function allDbs() {
if (!env.isTest()) {
throw new Error("Cannot be used outside test environment.")
}
return [...dbList]
}
export async function directCouchAllDbs(queryString?: string) {
let couchPath = "/_all_dbs"
if (queryString) {

View File

@ -7,12 +7,19 @@ import {
} from "../constants"
import { getGlobalDB } from "../context"
import { doWithDB } from "./"
import { AllDocsResponse, Database, DatabaseQueryOpts } from "@budibase/types"
import {
AllDocsResponse,
Database,
DatabaseQueryOpts,
Document,
DesignDocument,
DBView,
} from "@budibase/types"
import env from "../environment"
const DESIGN_DB = "_design/database"
function DesignDoc() {
function DesignDoc(): DesignDocument {
return {
_id: DESIGN_DB,
// view collation information, read before writing any complex views:
@ -21,20 +28,14 @@ function DesignDoc() {
}
}
interface DesignDocument {
views: any
}
async function removeDeprecated(db: Database, viewName: ViewName) {
// @ts-ignore
if (!DeprecatedViews[viewName]) {
return
}
try {
const designDoc = await db.get<DesignDocument>(DESIGN_DB)
// @ts-ignore
for (let deprecatedNames of DeprecatedViews[viewName]) {
delete designDoc.views[deprecatedNames]
delete designDoc.views?.[deprecatedNames]
}
await db.put(designDoc)
} catch (err) {
@ -43,18 +44,18 @@ async function removeDeprecated(db: Database, viewName: ViewName) {
}
export async function createView(
db: any,
db: Database,
viewJs: string,
viewName: string
): Promise<void> {
let designDoc
try {
designDoc = (await db.get(DESIGN_DB)) as DesignDocument
designDoc = await db.get<DesignDocument>(DESIGN_DB)
} catch (err) {
// no design doc, make one
designDoc = DesignDoc()
}
const view = {
const view: DBView = {
map: viewJs,
}
designDoc.views = {
@ -109,7 +110,7 @@ export interface QueryViewOptions {
arrayResponse?: boolean
}
export async function queryViewRaw<T>(
export async function queryViewRaw<T extends Document>(
viewName: ViewName,
params: DatabaseQueryOpts,
db: Database,
@ -137,18 +138,16 @@ export async function queryViewRaw<T>(
}
}
export const queryView = async <T>(
export const queryView = async <T extends Document>(
viewName: ViewName,
params: DatabaseQueryOpts,
db: Database,
createFunc: any,
opts?: QueryViewOptions
): Promise<T[] | T | undefined> => {
): Promise<T[] | T> => {
const response = await queryViewRaw<T>(viewName, params, db, createFunc, opts)
const rows = response.rows
const docs = rows.map((row: any) =>
params.include_docs ? row.doc : row.value
)
const docs = rows.map(row => (params.include_docs ? row.doc! : row.value))
// if arrayResponse has been requested, always return array regardless of length
if (opts?.arrayResponse) {
@ -198,11 +197,11 @@ export const createPlatformUserView = async () => {
await createPlatformView(viewJs, ViewName.PLATFORM_USERS_LOWERCASE)
}
export const queryPlatformView = async <T>(
export const queryPlatformView = async <T extends Document>(
viewName: ViewName,
params: DatabaseQueryOpts,
opts?: QueryViewOptions
): Promise<T[] | T | undefined> => {
): Promise<T[] | T> => {
const CreateFuncByName: any = {
[ViewName.ACCOUNT_BY_EMAIL]: createPlatformAccountEmailView,
[ViewName.PLATFORM_USERS_LOWERCASE]: createPlatformUserView,
@ -220,7 +219,7 @@ const CreateFuncByName: any = {
[ViewName.USER_BY_APP]: createUserAppView,
}
export const queryGlobalView = async <T>(
export const queryGlobalView = async <T extends Document>(
viewName: ViewName,
params: DatabaseQueryOpts,
db?: Database,
@ -231,10 +230,10 @@ export const queryGlobalView = async <T>(
db = getGlobalDB()
}
const createFn = CreateFuncByName[viewName]
return queryView(viewName, params, db!, createFn, opts)
return queryView<T>(viewName, params, db!, createFn, opts)
}
export async function queryGlobalViewRaw<T>(
export async function queryGlobalViewRaw<T extends Document>(
viewName: ViewName,
params: DatabaseQueryOpts,
opts?: QueryViewOptions

View File

@ -6,6 +6,7 @@ import {
ViewName,
} from "../constants"
import { getProdAppID } from "./conversions"
import { DatabaseQueryOpts } from "@budibase/types"
/**
* If creating DB allDocs/query params with only a single top level ID this can be used, this
@ -22,8 +23,8 @@ import { getProdAppID } from "./conversions"
export function getDocParams(
docType: string,
docId?: string | null,
otherProps: any = {}
) {
otherProps: Partial<DatabaseQueryOpts> = {}
): DatabaseQueryOpts {
if (docId == null) {
docId = ""
}
@ -45,8 +46,8 @@ export function getDocParams(
export function getRowParams(
tableId?: string | null,
rowId?: string | null,
otherProps = {}
) {
otherProps: Partial<DatabaseQueryOpts> = {}
): DatabaseQueryOpts {
if (tableId == null) {
return getDocParams(DocumentType.ROW, null, otherProps)
}
@ -88,7 +89,10 @@ export const isDatasourceId = (id: string) => {
/**
* Gets parameters for retrieving workspaces.
*/
export function getWorkspaceParams(id = "", otherProps = {}) {
export function getWorkspaceParams(
id = "",
otherProps: Partial<DatabaseQueryOpts> = {}
): DatabaseQueryOpts {
return {
...otherProps,
startkey: `${DocumentType.WORKSPACE}${SEPARATOR}${id}`,
@ -99,7 +103,10 @@ export function getWorkspaceParams(id = "", otherProps = {}) {
/**
* Gets parameters for retrieving users.
*/
export function getGlobalUserParams(globalId: any, otherProps: any = {}) {
export function getGlobalUserParams(
globalId: any,
otherProps: Partial<DatabaseQueryOpts> = {}
): DatabaseQueryOpts {
if (!globalId) {
globalId = ""
}
@ -117,11 +124,17 @@ export function getGlobalUserParams(globalId: any, otherProps: any = {}) {
/**
* Gets parameters for retrieving users, this is a utility function for the getDocParams function.
*/
export function getUserMetadataParams(userId?: string | null, otherProps = {}) {
export function getUserMetadataParams(
userId?: string | null,
otherProps: Partial<DatabaseQueryOpts> = {}
): DatabaseQueryOpts {
return getRowParams(InternalTable.USER_METADATA, userId, otherProps)
}
export function getUsersByAppParams(appId: any, otherProps: any = {}) {
export function getUsersByAppParams(
appId: any,
otherProps: Partial<DatabaseQueryOpts> = {}
): DatabaseQueryOpts {
const prodAppId = getProdAppID(appId)
return {
...otherProps,
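The helpers now return DatabaseQueryOpts rather than untyped objects. A hedged example of the kind of call site they serve (the table ID is illustrative):

  const params = getRowParams("ta_users", null, { include_docs: true })
  // params is a DatabaseQueryOpts with startkey/endkey bounds for that table's rows,
  // ready to pass straight to db.allDocs(params)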

View File

@ -75,12 +75,12 @@ function getPackageJsonFields(): {
const content = readFileSync(packageJsonFile!, "utf-8")
const parsedContent = JSON.parse(content)
return {
VERSION: parsedContent.version,
VERSION: process.env.BUDIBASE_VERSION || parsedContent.version,
SERVICE_NAME: parsedContent.name,
}
} catch {
// throwing an error here is confusing/causes backend-core to be hard to import
return { VERSION: "", SERVICE_NAME: "" }
return { VERSION: process.env.BUDIBASE_VERSION || "", SERVICE_NAME: "" }
}
}

View File

@ -30,7 +30,9 @@ export * as timers from "./timers"
export { default as env } from "./environment"
export * as blacklist from "./blacklist"
export * as docUpdates from "./docUpdates"
export * from "./utils/Duration"
export { SearchParams } from "./db"
export * as docIds from "./docIds"
// Add context to tenancy for backwards compatibility
// only do this for external usages to prevent internal
// circular dependencies
@ -49,6 +51,7 @@ export * from "./constants"
// expose package init function
import * as db from "./db"
export const init = (opts: any = {}) => {
db.init(opts.db)
}

View File

@ -1,37 +1,50 @@
import env from "../../environment"
import * as objectStore from "../objectStore"
import * as cloudfront from "../cloudfront"
import qs from "querystring"
import { DEFAULT_TENANT_ID, getTenantId } from "../../context"
export function clientLibraryPath(appId: string) {
return `${objectStore.sanitizeKey(appId)}/budibase-client.js`
}
/**
* In production the client library is stored in the object store, however in development
* we use the symlinked version produced by lerna, located in node modules. We link to this
* via a specific endpoint (under /api/assets/client).
* @param appId In production we need the appId to look up the correct bucket, as the
* version of the client lib may differ between apps.
* @param version The version to retrieve.
* @return The URL to be inserted into appPackage response or server rendered
* app index file.
* Previously we used to serve the client library directly from Cloudfront, however
* due to issues with the domain we were unable to continue doing this - keeping
* in case we are able to switch back to the CDN path again in future.
*/
export const clientLibraryUrl = (appId: string, version: string) => {
if (env.isProd()) {
let file = `${objectStore.sanitizeKey(appId)}/budibase-client.js`
if (env.CLOUDFRONT_CDN) {
// append app version to bust the cache
if (version) {
file += `?v=${version}`
}
// don't need to use presigned for client with cloudfront
// file is public
return cloudfront.getUrl(file)
} else {
return objectStore.getPresignedUrl(env.APPS_BUCKET_NAME, file)
export function clientLibraryCDNUrl(appId: string, version: string) {
let file = clientLibraryPath(appId)
if (env.CLOUDFRONT_CDN) {
// append app version to bust the cache
if (version) {
file += `?v=${version}`
}
// don't need to use presigned for client with cloudfront
// file is public
return cloudfront.getUrl(file)
} else {
return `/api/assets/client`
return objectStore.getPresignedUrl(env.APPS_BUCKET_NAME, file)
}
}
export const getAppFileUrl = (s3Key: string) => {
export function clientLibraryUrl(appId: string, version: string) {
let tenantId, qsParams: { appId: string; version: string; tenantId?: string }
try {
tenantId = getTenantId()
} finally {
qsParams = {
appId,
version,
}
}
if (tenantId && tenantId !== DEFAULT_TENANT_ID) {
qsParams.tenantId = tenantId
}
return `/api/assets/client?${qs.encode(qsParams)}`
}
export function getAppFileUrl(s3Key: string) {
if (env.CLOUDFRONT_CDN) {
return cloudfront.getPresignedUrl(s3Key)
} else {

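A hedged sketch of the URLs the rewritten helpers produce; the app ID, version and import path below are assumptions for illustration.

// illustration only - import path and values assumed
import { clientLibraryUrl, clientLibraryCDNUrl } from "./app"

const url = clientLibraryUrl("app_123", "2.0.0")
// e.g. "/api/assets/client?appId=app_123&version=2.0.0", plus
// "&tenantId=<tenant>" when running under a non-default tenant
const cdnUrl = clientLibraryCDNUrl("app_123", "2.0.0")
// returns the CloudFront URL when CLOUDFRONT_CDN is set, otherwise a
// presigned object store URL
console.log(url, cdnUrl)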
View File

@ -6,7 +6,7 @@ import { Plugin } from "@budibase/types"
// URLS
export const enrichPluginURLs = (plugins: Plugin[]) => {
export function enrichPluginURLs(plugins: Plugin[]) {
if (!plugins || !plugins.length) {
return []
}
@ -17,12 +17,12 @@ export const enrichPluginURLs = (plugins: Plugin[]) => {
})
}
const getPluginJSUrl = (plugin: Plugin) => {
function getPluginJSUrl(plugin: Plugin) {
const s3Key = getPluginJSKey(plugin)
return getPluginUrl(s3Key)
}
const getPluginIconUrl = (plugin: Plugin): string | undefined => {
function getPluginIconUrl(plugin: Plugin): string | undefined {
const s3Key = getPluginIconKey(plugin)
if (!s3Key) {
return
@ -30,7 +30,7 @@ const getPluginIconUrl = (plugin: Plugin): string | undefined => {
return getPluginUrl(s3Key)
}
const getPluginUrl = (s3Key: string) => {
function getPluginUrl(s3Key: string) {
if (env.CLOUDFRONT_CDN) {
return cloudfront.getPresignedUrl(s3Key)
} else {
@ -40,11 +40,11 @@ const getPluginUrl = (s3Key: string) => {
// S3 KEYS
export const getPluginJSKey = (plugin: Plugin) => {
export function getPluginJSKey(plugin: Plugin) {
return getPluginS3Key(plugin, "plugin.min.js")
}
export const getPluginIconKey = (plugin: Plugin) => {
export function getPluginIconKey(plugin: Plugin) {
// stored iconUrl is deprecated - hardcode to icon.svg in this case
const iconFileName = plugin.iconUrl ? "icon.svg" : plugin.iconFileName
if (!iconFileName) {
@ -53,12 +53,12 @@ export const getPluginIconKey = (plugin: Plugin) => {
return getPluginS3Key(plugin, iconFileName)
}
const getPluginS3Key = (plugin: Plugin, fileName: string) => {
function getPluginS3Key(plugin: Plugin, fileName: string) {
const s3Key = getPluginS3Dir(plugin.name)
return `${s3Key}/${fileName}`
}
export const getPluginS3Dir = (pluginName: string) => {
export function getPluginS3Dir(pluginName: string) {
let s3Key = `${pluginName}`
if (env.MULTI_TENANCY) {
const tenantId = context.getTenantId()

View File

@ -1,5 +1,4 @@
import * as app from "../app"
import { getAppFileUrl } from "../app"
import { testEnv } from "../../../../tests/extra"
describe("app", () => {
@ -7,6 +6,15 @@ describe("app", () => {
testEnv.nodeJest()
})
function baseCheck(url: string, tenantId?: string) {
expect(url).toContain("/api/assets/client")
if (tenantId) {
expect(url).toContain(`tenantId=${tenantId}`)
}
expect(url).toContain("appId=app_123")
expect(url).toContain("version=2.0.0")
}
describe("clientLibraryUrl", () => {
function getClientUrl() {
return app.clientLibraryUrl("app_123/budibase-client.js", "2.0.0")
@ -20,31 +28,19 @@ describe("app", () => {
it("gets url in dev", () => {
testEnv.nodeDev()
const url = getClientUrl()
expect(url).toBe("/api/assets/client")
})
it("gets url with embedded minio", () => {
testEnv.withMinio()
const url = getClientUrl()
expect(url).toBe(
"/files/signed/prod-budi-app-assets/app_123/budibase-client.js/budibase-client.js"
)
baseCheck(url)
})
it("gets url with custom S3", () => {
testEnv.withS3()
const url = getClientUrl()
expect(url).toBe(
"http://s3.example.com/prod-budi-app-assets/app_123/budibase-client.js/budibase-client.js"
)
baseCheck(url)
})
it("gets url with cloudfront + s3", () => {
testEnv.withCloudfront()
const url = getClientUrl()
expect(url).toBe(
"http://cf.example.com/app_123/budibase-client.js/budibase-client.js?v=2.0.0"
)
baseCheck(url)
})
})
@ -57,7 +53,7 @@ describe("app", () => {
testEnv.nodeDev()
await testEnv.withTenant(tenantId => {
const url = getClientUrl()
expect(url).toBe("/api/assets/client")
baseCheck(url, tenantId)
})
})
@ -65,9 +61,7 @@ describe("app", () => {
await testEnv.withTenant(tenantId => {
testEnv.withMinio()
const url = getClientUrl()
expect(url).toBe(
"/files/signed/prod-budi-app-assets/app_123/budibase-client.js/budibase-client.js"
)
baseCheck(url, tenantId)
})
})
@ -75,9 +69,7 @@ describe("app", () => {
await testEnv.withTenant(tenantId => {
testEnv.withS3()
const url = getClientUrl()
expect(url).toBe(
"http://s3.example.com/prod-budi-app-assets/app_123/budibase-client.js/budibase-client.js"
)
baseCheck(url, tenantId)
})
})
@ -85,9 +77,7 @@ describe("app", () => {
await testEnv.withTenant(tenantId => {
testEnv.withCloudfront()
const url = getClientUrl()
expect(url).toBe(
"http://cf.example.com/app_123/budibase-client.js/budibase-client.js?v=2.0.0"
)
baseCheck(url, tenantId)
})
})
})

View File

@ -1,6 +1,6 @@
const sanitize = require("sanitize-s3-objectkey")
import AWS from "aws-sdk"
import stream from "stream"
import stream, { Readable } from "stream"
import fetch from "node-fetch"
import tar from "tar-fs"
import zlib from "zlib"
@ -66,10 +66,10 @@ export function sanitizeBucket(input: string) {
* @return an S3 object store object, check S3 Nodejs SDK for usage.
* @constructor
*/
export const ObjectStore = (
export function ObjectStore(
bucket: string,
opts: { presigning: boolean } = { presigning: false }
) => {
) {
const config: any = {
s3ForcePathStyle: true,
signatureVersion: "v4",
@ -104,7 +104,7 @@ export const ObjectStore = (
* Given an object store and a bucket name this will make sure the bucket exists;
* if it does not exist then it will create it.
*/
export const makeSureBucketExists = async (client: any, bucketName: string) => {
export async function makeSureBucketExists(client: any, bucketName: string) {
bucketName = sanitizeBucket(bucketName)
try {
await client
@ -139,13 +139,13 @@ export const makeSureBucketExists = async (client: any, bucketName: string) => {
* Uploads the contents of a file given the required parameters, useful when
* temp files are in use (for example a file uploaded as an attachment).
*/
export const upload = async ({
export async function upload({
bucket: bucketName,
filename,
path,
type,
metadata,
}: UploadParams) => {
}: UploadParams) {
const extension = filename.split(".").pop()
const fileBytes = fs.readFileSync(path)
@ -180,12 +180,12 @@ export const upload = async ({
* Similar to the upload function but can be used to send a file stream
* through to the object store.
*/
export const streamUpload = async (
export async function streamUpload(
bucketName: string,
filename: string,
stream: any,
extra = {}
) => {
) {
const objectStore = ObjectStore(bucketName)
await makeSureBucketExists(objectStore, bucketName)
@ -215,7 +215,7 @@ export const streamUpload = async (
* Retrieves the contents of a file from the object store. If it is a known content type it
* will be converted, otherwise it will be returned as a buffer stream.
*/
export const retrieve = async (bucketName: string, filepath: string) => {
export async function retrieve(bucketName: string, filepath: string) {
const objectStore = ObjectStore(bucketName)
const params = {
Bucket: sanitizeBucket(bucketName),
@ -230,7 +230,7 @@ export const retrieve = async (bucketName: string, filepath: string) => {
}
}
export const listAllObjects = async (bucketName: string, path: string) => {
export async function listAllObjects(bucketName: string, path: string) {
const objectStore = ObjectStore(bucketName)
const list = (params: ListParams = {}) => {
return objectStore
@ -261,11 +261,11 @@ export const listAllObjects = async (bucketName: string, path: string) => {
/**
* Generate a presigned url with a default TTL of 1 hour
*/
export const getPresignedUrl = (
export function getPresignedUrl(
bucketName: string,
key: string,
durationSeconds: number = 3600
) => {
) {
const objectStore = ObjectStore(bucketName, { presigning: true })
const params = {
Bucket: sanitizeBucket(bucketName),
@ -291,7 +291,7 @@ export const getPresignedUrl = (
/**
* Same as the retrieval function but writes to a temporary file.
*/
export const retrieveToTmp = async (bucketName: string, filepath: string) => {
export async function retrieveToTmp(bucketName: string, filepath: string) {
bucketName = sanitizeBucket(bucketName)
filepath = sanitizeKey(filepath)
const data = await retrieve(bucketName, filepath)
@ -300,7 +300,7 @@ export const retrieveToTmp = async (bucketName: string, filepath: string) => {
return outputPath
}
export const retrieveDirectory = async (bucketName: string, path: string) => {
export async function retrieveDirectory(bucketName: string, path: string) {
let writePath = join(budibaseTempDir(), v4())
fs.mkdirSync(writePath)
const objects = await listAllObjects(bucketName, path)
@ -324,7 +324,7 @@ export const retrieveDirectory = async (bucketName: string, path: string) => {
/**
* Delete a single file.
*/
export const deleteFile = async (bucketName: string, filepath: string) => {
export async function deleteFile(bucketName: string, filepath: string) {
const objectStore = ObjectStore(bucketName)
await makeSureBucketExists(objectStore, bucketName)
const params = {
@ -334,7 +334,7 @@ export const deleteFile = async (bucketName: string, filepath: string) => {
return objectStore.deleteObject(params).promise()
}
export const deleteFiles = async (bucketName: string, filepaths: string[]) => {
export async function deleteFiles(bucketName: string, filepaths: string[]) {
const objectStore = ObjectStore(bucketName)
await makeSureBucketExists(objectStore, bucketName)
const params = {
@ -349,10 +349,10 @@ export const deleteFiles = async (bucketName: string, filepaths: string[]) => {
/**
* Delete a path, including everything within.
*/
export const deleteFolder = async (
export async function deleteFolder(
bucketName: string,
folder: string
): Promise<any> => {
): Promise<any> {
bucketName = sanitizeBucket(bucketName)
folder = sanitizeKey(folder)
const client = ObjectStore(bucketName)
@ -383,11 +383,11 @@ export const deleteFolder = async (
}
}
export const uploadDirectory = async (
export async function uploadDirectory(
bucketName: string,
localPath: string,
bucketPath: string
) => {
) {
bucketName = sanitizeBucket(bucketName)
let uploads = []
const files = fs.readdirSync(localPath, { withFileTypes: true })
@ -404,11 +404,11 @@ export const uploadDirectory = async (
return files
}
export const downloadTarballDirect = async (
export async function downloadTarballDirect(
url: string,
path: string,
headers = {}
) => {
) {
path = sanitizeKey(path)
const response = await fetch(url, { headers })
if (!response.ok) {
@ -418,11 +418,11 @@ export const downloadTarballDirect = async (
await streamPipeline(response.body, zlib.createUnzip(), tar.extract(path))
}
export const downloadTarball = async (
export async function downloadTarball(
url: string,
bucketName: string,
path: string
) => {
) {
bucketName = sanitizeBucket(bucketName)
path = sanitizeKey(path)
const response = await fetch(url)
@ -438,3 +438,17 @@ export const downloadTarball = async (
// return the temporary path in case there is a use for it
return tmpPath
}
export async function getReadStream(
bucketName: string,
path: string
): Promise<Readable> {
bucketName = sanitizeBucket(bucketName)
path = sanitizeKey(path)
const client = ObjectStore(bucketName)
const params = {
Bucket: bucketName,
Key: path,
}
return client.getObject(params).createReadStream()
}

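A brief usage sketch for the converted object store helpers, including the new getReadStream; the bucket name, key and import path are made up for illustration.

// usage sketch only - bucket/key names and import path are assumed
import fs from "fs"
import { getReadStream, getPresignedUrl } from "./objectStore"

async function copyClientLibToDisk() {
  // stream the object out of the store without buffering it in memory
  const readStream = await getReadStream(
    "prod-budi-app-assets",
    "app_123/budibase-client.js"
  )
  readStream.pipe(fs.createWriteStream("/tmp/budibase-client.js"))

  // presigned URLs keep their default TTL of one hour (3600 seconds)
  return getPresignedUrl("prod-budi-app-assets", "app_123/budibase-client.js")
}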
View File

@ -18,8 +18,12 @@ export const ObjectStoreBuckets = {
}
const bbTmp = join(tmpdir(), ".budibase")
if (!fs.existsSync(bbTmp)) {
try {
fs.mkdirSync(bbTmp)
} catch (e: any) {
if (e.code !== "EEXIST") {
throw e
}
}
export function budibaseTempDir() {

View File

@ -36,7 +36,7 @@ class InMemoryQueue {
* @param opts This is not used by the in-memory queue as there is no real use
* case when running in memory, but it matches the Bull API
*/
constructor(name: string, opts = null) {
constructor(name: string, opts?: any) {
this._name = name
this._opts = opts
this._messages = []

View File

@ -2,11 +2,17 @@ import env from "../environment"
import { getRedisOptions } from "../redis/utils"
import { JobQueue } from "./constants"
import InMemoryQueue from "./inMemoryQueue"
import BullQueue from "bull"
import BullQueue, { QueueOptions } from "bull"
import { addListeners, StalledFn } from "./listeners"
import { Duration } from "../utils"
import * as timers from "../timers"
const CLEANUP_PERIOD_MS = 60 * 1000
// the queue lock is held for 5 minutes
const QUEUE_LOCK_MS = Duration.fromMinutes(5).toMs()
// queue lock is refreshed every 30 seconds
const QUEUE_LOCK_RENEW_INTERNAL_MS = Duration.fromSeconds(30).toMs()
// cleanup the queue every 60 seconds
const CLEANUP_PERIOD_MS = Duration.fromSeconds(60).toMs()
let QUEUES: BullQueue.Queue[] | InMemoryQueue[] = []
let cleanupInterval: NodeJS.Timeout
@ -20,8 +26,15 @@ export function createQueue<T>(
jobQueue: JobQueue,
opts: { removeStalledCb?: StalledFn } = {}
): BullQueue.Queue<T> {
const { opts: redisOpts, redisProtocolUrl } = getRedisOptions()
const queueConfig: any = redisProtocolUrl || { redis: redisOpts }
const redisOpts = getRedisOptions()
const queueConfig: QueueOptions = {
redis: redisOpts,
settings: {
maxStalledCount: 0,
lockDuration: QUEUE_LOCK_MS,
lockRenewTime: QUEUE_LOCK_RENEW_INTERNAL_MS,
},
}
let queue: any
if (!env.isTest()) {
queue = new BullQueue(jobQueue, queueConfig)

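A sketch of the Bull options the new constants resolve to; the queue name and Redis connection values are hypothetical, and the Duration conversions follow the helper added later in this commit.

// illustration only - queue name and redis details are made up
import BullQueue from "bull"
import { Duration } from "../utils"

const exampleQueue = new BullQueue("hypothetical-queue", {
  redis: { host: "localhost", port: 6379 },
  settings: {
    // stalled jobs are failed rather than silently re-run
    maxStalledCount: 0,
    // lock held for 5 minutes -> 300000ms
    lockDuration: Duration.fromMinutes(5).toMs(),
    // lock renewed every 30 seconds -> 30000ms
    lockRenewTime: Duration.fromSeconds(30).toMs(),
  },
})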
View File

@ -7,15 +7,19 @@ let userClient: Client,
cacheClient: Client,
writethroughClient: Client,
lockClient: Client,
socketClient: Client
socketClient: Client,
inviteClient: Client,
passwordResetClient: Client
async function init() {
export async function init() {
userClient = await new Client(utils.Databases.USER_CACHE).init()
sessionClient = await new Client(utils.Databases.SESSIONS).init()
appClient = await new Client(utils.Databases.APP_METADATA).init()
cacheClient = await new Client(utils.Databases.GENERIC_CACHE).init()
lockClient = await new Client(utils.Databases.LOCKS).init()
writethroughClient = await new Client(utils.Databases.WRITE_THROUGH).init()
inviteClient = await new Client(utils.Databases.INVITATIONS).init()
passwordResetClient = await new Client(utils.Databases.PW_RESETS).init()
socketClient = await new Client(
utils.Databases.SOCKET_IO,
utils.SelectableDatabase.SOCKET_IO
@ -29,6 +33,8 @@ export async function shutdown() {
if (cacheClient) await cacheClient.finish()
if (writethroughClient) await writethroughClient.finish()
if (lockClient) await lockClient.finish()
if (inviteClient) await inviteClient.finish()
if (passwordResetClient) await passwordResetClient.finish()
if (socketClient) await socketClient.finish()
}
@ -84,3 +90,17 @@ export async function getSocketClient() {
}
return socketClient
}
export async function getInviteClient() {
if (!inviteClient) {
await init()
}
return inviteClient
}
export async function getPasswordResetClient() {
if (!passwordResetClient) {
await init()
}
return passwordResetClient
}

View File

@ -16,6 +16,7 @@ import {
getRedisOptions,
SEPARATOR,
SelectableDatabase,
getRedisConnectionDetails,
} from "./utils"
import * as timers from "../timers"
@ -27,7 +28,6 @@ const DEFAULT_SELECT_DB = SelectableDatabase.DEFAULT
// for testing just generate the client once
let CLOSED = false
let CLIENTS: { [key: number]: any } = {}
0
let CONNECTED = false
// mock redis always connected
@ -91,12 +91,11 @@ function init(selectDb = DEFAULT_SELECT_DB) {
if (client) {
client.disconnect()
}
const { redisProtocolUrl, opts, host, port } = getRedisOptions()
const { host, port } = getRedisConnectionDetails()
const opts = getRedisOptions()
if (CLUSTERED) {
client = new RedisCore.Cluster([{ host, port }], opts)
} else if (redisProtocolUrl) {
client = new RedisCore(redisProtocolUrl)
} else {
client = new RedisCore(opts)
}

View File

@ -1,4 +1,5 @@
import env from "../environment"
import * as Redis from "ioredis"
const SLOT_REFRESH_MS = 2000
const CONNECT_TIMEOUT_MS = 10000
@ -42,7 +43,7 @@ export enum Databases {
export enum SelectableDatabase {
DEFAULT = 0,
SOCKET_IO = 1,
UNUSED_1 = 2,
RATE_LIMITING = 2,
UNUSED_2 = 3,
UNUSED_3 = 4,
UNUSED_4 = 5,
@ -58,7 +59,7 @@ export enum SelectableDatabase {
UNUSED_14 = 15,
}
export function getRedisOptions() {
export function getRedisConnectionDetails() {
let password = env.REDIS_PASSWORD
let url: string[] | string = env.REDIS_URL.split("//")
// get rid of the protocol
@ -74,28 +75,34 @@ export function getRedisOptions() {
}
const [host, port] = url.split(":")
let redisProtocolUrl
// fully qualified redis URL
if (/rediss?:\/\//.test(env.REDIS_URL)) {
redisProtocolUrl = env.REDIS_URL
return {
host,
password,
port: parseInt(port),
}
}
const opts: any = {
export function getRedisOptions() {
const { host, password, port } = getRedisConnectionDetails()
let redisOpts: Redis.RedisOptions = {
connectTimeout: CONNECT_TIMEOUT_MS,
port: port,
host,
password,
}
let opts: Redis.ClusterOptions | Redis.RedisOptions = redisOpts
if (env.REDIS_CLUSTERED) {
opts.redisOptions = {}
opts.redisOptions.tls = {}
opts.redisOptions.password = password
opts.slotsRefreshTimeout = SLOT_REFRESH_MS
opts.dnsLookup = (address: string, callback: any) => callback(null, address)
} else {
opts.host = host
opts.port = port
opts.password = password
opts = {
connectTimeout: CONNECT_TIMEOUT_MS,
redisOptions: {
...redisOpts,
tls: {},
},
slotsRefreshTimeout: SLOT_REFRESH_MS,
dnsLookup: (address: string, callback: any) => callback(null, address),
} as Redis.ClusterOptions
}
return { opts, host, port: parseInt(port), redisProtocolUrl }
return opts
}
export function addDbPrefix(db: string, key: string) {

View File

@ -122,7 +122,9 @@ export async function roleToNumber(id?: string) {
if (isBuiltin(id)) {
return builtinRoleToNumber(id)
}
const hierarchy = (await getUserRoleHierarchy(id)) as RoleDoc[]
const hierarchy = (await getUserRoleHierarchy(id, {
defaultPublic: true,
})) as RoleDoc[]
for (let role of hierarchy) {
if (isBuiltin(role?.inherits)) {
return builtinRoleToNumber(role.inherits) + 1
@ -192,12 +194,15 @@ export async function getRole(
/**
* Simple function to get all the roles based on the top level user role ID.
*/
async function getAllUserRoles(userRoleId?: string): Promise<RoleDoc[]> {
async function getAllUserRoles(
userRoleId?: string,
opts?: { defaultPublic?: boolean }
): Promise<RoleDoc[]> {
// admins have access to all roles
if (userRoleId === BUILTIN_IDS.ADMIN) {
return getAllRoles()
}
let currentRole = await getRole(userRoleId)
let currentRole = await getRole(userRoleId, opts)
let roles = currentRole ? [currentRole] : []
let roleIds = [userRoleId]
// get all the inherited roles
@ -226,12 +231,16 @@ export async function getUserRoleIdHierarchy(
* Returns an ordered array of the user's inherited role IDs; this can be used
* to determine if a user can access something that requires a specific role.
* @param userRoleId The user's role ID; this can be found in their access token.
* @param opts optional - if you want to default to the public role, use this.
* @returns An ordered array of the roles, with the first being their
* highest level of access and the last being the lowest level.
*/
export async function getUserRoleHierarchy(userRoleId?: string) {
export async function getUserRoleHierarchy(
userRoleId?: string,
opts?: { defaultPublic?: boolean }
) {
// special case, if they don't have a role then they are a public user
return getAllUserRoles(userRoleId)
return getAllUserRoles(userRoleId, opts)
}
// this function checks that the provided permissions are in an array format

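A hedged sketch of how the updated role helpers are expected to behave; the role ID used is one of the built-in roles, the import path is assumed, and the comments describe intent rather than asserting exact output.

// illustration only - import path assumed
import { getUserRoleHierarchy, roleToNumber } from "./roles"

async function roleExample() {
  // ordered array of role docs, highest level of access first, lowest last
  const hierarchy = await getUserRoleHierarchy("POWER", { defaultPublic: true })

  // roleToNumber now builds the hierarchy with defaultPublic: true, so a role
  // that cannot be resolved should fall back to the public level rather than
  // failing outright
  const level = await roleToNumber("POWER")
  return { hierarchy, level }
}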
View File

@ -164,14 +164,10 @@ export class UserDB {
}
}
static async getUsersByAppAccess(appId?: string) {
const opts: any = {
include_docs: true,
limit: 50,
}
static async getUsersByAppAccess(opts: { appId?: string; limit?: number }) {
let response: User[] = await usersCore.searchGlobalUsersByAppAccess(
appId,
opts
opts.appId,
{ limit: opts.limit || 50 }
)
return response
}
@ -307,7 +303,7 @@ export class UserDB {
static async bulkCreate(
newUsersRequested: User[],
groups: string[]
groups?: string[]
): Promise<BulkUserCreated> {
const tenantId = getTenantId()
@ -332,7 +328,7 @@ export class UserDB {
})
continue
}
newUser.userGroups = groups
newUser.userGroups = groups || []
newUsers.push(newUser)
if (isCreator(newUser)) {
newCreators.push(newUser)
@ -417,15 +413,13 @@ export class UserDB {
}
// Get users and delete
const allDocsResponse: AllDocsResponse<User> = await db.allDocs({
const allDocsResponse = await db.allDocs<User>({
include_docs: true,
keys: userIds,
})
const usersToDelete: User[] = allDocsResponse.rows.map(
(user: RowResponse<User>) => {
return user.doc
}
)
const usersToDelete = allDocsResponse.rows.map(user => {
return user.doc!
})
// Delete from DB
const toDelete = usersToDelete.map(user => ({

View File

@ -6,6 +6,7 @@ import {
} from "@budibase/types"
import * as dbUtils from "../db"
import { ViewName } from "../constants"
import { getExistingInvites } from "../cache/invite"
/**
* Apply a system-wide search on emails:
@ -26,6 +27,9 @@ export async function searchExistingEmails(emails: string[]) {
const existingAccounts = await getExistingAccounts(emails)
matchedEmails.push(...existingAccounts.map(account => account.email))
const invitedEmails = await getExistingInvites(emails)
matchedEmails.push(...invitedEmails.map(invite => invite.email))
return [...new Set(matchedEmails.map(email => email.toLowerCase()))]
}

View File

@ -19,6 +19,8 @@ import {
SearchUsersRequest,
User,
ContextUser,
DatabaseQueryOpts,
CouchFindOptions,
} from "@budibase/types"
import { getGlobalDB } from "../context"
import * as context from "../context"
@ -139,7 +141,7 @@ export const getGlobalUserByEmail = async (
export const searchGlobalUsersByApp = async (
appId: any,
opts: any,
opts: DatabaseQueryOpts,
getOpts?: GetOpts
) => {
if (typeof appId !== "string") {
@ -149,7 +151,7 @@ export const searchGlobalUsersByApp = async (
include_docs: true,
})
params.startkey = opts && opts.startkey ? opts.startkey : params.startkey
let response = await queryGlobalView(ViewName.USER_BY_APP, params)
let response = await queryGlobalView<User>(ViewName.USER_BY_APP, params)
if (!response) {
response = []
@ -165,7 +167,10 @@ export const searchGlobalUsersByApp = async (
Return any user who potentially has access to the application
Admins, developers and app users with the explicitly role.
*/
export const searchGlobalUsersByAppAccess = async (appId: any, opts: any) => {
export const searchGlobalUsersByAppAccess = async (
appId: any,
opts?: { limit?: number }
) => {
const roleSelector = `roles.${appId}`
let orQuery: any[] = [
@ -186,7 +191,7 @@ export const searchGlobalUsersByAppAccess = async (appId: any, opts: any) => {
orQuery.push(roleCheck)
}
let searchOptions = {
let searchOptions: CouchFindOptions = {
selector: {
$or: orQuery,
_id: {
@ -197,7 +202,7 @@ export const searchGlobalUsersByAppAccess = async (appId: any, opts: any) => {
}
const resp = await directCouchFind(context.getGlobalDBName(), searchOptions)
return resp?.rows
return resp.rows
}
export const getGlobalUserByAppPage = (appId: string, user: User) => {
@ -241,12 +246,15 @@ export const paginatedUsers = async ({
bookmark,
query,
appId,
limit,
}: SearchUsersRequest = {}) => {
const db = getGlobalDB()
const pageSize = limit ?? PAGE_LIMIT
const pageLimit = pageSize + 1
// get one extra document, to have the next page
const opts: any = {
const opts: DatabaseQueryOpts = {
include_docs: true,
limit: PAGE_LIMIT + 1,
limit: pageLimit,
}
// add a startkey if the page was specified (anchor)
if (bookmark) {
@ -269,7 +277,7 @@ export const paginatedUsers = async ({
const response = await db.allDocs(getGlobalUserParams(null, opts))
userList = response.rows.map((row: any) => row.doc)
}
return pagination(userList, PAGE_LIMIT, {
return pagination(userList, pageSize, {
paginate: true,
property,
getKey,

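The pageLimit change above uses the common "fetch one extra document" trick to detect whether another page exists; a minimal standalone sketch of the idea, with hypothetical names:

// standalone sketch of the limit + 1 pagination pattern
function paginateResult<T>(docs: T[], pageSize: number) {
  // the query asked for pageSize + 1 docs; getting them all back means
  // there is at least one more page
  const hasNextPage = docs.length > pageSize
  const data = hasNextPage ? docs.slice(0, pageSize) : docs
  return { data, hasNextPage }
}

// e.g. with a pageSize of 50 the query uses limit 51; receiving 51 rows
// means a bookmark for the next page should be returned to the caller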
View File

@ -0,0 +1,52 @@
export enum DurationType {
MILLISECONDS = "milliseconds",
SECONDS = "seconds",
MINUTES = "minutes",
HOURS = "hours",
DAYS = "days",
}
const conversion: Record<DurationType, number> = {
milliseconds: 1,
seconds: 1000,
minutes: 60 * 1000,
hours: 60 * 60 * 1000,
days: 24 * 60 * 60 * 1000,
}
export class Duration {
static convert(from: DurationType, to: DurationType, duration: number) {
const milliseconds = duration * conversion[from]
return milliseconds / conversion[to]
}
static from(from: DurationType, duration: number) {
return {
to: (to: DurationType) => {
return Duration.convert(from, to, duration)
},
toMs: () => {
return Duration.convert(from, DurationType.MILLISECONDS, duration)
},
toSeconds: () => {
return Duration.convert(from, DurationType.SECONDS, duration)
},
}
}
static fromSeconds(duration: number) {
return Duration.from(DurationType.SECONDS, duration)
}
static fromMinutes(duration: number) {
return Duration.from(DurationType.MINUTES, duration)
}
static fromHours(duration: number) {
return Duration.from(DurationType.HOURS, duration)
}
static fromDays(duration: number) {
return Duration.from(DurationType.DAYS, duration)
}
}

View File

@ -1,3 +1,4 @@
export * from "./hashing"
export * from "./utils"
export * from "./stringUtils"
export * from "./Duration"

View File

@ -0,0 +1,19 @@
import { Duration, DurationType } from "../Duration"
describe("duration", () => {
it("should convert minutes to milliseconds", () => {
expect(Duration.fromMinutes(5).toMs()).toBe(300000)
})
it("should convert seconds to milliseconds", () => {
expect(Duration.fromSeconds(30).toMs()).toBe(30000)
})
it("should convert days to milliseconds", () => {
expect(Duration.fromDays(1).toMs()).toBe(86400000)
})
it("should convert minutes to days", () => {
expect(Duration.fromMinutes(1440).to(DurationType.DAYS)).toBe(1)
})
})

View File

@ -188,4 +188,17 @@ describe("utils", () => {
expectResult(false)
})
})
describe("hasCircularStructure", () => {
it("should detect a circular structure", () => {
const a: any = { b: "b" }
const b = { a }
a.b = b
expect(utils.hasCircularStructure(b)).toBe(true)
})
it("should allow non-circular structures", () => {
expect(utils.hasCircularStructure({ a: "b" })).toBe(false)
})
})
})

View File

@ -237,3 +237,17 @@ export function timeout(timeMs: number) {
export function isAudited(event: Event) {
return !!AuditedEventFriendlyName[event]
}
export function hasCircularStructure(json: any) {
if (typeof json !== "object") {
return false
}
try {
JSON.stringify(json)
} catch (err) {
if (err instanceof Error && err?.message.includes("circular structure")) {
return true
}
}
return false
}

View File

@ -1,2 +1,3 @@
export const MOCK_DATE = new Date("2020-01-01T00:00:00.000Z")
export const MOCK_DATE_TIMESTAMP = 1577836800000

View File

@ -12,7 +12,7 @@ import { generator } from "./generator"
import { tenant } from "."
export const newEmail = () => {
return `${uuid()}@test.com`
return `${uuid()}@example.com`
}
export const user = (userProps?: Partial<Omit<User, "userId">>): User => {

View File

@ -8,6 +8,7 @@
export let id = null
export let text = null
export let disabled = false
export let readonly = false
export let size
export let indeterminate = false
@ -24,6 +25,7 @@
class:is-invalid={!!error}
class:checked={value}
class:is-indeterminate={indeterminate}
class:readonly
>
<input
checked={value}
@ -68,4 +70,7 @@
.spectrum-Checkbox-input {
opacity: 0;
}
.readonly {
pointer-events: none;
}
</style>

View File

@ -8,6 +8,7 @@
export let options = []
export let error = null
export let disabled = false
export let readonly = false
export let getOptionLabel = option => option
export let getOptionValue = option => option
@ -34,6 +35,7 @@
title={getOptionLabel(option)}
class="spectrum-Checkbox spectrum-FieldGroup-item"
class:is-invalid={!!error}
class:readonly
>
<label
class="spectrum-Checkbox spectrum-Checkbox--sizeM spectrum-FieldGroup-item"
@ -66,4 +68,7 @@
.spectrum-Checkbox-input {
opacity: 0;
}
.readonly {
pointer-events: none;
}
</style>

View File

@ -9,6 +9,7 @@
export let id = null
export let disabled = false
export let readonly = false
export let error = null
export let enableTime = true
export let value = null
@ -186,7 +187,7 @@
>
<div
id={flatpickrId}
class:is-disabled={disabled}
class:is-disabled={disabled || readonly}
class:is-invalid={!!error}
class="flatpickr spectrum-InputGroup spectrum-Datepicker"
class:is-focused={open}
@ -211,6 +212,7 @@
{/if}
<input
{disabled}
{readonly}
data-input
type="text"
class="spectrum-Textfield-input spectrum-InputGroup-input"

View File

@ -159,8 +159,10 @@
{#if selectedImage.size}
<div class="filesize">
{#if selectedImage.size <= BYTES_IN_MB}
{`${selectedImage.size / BYTES_IN_KB} KB`}
{:else}{`${selectedImage.size / BYTES_IN_MB} MB`}{/if}
{`${(selectedImage.size / BYTES_IN_KB).toFixed(1)} KB`}
{:else}{`${(selectedImage.size / BYTES_IN_MB).toFixed(
1
)} MB`}{/if}
</div>
{/if}
{#if !disabled}
@ -203,8 +205,8 @@
{#if file.size}
<div class="filesize">
{#if file.size <= BYTES_IN_MB}
{`${file.size / BYTES_IN_KB} KB`}
{:else}{`${file.size / BYTES_IN_MB} MB`}{/if}
{`${(file.size / BYTES_IN_KB).toFixed(1)} KB`}
{:else}{`${(file.size / BYTES_IN_MB).toFixed(1)} MB`}{/if}
</div>
{/if}
{#if !disabled}
@ -384,7 +386,7 @@
}
.compact .placeholder,
.compact img {
margin: 10px 16px;
margin: 8px 16px;
}
.compact img {
height: 90px;
@ -454,6 +456,12 @@
color: var(--red);
}
.spectrum-Dropzone {
height: 220px;
}
.compact .spectrum-Dropzone {
height: 40px;
}
.spectrum-Dropzone.disabled {
pointer-events: none;
background-color: var(--spectrum-global-color-gray-200);
@ -461,10 +469,6 @@
.disabled .spectrum-Heading--sizeL {
color: var(--spectrum-alias-text-color-disabled);
}
.compact .spectrum-Dropzone {
padding-top: 8px;
padding-bottom: 8px;
}
.compact .spectrum-IllustratedMessage-description {
margin: 0;
}
@ -475,7 +479,6 @@
flex-wrap: wrap;
justify-content: center;
}
.tag {
margin-top: 8px;
}

View File

@ -8,6 +8,7 @@
export let options = []
export let error = null
export let disabled = false
export let readonly = false
export let getOptionLabel = option => option
export let getOptionValue = option => option
export let getOptionTitle = option => option
@ -40,6 +41,7 @@
title={getOptionTitle(option)}
class="spectrum-Radio spectrum-FieldGroup-item spectrum-Radio--emphasized"
class:is-invalid={!!error}
class:readonly
>
<input
on:change={onChange}
@ -62,4 +64,7 @@
.spectrum-Radio-input {
opacity: 0;
}
.readonly {
pointer-events: none;
}
</style>

View File

@ -4,6 +4,7 @@
export let value = ""
export let placeholder = null
export let disabled = false
export let readonly = false
export let error = null
export let height = null
export let id = null
@ -20,6 +21,7 @@
{fullScreenOffset}
{disabled}
{easyMDEOptions}
{readonly}
on:change
/>
</div>

View File

@ -5,6 +5,7 @@
export let value = ""
export let placeholder = null
export let disabled = false
export let readonly = false
export let error = null
export let id = null
export let height = null
@ -61,6 +62,7 @@
class="spectrum-Textfield-input"
style={align ? `text-align: ${align}` : ""}
{disabled}
{readonly}
{id}
on:focus={() => (focus = true)}
on:blur={onChange}

View File

@ -7,6 +7,7 @@
export let label = null
export let labelPosition = "above"
export let disabled = false
export let readonly = false
export let error = null
export let enableTime = true
export let timeOnly = false
@ -33,6 +34,7 @@
<DatePicker
{error}
{disabled}
{readonly}
{value}
{placeholder}
{enableTime}

View File

@ -8,6 +8,7 @@
export let id = null
export let fullScreenOffset = 0
export let disabled = false
export let readonly = false
export let easyMDEOptions
const dispatch = createEventDispatcher()
@ -19,6 +20,9 @@
// control
$: checkValue(value)
$: mde?.codemirror.on("change", debouncedUpdate)
$: if (readonly || disabled) {
mde?.togglePreview()
}
const checkValue = val => {
if (mde && val !== latestValue) {
@ -54,6 +58,7 @@
easyMDEOptions={{
initialValue: value,
placeholder,
toolbar: disabled || readonly ? false : undefined,
...easyMDEOptions,
}}
/>

View File

@ -106,6 +106,13 @@
name: fieldName,
}
}
// Delete numeric only widths as these are grid widths and should be
// ignored
const width = fixedSchema[fieldName].width
if (width != null && `${width}`.trim().match(/^[0-9]+$/)) {
delete fixedSchema[fieldName].width
}
})
return fixedSchema
}

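A tiny sketch of which widths the new check strips: purely numeric values (grid column widths) are removed from the schema, while CSS-style widths are kept. Values below are examples only.

// example values only
const isGridWidth = (width: string | number) =>
  `${width}`.trim().match(/^[0-9]+$/) != null

isGridWidth(200) // true  -> width removed from the schema
isGridWidth("200 ") // true  -> removed (trimmed first)
isGridWidth("200px") // false -> kept
isGridWidth("50%") // false -> kept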
View File

@ -2,6 +2,15 @@
--background: #ffffff;
--ink: #000000;
/* Brand colours */
--bb-coral: #FF4E4E;
--bb-coral-light: #F97777;
--bb-indigo: #6E56FF;
--bb-indigo-light: #9F8FFF;
--bb-lime: #ECFFB5;
--bb-forest-green: #053835;
--bb-beige: #F6EFEA;
--grey-1: #fafafa;
--grey-2: #f5f5f5;
--grey-3: #eeeeee;

View File

@ -5,4 +5,5 @@ package-lock.json
release/
dist/
routify
.routify/
.routify/
svelte.config.js

View File

@ -1,80 +1,13 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Generator: Adobe Illustrator 24.1.2, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
viewBox="0 0 48 48" style="enable-background:new 0 0 48 48;" xml:space="preserve">
<style type="text/css">
.st0{fill:#000000;}
.st1{fill:#FFFFFF;}
.st2{fill:#4285F4;}
</style>
<rect x="-152.17" y="-24.17" class="st0" width="96.17" height="96.17"/>
<path class="st1" d="M-83.19,48h-41.79c-1.76,0-3.19-1.43-3.19-3.19V3.02c0-1.76,1.43-3.19,3.19-3.19h41.79
c1.76,0,3.19,1.43,3.19,3.19v41.79C-80,46.57-81.43,48-83.19,48z"/>
<g>
<g>
<path class="st0" d="M-99.62,12.57v9.94c1.15-1.21,2.59-1.81,4.32-1.81c1.03,0,1.97,0.19,2.82,0.58c0.86,0.39,1.59,0.91,2.19,1.57
c0.6,0.66,1.08,1.43,1.42,2.32c0.34,0.89,0.51,1.84,0.51,2.85c0,1.03-0.18,1.99-0.53,2.89c-0.35,0.9-0.84,1.68-1.47,2.35
c-0.63,0.67-1.37,1.19-2.23,1.58c-0.86,0.39-1.78,0.58-2.77,0.58c-1.8,0-3.22-0.66-4.27-1.97V35h-4.89V12.57H-99.62z
M-93.46,28.11c0-0.43-0.08-0.84-0.24-1.23c-0.16-0.39-0.39-0.72-0.68-1.01c-0.29-0.29-0.62-0.52-1-0.69
c-0.38-0.17-0.79-0.26-1.24-0.26c-0.43,0-0.84,0.08-1.22,0.24c-0.38,0.16-0.71,0.39-0.99,0.68c-0.28,0.29-0.5,0.63-0.68,1.01
c-0.17,0.39-0.26,0.8-0.26,1.23c0,0.43,0.08,0.84,0.24,1.22c0.16,0.38,0.39,0.71,0.68,0.99c0.29,0.28,0.63,0.5,1.01,0.68
c0.39,0.17,0.8,0.26,1.23,0.26c0.43,0,0.84-0.08,1.22-0.24c0.38-0.16,0.71-0.39,0.99-0.68c0.28-0.29,0.5-0.62,0.68-1
C-93.55,28.92-93.46,28.52-93.46,28.11z"/>
</g>
<g>
<path class="st0" d="M-114.76,12.57v9.94c1.15-1.21,2.59-1.81,4.32-1.81c1.03,0,1.97,0.19,2.82,0.58
c0.86,0.39,1.59,0.91,2.19,1.57c0.6,0.66,1.08,1.43,1.42,2.32c0.34,0.89,0.51,1.84,0.51,2.85c0,1.03-0.18,1.99-0.53,2.89
c-0.35,0.9-0.84,1.68-1.47,2.35c-0.63,0.67-1.37,1.19-2.23,1.58c-0.86,0.39-1.78,0.58-2.77,0.58c-1.8,0-3.22-0.66-4.27-1.97V35
h-4.89V12.57H-114.76z M-108.6,28.11c0-0.43-0.08-0.84-0.24-1.23c-0.16-0.39-0.39-0.72-0.68-1.01c-0.29-0.29-0.62-0.52-1-0.69
c-0.38-0.17-0.79-0.26-1.24-0.26c-0.43,0-0.84,0.08-1.22,0.24c-0.38,0.16-0.71,0.39-0.99,0.68c-0.28,0.29-0.5,0.63-0.68,1.01
c-0.17,0.39-0.26,0.8-0.26,1.23c0,0.43,0.08,0.84,0.24,1.22c0.16,0.38,0.39,0.71,0.68,0.99c0.29,0.28,0.63,0.5,1.01,0.68
c0.39,0.17,0.8,0.26,1.23,0.26c0.43,0,0.84-0.08,1.22-0.24c0.38-0.16,0.71-0.39,0.99-0.68c0.28-0.29,0.5-0.62,0.68-1
C-108.68,28.92-108.6,28.52-108.6,28.11z"/>
</g>
</g>
<path class="st2" d="M44.81,159H3.02c-1.76,0-3.19-1.43-3.19-3.19v-41.79c0-1.76,1.43-3.19,3.19-3.19h41.79
c1.76,0,3.19,1.43,3.19,3.19v41.79C48,157.57,46.57,159,44.81,159z"/>
<g>
<g>
<path class="st1" d="M28.38,123.57v9.94c1.15-1.21,2.59-1.81,4.32-1.81c1.03,0,1.97,0.19,2.82,0.58c0.86,0.39,1.59,0.91,2.19,1.57
c0.6,0.66,1.08,1.43,1.42,2.32c0.34,0.89,0.51,1.84,0.51,2.85c0,1.03-0.18,1.99-0.53,2.89c-0.35,0.9-0.84,1.68-1.47,2.35
c-0.63,0.67-1.37,1.19-2.23,1.58c-0.86,0.39-1.78,0.58-2.77,0.58c-1.8,0-3.22-0.66-4.27-1.97V146h-4.89v-22.43H28.38z
M34.54,139.11c0-0.43-0.08-0.84-0.24-1.23c-0.16-0.39-0.39-0.72-0.68-1.01c-0.29-0.29-0.62-0.52-1-0.69
c-0.38-0.17-0.79-0.26-1.24-0.26c-0.43,0-0.84,0.08-1.22,0.24c-0.38,0.16-0.71,0.39-0.99,0.68c-0.28,0.29-0.5,0.63-0.68,1.01
c-0.17,0.39-0.26,0.8-0.26,1.23c0,0.43,0.08,0.84,0.24,1.22c0.16,0.38,0.39,0.71,0.68,0.99c0.29,0.28,0.63,0.5,1.01,0.68
c0.39,0.17,0.8,0.26,1.23,0.26c0.43,0,0.84-0.08,1.22-0.24c0.38-0.16,0.71-0.39,0.99-0.68c0.28-0.29,0.5-0.62,0.68-1
C34.45,139.92,34.54,139.52,34.54,139.11z"/>
</g>
<g>
<path class="st1" d="M13.24,123.57v9.94c1.15-1.21,2.59-1.81,4.32-1.81c1.03,0,1.97,0.19,2.82,0.58c0.86,0.39,1.59,0.91,2.19,1.57
c0.6,0.66,1.08,1.43,1.42,2.32c0.34,0.89,0.51,1.84,0.51,2.85c0,1.03-0.18,1.99-0.53,2.89c-0.35,0.9-0.84,1.68-1.47,2.35
c-0.63,0.67-1.37,1.19-2.23,1.58c-0.86,0.39-1.78,0.58-2.77,0.58c-1.8,0-3.22-0.66-4.27-1.97V146H8.35v-22.43H13.24z M19.4,139.11
c0-0.43-0.08-0.84-0.24-1.23c-0.16-0.39-0.39-0.72-0.68-1.01c-0.29-0.29-0.62-0.52-1-0.69c-0.38-0.17-0.79-0.26-1.24-0.26
c-0.43,0-0.84,0.08-1.22,0.24c-0.38,0.16-0.71,0.39-0.99,0.68c-0.28,0.29-0.5,0.63-0.68,1.01c-0.17,0.39-0.26,0.8-0.26,1.23
c0,0.43,0.08,0.84,0.24,1.22c0.16,0.38,0.39,0.71,0.68,0.99c0.29,0.28,0.63,0.5,1.01,0.68c0.39,0.17,0.8,0.26,1.23,0.26
c0.43,0,0.84-0.08,1.22-0.24c0.38-0.16,0.71-0.39,0.99-0.68c0.28-0.29,0.5-0.62,0.68-1C19.32,139.92,19.4,139.52,19.4,139.11z"/>
</g>
</g>
<g>
<path class="st0" d="M44,48H4c-2.21,0-4-1.79-4-4V4c0-2.21,1.79-4,4-4h40c2.21,0,4,1.79,4,4v40C48,46.21,46.21,48,44,48z"/>
<g>
<path class="st1" d="M28.48,12v10.44c1.18-1.27,2.65-1.9,4.42-1.9c1.05,0,2.01,0.2,2.89,0.61c0.87,0.41,1.62,0.96,2.24,1.65
c0.62,0.69,1.1,1.5,1.45,2.44c0.35,0.94,0.52,1.93,0.52,2.99c0,1.08-0.18,2.09-0.54,3.04c-0.36,0.95-0.86,1.77-1.51,2.47
c-0.64,0.7-1.4,1.25-2.28,1.66C34.8,35.8,33.86,36,32.84,36c-1.84,0-3.3-0.69-4.37-2.07v1.62h-5V12H28.48z M34.78,28.31
c0-0.45-0.08-0.88-0.25-1.29c-0.17-0.41-0.4-0.76-0.69-1.06c-0.3-0.3-0.64-0.54-1.02-0.72c-0.39-0.18-0.81-0.27-1.27-0.27
c-0.44,0-0.86,0.09-1.24,0.26c-0.39,0.17-0.72,0.41-1.01,0.71c-0.29,0.3-0.52,0.66-0.69,1.06c-0.18,0.41-0.26,0.84-0.26,1.29
s0.08,0.88,0.25,1.28c0.17,0.4,0.4,0.74,0.69,1.04c0.29,0.29,0.64,0.53,1.04,0.71c0.4,0.18,0.82,0.27,1.26,0.27
c0.44,0,0.86-0.09,1.24-0.26c0.39-0.17,0.72-0.41,1.01-0.71c0.29-0.3,0.52-0.65,0.69-1.05C34.69,29.16,34.78,28.75,34.78,28.31z"
/>
</g>
<g>
<path class="st1" d="M13,12v10.44c1.18-1.27,2.65-1.9,4.42-1.9c1.05,0,2.01,0.2,2.89,0.61c0.87,0.41,1.62,0.96,2.24,1.65
c0.62,0.69,1.1,1.5,1.45,2.44c0.35,0.94,0.52,1.93,0.52,2.99c0,1.08-0.18,2.09-0.54,3.04c-0.36,0.95-0.86,1.77-1.51,2.47
c-0.64,0.7-1.4,1.25-2.28,1.66C19.32,35.8,18.38,36,17.37,36c-1.84,0-3.3-0.69-4.37-2.07v1.62H8V12H13z M19.3,28.31
c0-0.45-0.08-0.88-0.25-1.29c-0.17-0.41-0.4-0.76-0.69-1.06c-0.3-0.3-0.64-0.54-1.02-0.72c-0.39-0.18-0.81-0.27-1.27-0.27
c-0.44,0-0.86,0.09-1.24,0.26c-0.39,0.17-0.72,0.41-1.01,0.71c-0.29,0.3-0.52,0.66-0.69,1.06c-0.18,0.41-0.26,0.84-0.26,1.29
s0.08,0.88,0.25,1.28c0.17,0.4,0.4,0.74,0.69,1.04c0.29,0.29,0.64,0.53,1.04,0.71c0.4,0.18,0.82,0.27,1.26,0.27
c0.44,0,0.86-0.09,1.24-0.26c0.39-0.17,0.72-0.41,1.01-0.71c0.29-0.3,0.52-0.65,0.69-1.05C19.21,29.16,19.3,28.75,19.3,28.31z"/>
</g>
<svg width="265" height="265" viewBox="0 0 265 265" fill="none" xmlns="http://www.w3.org/2000/svg">
<g clip-path="url(#clip0_1_1799)">
<path d="M158.2 8.6V116.6C158.2 121.3 162 125.2 166.8 125.2H213.8C218 125.2 222 123.2 224.6 119.8L262.9 68.9C265.7 65.2 265.7 60.1 262.9 56.4L224.6 5.4C222 2 218 0 213.8 0H166.8C162 0 158.2 3.8 158.2 8.6Z" fill="#FF4E4E"/>
<path d="M158.2 148.4V256.4C158.2 261.1 162 265 166.8 265H213.8C218 265 222 263 224.6 259.6L262.9 208.7C265.7 205 265.7 199.9 262.9 196.2L224.6 145.3C222.1 141.9 218.1 139.9 213.8 139.9H166.8C162 139.8 158.2 143.7 158.2 148.4Z" fill="#6E56FF"/>
<path d="M0 8.6V116.6C0 121.3 3.8 125.2 8.6 125.2H109.6C113.8 125.2 117.8 123.2 120.4 119.8L155.9 72.5C160.3 66.6 160.3 58.5 155.9 52.6L120.3 5.4C117.8 2 113.8 0 109.5 0H8.6C3.8 0 0 3.8 0 8.6Z" fill="#F97777"/>
<path d="M0 148.4V256.4C0 261.1 3.8 265 8.6 265H109.6C113.8 265 117.8 263 120.4 259.6L155.9 212.3C160.3 206.4 160.3 198.3 155.9 192.4L120.4 145.1C117.9 141.7 113.9 139.7 109.6 139.7H8.6C3.8 139.8 0 143.7 0 148.4Z" fill="#9F8FFF"/>
</g>
<defs>
<clipPath id="clip0_1_1799">
<rect width="265" height="265" fill="white"/>
</clipPath>
</defs>
</svg>

Before: 6.1 KiB | After: 1.1 KiB

View File

@ -1,18 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Generator: Adobe Illustrator 25.2.3, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
viewBox="0 0 46.39 46.39" style="enable-background:new 0 0 46.39 46.39;" xml:space="preserve">
<style type="text/css">
.st0{fill:#FFFFFF;}
</style>
<g>
<path class="st0" d="M45.28,22.9c-0.49,28.44-42.79,28.43-43.27,0C2.5-5.54,44.8-5.54,45.28,22.9z"/>
<path d="M23.65,45.45c-29.64-0.48-29.63-44.63,0-45.1C53.28,0.82,53.28,44.98,23.65,45.45z M23.65,2.18
c-27.09,0.09-27.09,41.36,0,41.44C50.74,43.53,50.74,2.26,23.65,2.18z"/>
<path d="M41.94,21.07C38.86,8.69,3.47,8.77,5.01,24.45C5.74,49.51,46.24,46.16,41.94,21.07z"/>
<path class="st0" d="M14.69,22.35c0.06,4.27-6.65,4.27-6.58,0C8.05,18.08,14.76,18.08,14.69,22.35z"/>
<path class="st0" d="M11.07,28.39c0.02,1.7-2.65,1.7-2.62,0C8.42,26.68,11.09,26.68,11.07,28.39z"/>
<path class="st0" d="M30.56,16.2c0.28-1.27,6.76,0.79,8.45,5.64c1.69,4.84-2.11,12.22-3.52,11.43
C36.02,25.33,37.44,22.84,30.56,16.2z"/>
</g>
</svg>

Before: 1.2 KiB (file removed)

View File

@ -1,25 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Generator: Adobe Illustrator 25.2.3, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
viewBox="0 0 34.41 34.41" style="enable-background:new 0 0 34.41 34.41;" xml:space="preserve">
<style type="text/css">
.st0{fill:#FFFFFF;stroke:url(#SVGID_1_);stroke-miterlimit:10;}
.st1{fill:url(#SVGID_2_);}
.st2{fill:#FFFFFF;}
</style>
<g>
<linearGradient id="SVGID_1_" gradientUnits="userSpaceOnUse" x1="0.9311" y1="17.2025" x2="33.4739" y2="17.2025">
<stop offset="0" style="stop-color:#9E99FF"/>
<stop offset="1" style="stop-color:#5C45FF"/>
</linearGradient>
<path class="st0" d="M17.2,33.2c-21.03-0.34-21.03-31.67,0-32C38.23,1.54,38.23,32.87,17.2,33.2z"/>
<linearGradient id="SVGID_2_" gradientUnits="userSpaceOnUse" x1="3.9435" y1="20.058" x2="30.407" y2="20.058">
<stop offset="0" style="stop-color:#9E99FF"/>
<stop offset="1" style="stop-color:#5C45FF"/>
</linearGradient>
<path class="st1" d="M30.18,15.91C27.99,7.12,2.89,7.18,3.98,18.3C4.5,36.09,33.23,33.71,30.18,15.91z"/>
<path class="st2" d="M6.42,21.1c-0.02-1.21,1.88-1.21,1.86,0C8.29,22.3,6.4,22.3,6.42,21.1z"/>
<path class="st2" d="M6.18,16.81c-0.04-3.03,4.72-3.03,4.67,0C10.89,19.84,6.13,19.84,6.18,16.81z"/>
<path class="st2" d="M25.61,24.56c0.38-5.63,1.38-7.4-3.5-12.11c0.2-0.9,4.8,0.56,6,4C29.3,19.88,26.61,25.12,25.61,24.56z"/>
</g>
</svg>

Before: 1.6 KiB (file removed)

Binary file not shown.

Before: 787 B | After: 3.9 KiB

View File

@ -5,6 +5,7 @@ import {
encodeJSBinding,
findHBSBlocks,
} from "@budibase/string-templates"
import { capitalise } from "helpers"
/**
* Recursively searches for a specific component ID
@ -235,3 +236,13 @@ export const makeComponentUnique = component => {
// Recurse on all children
return JSON.parse(definition)
}
export const getComponentText = component => {
if (component?._instanceName) {
return component._instanceName
}
const type =
component._component.replace("@budibase/standard-components/", "") ||
"component"
return capitalise(type)
}

View File

@ -3,6 +3,7 @@ import { API } from "api"
import { cloneDeep } from "lodash/fp"
import { generate } from "shortid"
import { selectedAutomation } from "builderStore"
import { notifications } from "@budibase/bbui"
const initialAutomationState = {
automations: [],
@ -21,6 +22,37 @@ export const getAutomationStore = () => {
return store
}
const updateReferencesInObject = (obj, modifiedIndex, action) => {
const regex = /{{\s*steps\.(\d+)\./g
for (const key in obj) {
if (typeof obj[key] === "string") {
let matches
while ((matches = regex.exec(obj[key])) !== null) {
const referencedStep = parseInt(matches[1])
if (action === "add" && referencedStep >= modifiedIndex) {
obj[key] = obj[key].replace(
`{{ steps.${referencedStep}.`,
`{{ steps.${referencedStep + 1}.`
)
} else if (action === "delete" && referencedStep > modifiedIndex) {
obj[key] = obj[key].replace(
`{{ steps.${referencedStep}.`,
`{{ steps.${referencedStep - 1}.`
)
}
}
} else if (typeof obj[key] === "object" && obj[key] !== null) {
updateReferencesInObject(obj[key], modifiedIndex, action)
}
}
}
const updateStepReferences = (steps, modifiedIndex, action) => {
steps.forEach(step => {
updateReferencesInObject(step.inputs, modifiedIndex, action)
})
}
const automationActions = store => ({
definitions: async () => {
const response = await API.getAutomationDefinitions()
@ -218,10 +250,40 @@ const automationActions = store => ({
if (!automation) {
return
}
try {
updateStepReferences(newAutomation.definition.steps, blockIdx, "add")
} catch (e) {
notifications.error("Error adding automation block")
}
newAutomation.definition.steps.splice(blockIdx, 0, block)
await store.actions.save(newAutomation)
},
deleteAutomationBlock: async block => {
saveAutomationName: async (blockId, name) => {
const automation = get(selectedAutomation)
let newAutomation = cloneDeep(automation)
if (!automation) {
return
}
newAutomation.definition.stepNames = {
...newAutomation.definition.stepNames,
[blockId]: name.trim(),
}
await store.actions.save(newAutomation)
},
deleteAutomationName: async blockId => {
const automation = get(selectedAutomation)
let newAutomation = cloneDeep(automation)
if (!automation) {
return
}
delete newAutomation.definition.stepNames[blockId]
await store.actions.save(newAutomation)
},
deleteAutomationBlock: async (block, blockIdx) => {
const automation = get(selectedAutomation)
let newAutomation = cloneDeep(automation)
@ -233,7 +295,14 @@ const automationActions = store => ({
newAutomation.definition.steps = newAutomation.definition.steps.filter(
step => step.id !== block.id
)
delete newAutomation.definition.stepNames?.[block.id]
}
try {
updateStepReferences(newAutomation.definition.steps, blockIdx, "delete")
} catch (e) {
notifications.error("Error deleting automation block")
}
await store.actions.save(newAutomation)
},
replace: async (automationId, automation) => {

View File

@ -580,7 +580,7 @@ export const getFrontendStore = () => {
let table = validTables.find(table => {
return (
table.sourceId !== BUDIBASE_INTERNAL_DB_ID &&
table.type === DB_TYPE_INTERNAL
table.sourceType === DB_TYPE_INTERNAL
)
})
if (table) {
@ -591,7 +591,7 @@ export const getFrontendStore = () => {
table = validTables.find(table => {
return (
table.sourceId === BUDIBASE_INTERNAL_DB_ID &&
table.type === DB_TYPE_INTERNAL
table.sourceType === DB_TYPE_INTERNAL
)
})
if (table) {
@ -599,7 +599,7 @@ export const getFrontendStore = () => {
}
// Finally try an external table
return validTables.find(table => table.type === DB_TYPE_EXTERNAL)
return validTables.find(table => table.sourceType === DB_TYPE_EXTERNAL)
},
enrichEmptySettings: (component, opts) => {
if (!component?._component) {

View File

@ -2,14 +2,14 @@ import sanitizeUrl from "./utils/sanitizeUrl"
import { Screen } from "./utils/Screen"
import { Component } from "./utils/Component"
export default function (datasources) {
export default function (datasources, mode = "table") {
if (!Array.isArray(datasources)) {
return []
}
return datasources.map(datasource => {
return {
name: `${datasource.label} - List`,
create: () => createScreen(datasource),
create: () => createScreen(datasource, mode),
id: ROW_LIST_TEMPLATE,
resourceId: datasource.resourceId,
}
@ -40,10 +40,24 @@ const generateTableBlock = datasource => {
return tableBlock
}
const createScreen = datasource => {
const generateGridBlock = datasource => {
const gridBlock = new Component("@budibase/standard-components/gridblock")
gridBlock
.customProps({
table: datasource,
})
.instanceName(`${datasource.label} - Grid block`)
return gridBlock
}
const createScreen = (datasource, mode) => {
return new Screen()
.route(rowListUrl(datasource))
.instanceName(`${datasource.label} - List`)
.addChild(generateTableBlock(datasource))
.addChild(
mode === "table"
? generateTableBlock(datasource)
: generateGridBlock(datasource)
)
.json()
}

View File

@ -5,13 +5,7 @@
import TestDataModal from "./TestDataModal.svelte"
import { flip } from "svelte/animate"
import { fly } from "svelte/transition"
import {
Heading,
Icon,
ActionButton,
notifications,
Modal,
} from "@budibase/bbui"
import { Icon, notifications, Modal } from "@budibase/bbui"
import { ActionStepID } from "constants/backend/automations"
import UndoRedoControl from "components/common/UndoRedoControl.svelte"
import { automationHistoryStore } from "builderStore"
@ -20,9 +14,8 @@
let testDataModal
let confirmDeleteDialog
$: blocks = getBlocks(automation)
let scrolling = false
$: blocks = getBlocks(automation).filter(x => x.stepId !== ActionStepID.LOOP)
const getBlocks = automation => {
let blocks = []
if (automation.definition.trigger) {
@ -32,58 +25,71 @@
return blocks
}
async function deleteAutomation() {
const deleteAutomation = async () => {
try {
await automationStore.actions.delete($selectedAutomation)
} catch (error) {
notifications.error("Error deleting automation")
}
}
const handleScroll = e => {
if (e.target.scrollTop >= 30) {
scrolling = true
} else if (e.target.scrollTop) {
// Set scrolling back to false if scrolled back to less than 30px
scrolling = false
}
}
</script>
<div class="canvas">
<div class="header">
<Heading size="S">{automation.name}</Heading>
<div class="controls">
<UndoRedoControl store={automationHistoryStore} />
<div class="header" class:scrolling>
<div class="header-left">
<UndoRedoControl store={automationHistoryStore} />
</div>
<div class="controls">
<div
on:click={() => {
testDataModal.show()
}}
class="buttons"
>
<Icon hoverable size="M" name="Play" />
<div>Run test</div>
</div>
<div class="buttons">
<Icon
on:click={confirmDeleteDialog.show}
disabled={!$automationStore.testResults}
hoverable
size="M"
name="DeleteOutline"
name="Multiple"
/>
<div class="buttons">
<ActionButton
on:click={() => {
testDataModal.show()
}}
icon="MultipleCheck"
size="M">Run test</ActionButton
>
<ActionButton
disabled={!$automationStore.testResults}
on:click={() => {
$automationStore.showTestPanel = true
}}
size="M">Test Details</ActionButton
>
<div
class:disabled={!$automationStore.testResults}
on:click={() => {
$automationStore.showTestPanel = true
}}
>
Test details
</div>
</div>
</div>
</div>
<div class="content">
{#each blocks as block, idx (block.id)}
<div
class="block"
animate:flip={{ duration: 500 }}
in:fly={{ x: 500, duration: 500 }}
out:fly|local={{ x: 500, duration: 500 }}
>
{#if block.stepId !== ActionStepID.LOOP}
<FlowItem {testDataModal} {block} {idx} />
{/if}
</div>
{/each}
<div class="canvas" on:scroll={handleScroll}>
<div class="content">
{#each blocks as block, idx (block.id)}
<div
class="block"
animate:flip={{ duration: 500 }}
in:fly={{ x: 500, duration: 500 }}
out:fly|local={{ x: 500, duration: 500 }}
>
{#if block.stepId !== ActionStepID.LOOP}
<FlowItem {testDataModal} {block} {idx} />
{/if}
</div>
{/each}
</div>
</div>
<ConfirmDialog
bind:this={confirmDeleteDialog}
@ -103,6 +109,12 @@
<style>
.canvas {
padding: var(--spacing-l) var(--spacing-xl);
overflow-y: auto;
max-height: 100%;
}
.header-left :global(div) {
border-right: none;
}
/* Fix for firefox not respecting bottom padding in scrolling containers */
.canvas > *:last-child {
@ -117,23 +129,45 @@
}
.content {
display: inline-block;
text-align: left;
flex-grow: 1;
padding: 23px 23px 80px;
box-sizing: border-box;
}
.header.scrolling {
background: var(--background);
border-bottom: var(--border-light);
border-left: var(--border-light);
z-index: 1;
}
.header {
z-index: 1;
display: flex;
justify-content: space-between;
align-items: center;
padding-left: var(--spacing-l);
transition: background 130ms ease-out;
flex: 0 0 48px;
padding-right: var(--spacing-xl);
}
.controls {
display: flex;
gap: var(--spacing-xl);
}
.controls,
.buttons {
display: flex;
justify-content: flex-end;
align-items: center;
gap: var(--spacing-xl);
}
.buttons {
gap: var(--spacing-s);
}
.buttons:hover {
cursor: pointer;
}
.disabled {
pointer-events: none;
color: var(--spectrum-global-color-gray-500) !important;
}
</style>

View File

@ -7,20 +7,16 @@
Detail,
Modal,
Button,
ActionButton,
notifications,
Label,
AbsTooltip,
} from "@budibase/bbui"
import AutomationBlockSetup from "../../SetupPanel/AutomationBlockSetup.svelte"
import CreateWebhookModal from "components/automation/Shared/CreateWebhookModal.svelte"
import ActionModal from "./ActionModal.svelte"
import FlowItemHeader from "./FlowItemHeader.svelte"
import RoleSelect from "components/design/settings/controls/RoleSelect.svelte"
import {
ActionStepID,
TriggerStepID,
Features,
} from "constants/backend/automations"
import { ActionStepID, TriggerStepID } from "constants/backend/automations"
import { permissions } from "stores/backend"
export let block
@ -86,7 +82,7 @@
if (loopBlock) {
await automationStore.actions.deleteAutomationBlock(loopBlock)
}
await automationStore.actions.deleteAutomationBlock(block)
await automationStore.actions.deleteAutomationBlock(block, blockIdx)
} catch (error) {
notifications.error("Error saving automation")
}
@ -129,6 +125,10 @@
</div>
<div class="blockTitle">
<AbsTooltip type="negative" text="Remove looping">
<Icon on:click={removeLooping} hoverable name="DeleteOutline" />
</AbsTooltip>
<div style="margin-left: 10px;" on:click={() => {}}>
<Icon hoverable name={showLooping ? "ChevronDown" : "ChevronUp"} />
</div>
@ -139,9 +139,6 @@
<Divider noMargin />
{#if !showLooping}
<div class="blockSection">
<div class="block-options">
<ActionButton on:click={() => removeLooping()} icon="DeleteOutline" />
</div>
<Layout noPadding gap="S">
<AutomationBlockSetup
schemaProperties={Object.entries(
@ -162,31 +159,19 @@
{block}
{testDataModal}
{idx}
{addLooping}
{deleteStep}
on:toggle={() => (open = !open)}
/>
{#if open}
<Divider noMargin />
<div class="blockSection">
<Layout noPadding gap="S">
{#if !isTrigger}
<div>
<div class="block-options">
{#if !loopBlock && (block?.features?.[Features.LOOPING] || !block.features)}
<ActionButton on:click={() => addLooping()} icon="Reuse">
Add Looping
</ActionButton>
{/if}
<ActionButton
on:click={() => deleteStep()}
icon="DeleteOutline"
/>
</div>
</div>
{/if}
{#if isAppAction}
<Label>Role</Label>
<RoleSelect bind:value={role} />
<div>
<Label>Role</Label>
<RoleSelect bind:value={role} />
</div>
{/if}
<AutomationBlockSetup
schemaProperties={Object.entries(block.schema.inputs.properties)}
@ -270,5 +255,6 @@
.blockTitle {
display: flex;
align-items: center;
gap: var(--spacing-s);
}
</style>

View File

@ -1,8 +1,9 @@
<script>
import { automationStore } from "builderStore"
import { Icon, Body, Detail, StatusLight } from "@budibase/bbui"
import { automationStore, selectedAutomation } from "builderStore"
import { Icon, Body, StatusLight, AbsTooltip } from "@budibase/bbui"
import { externalActions } from "./ExternalActions"
import { createEventDispatcher } from "svelte"
import { Features } from "constants/backend/automations"
export let block
export let open
@ -10,9 +11,20 @@
export let testResult
export let isTrigger
export let idx
export let addLooping
export let deleteStep
export let enableNaming = true
let validRegex = /^[A-Za-z0-9_\s]+$/
let typing = false
const dispatch = createEventDispatcher()
$: stepNames = $selectedAutomation?.definition.stepNames
$: automationName = stepNames?.[block.id] || block?.name || ""
$: automationNameError = getAutomationNameError(automationName)
$: status = updateStatus(testResult, isTrigger)
$: isHeaderTrigger = isTrigger || block.type === "TRIGGER"
$: {
if (!testResult) {
testResult = $automationStore.testResults?.steps?.filter(step =>
@ -20,8 +32,9 @@
)?.[0]
}
}
$: isTrigger = isTrigger || block.type === "TRIGGER"
$: status = updateStatus(testResult, isTrigger)
$: loopBlock = $selectedAutomation?.definition.steps.find(
x => x.blockToLoop === block?.id
)
async function onSelect(block) {
await automationStore.update(state => {
@ -43,10 +56,49 @@
return { negative: true, message: "Error" }
}
}
const getAutomationNameError = name => {
if (stepNames) {
for (const [key, value] of Object.entries(stepNames)) {
if (name === value && key !== block.id) {
return "This name already exists, please enter a unique name"
}
}
}
if (name !== block.name && name?.length > 0) {
let invalidStepName = !validRegex.test(name)
if (invalidStepName) {
return "Please enter a name containing only letters, numbers, spaces and underscores"
}
return null
}
}
const startTyping = async () => {
typing = true
}
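// Persists the edited name: an empty value removes the custom name so the
// step falls back to its default, otherwise the new name is saved against
// the block id.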
const saveName = async () => {
if (automationNameError || block.name === automationName) {
return
}
if (automationName.length === 0) {
await automationStore.actions.deleteAutomationName(block.id)
} else {
await automationStore.actions.saveAutomationName(block.id, automationName)
}
}
</script>
<div class="blockSection">
<div on:click={() => dispatch("toggle")} class="splitHeader">
<div
class:typing={typing && !automationNameError}
class:typing-error={automationNameError}
class="blockSection"
>
<div class="splitHeader">
<div class="center-items">
{#if externalActions[block.stepId]}
<img
@ -67,40 +119,115 @@
</svg>
{/if}
<div class="iconAlign">
{#if isTrigger}
{#if isHeaderTrigger}
<Body size="XS"><b>Trigger</b></Body>
<Body size="XS">When this happens:</Body>
{:else}
<Body size="XS"><b>Step {idx}</b></Body>
<Body size="XS">Do this:</Body>
<div style="margin-left: 2px;">
<Body size="XS"><b>Step {idx}</b></Body>
</div>
{/if}
{#if enableNaming}
<input
class="input-text"
disabled={!enableNaming}
placeholder="Enter step name"
name="name"
autocomplete="off"
value={automationName}
on:input={e => {
automationName = e.target.value.trim()
}}
on:click={startTyping}
on:blur={async () => {
typing = false
if (automationNameError) {
automationName = stepNames[block.id] || block?.name
} else {
await saveName()
}
}}
/>
{:else}
<div class="input-text">
{automationName}
</div>
{/if}
<Detail size="S">{block?.name?.toUpperCase() || ""}</Detail>
</div>
</div>
<div class="blockTitle">
{#if showTestStatus && testResult}
<div style="float: right;">
<StatusLight
positive={status?.positive}
yellow={status?.yellow}
negative={status?.negative}
><Body size="XS">{status?.message}</Body></StatusLight
>
<div class="status-container">
<div style="float:right;">
<StatusLight
positive={status?.positive}
yellow={status?.yellow}
negative={status?.negative}
>
<Body size="XS">{status?.message}</Body>
</StatusLight>
</div>
<Icon
on:click={() => dispatch("toggle")}
hoverable
name={open ? "ChevronUp" : "ChevronDown"}
/>
</div>
{/if}
<div
style="margin-left: 10px; margin-bottom: var(--spacing-xs);"
class="context-actions"
class:hide-context-actions={typing}
on:click={() => {
onSelect(block)
}}
>
<Icon hoverable name={open ? "ChevronUp" : "ChevronDown"} />
{#if !showTestStatus}
{#if !isHeaderTrigger && !loopBlock && (block?.features?.[Features.LOOPING] || !block.features)}
<AbsTooltip type="info" text="Add looping">
<Icon on:click={addLooping} hoverable name="RotateCW" />
</AbsTooltip>
{/if}
{#if !isHeaderTrigger}
<AbsTooltip type="negative" text="Delete step">
<Icon on:click={deleteStep} hoverable name="DeleteOutline" />
</AbsTooltip>
{/if}
{/if}
{#if !showTestStatus}
<Icon
on:click={() => dispatch("toggle")}
hoverable
name={open ? "ChevronUp" : "ChevronDown"}
/>
{/if}
</div>
{#if automationNameError}
<div class="error-container">
<AbsTooltip type="negative" text={automationNameError}>
<div class="error-icon">
<Icon size="S" name="Alert" />
</div>
</AbsTooltip>
</div>
{/if}
</div>
</div>
</div>
<style>
.status-container {
display: flex;
align-items: center;
justify-content: space-between;
gap: var(--spacing-m);
/* Adjust padding or margin here if the spacing between the text and the chevron needs tuning. */
}
.context-actions {
display: flex;
gap: var(--spacing-l);
margin-bottom: var(--spacing-xs);
}
.center-items {
display: flex;
align-items: center;
@ -117,10 +244,58 @@
.blockSection {
padding: var(--spacing-xl);
border: 1px solid transparent;
}
.blockTitle {
display: flex;
align-items: center;
}
.hide-context-actions {
display: none;
}
input {
color: var(--ink);
background-color: transparent;
border: 1px solid transparent;
width: 230px;
box-sizing: border-box;
overflow: hidden;
white-space: nowrap;
}
.input-text {
font-size: var(--spectrum-alias-font-size-default);
font-family: var(--font-sans);
text-overflow: ellipsis;
}
input:focus {
outline: none;
}
/* Hide arrows for number fields */
input::-webkit-outer-spin-button,
input::-webkit-inner-spin-button {
-webkit-appearance: none;
margin: 0;
}
.typing {
border: 1px solid var(--spectrum-global-color-static-blue-500);
border-radius: 4px 4px 4px 4px;
}
.typing-error {
border: 1px solid var(--spectrum-global-color-static-red-500);
border-radius: 4px 4px 4px 4px;
}
.error-icon :global(.spectrum-Icon) {
fill: var(--spectrum-global-color-red-400);
}
.error-container {
padding-top: var(--spacing-xl);
}
</style>

View File

@ -60,6 +60,7 @@
<ModalContent
title="Add test data"
confirmText="Test"
size="M"
showConfirmButton={true}
disabled={isError}
onConfirm={testAutomation}

View File

@ -48,6 +48,7 @@
<div class="block" style={width ? `width: ${width}` : ""}>
{#if block.stepId !== ActionStepID.LOOP}
<FlowItemHeader
enableNaming={false}
open={!!openBlocks[block.id]}
on:toggle={() => (openBlocks[block.id] = !openBlocks[block.id])}
isTrigger={idx === 0}

View File

@ -58,7 +58,6 @@
let fillWidth = true
let inputData
let codeBindingOpen = false
$: filters = lookForFilters(schemaProperties) || []
$: tempFilters = filters
$: stepId = block.stepId
@ -155,7 +154,7 @@
}
let blockIdx = allSteps.findIndex(step => step.id === block.id)
// Extract all outputs from all previous steps as available bindins
// Extract all outputs from all previous steps as available bindings
let bindings = []
let loopBlockCount = 0
for (let idx = 0; idx < blockIdx; idx++) {
@ -183,20 +182,19 @@
}
}
const outputs = Object.entries(schema)
let bindingIcon = ""
let bindindingRank = 0
let bindingRank = 0
if (idx === 0) {
bindingIcon = automation.trigger.icon
} else if (isLoopBlock) {
bindingIcon = "Reuse"
bindindingRank = idx + 1
bindingRank = idx + 1
} else {
bindingIcon = allSteps[idx].icon
bindindingRank = idx - loopBlockCount
bindingRank = idx - loopBlockCount
}
let bindingName =
automation.stepNames?.[allSteps[idx - loopBlockCount].id]
bindings = bindings.concat(
outputs.map(([name, value]) => {
let runtimeName = isLoopBlock
@ -205,14 +203,20 @@
? `steps[${idx - loopBlockCount}].${name}`
: `steps.${idx - loopBlockCount}.${name}`
const runtime = idx === 0 ? `trigger.${name}` : runtimeName
const categoryName =
idx === 0
? "Trigger outputs"
: isLoopBlock
? "Loop Outputs"
: `Step ${idx - loopBlockCount} outputs`
let categoryName
if (idx === 0) {
categoryName = "Trigger outputs"
} else if (isLoopBlock) {
categoryName = "Loop Outputs"
} else if (bindingName) {
categoryName = `${bindingName} outputs`
} else {
categoryName = `Step ${idx - loopBlockCount} outputs`
}
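// Readable bindings prefer the user-defined step name when one exists,
// for example "My Step.name" instead of "steps.1.name" (illustrative names);
// the runtime binding format is unchanged.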
return {
readableBinding: runtime,
readableBinding: bindingName ? `${bindingName}.${name}` : runtime,
runtimeBinding: runtime,
type: value.type,
description: value.description,
@ -221,7 +225,7 @@
display: {
type: value.type,
name: name,
rank: bindindingRank,
rank: bindingRank,
},
}
})
@ -277,6 +281,16 @@
return !dependsOn || !!inputData[dependsOn]
}
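// Field types that render their own full-width UI (row, code, queryParams,
// cron, triggerSchema) opt out of the standard side-by-side label/field layout.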
function shouldRenderField(value) {
return (
value.customType !== "row" &&
value.customType !== "code" &&
value.customType !== "queryParams" &&
value.customType !== "cron" &&
value.customType !== "triggerSchema"
)
}
onMount(async () => {
try {
await environment.loadVariables()
@ -289,245 +303,248 @@
<div class="fields">
{#each schemaProperties as [key, value]}
{#if canShowField(key, value)}
<div class="block-field">
{#if key !== "fields" && value.type !== "boolean"}
<div class:block-field={shouldRenderField(value)}>
{#if key !== "fields" && value.type !== "boolean" && shouldRenderField(value)}
<Label
tooltip={value.title === "Binding / Value"
? "If using the String input type, please use a comma or newline separated string"
: null}>{value.title || (key === "row" ? "Table" : key)}</Label
>
{/if}
{#if value.type === "string" && value.enum && canShowField(key, value)}
<Select
on:change={e => onChange(e, key)}
value={inputData[key]}
placeholder={false}
options={value.enum}
getOptionLabel={(x, idx) => (value.pretty ? value.pretty[idx] : x)}
/>
{:else if value.type === "json"}
<Editor
editorHeight="250"
editorWidth="448"
mode="json"
value={inputData[key]?.value}
on:change={e => {
onChange(e, key)
}}
/>
{:else if value.type === "boolean"}
<div style="margin-top: 10px">
<Checkbox
text={value.title}
value={inputData[key]}
<div class:field-width={shouldRenderField(value)}>
{#if value.type === "string" && value.enum && canShowField(key, value)}
<Select
on:change={e => onChange(e, key)}
/>
</div>
{:else if value.type === "date"}
<DrawerBindableSlot
fillWidth
title={value.title}
panel={AutomationBindingPanel}
type={"date"}
value={inputData[key]}
on:change={e => onChange(e, key)}
{bindings}
allowJS={true}
updateOnChange={false}
drawerLeft="260px"
>
<DatePicker
value={inputData[key]}
on:change={e => onChange(e, key)}
placeholder={false}
options={value.enum}
getOptionLabel={(x, idx) =>
value.pretty ? value.pretty[idx] : x}
/>
</DrawerBindableSlot>
{:else if value.customType === "column"}
<Select
on:change={e => onChange(e, key)}
value={inputData[key]}
options={Object.keys(table?.schema || {})}
/>
{:else if value.customType === "filters"}
<ActionButton on:click={drawer.show}>Define filters</ActionButton>
<Drawer bind:this={drawer} {fillWidth} title="Filtering">
<Button cta slot="buttons" on:click={() => saveFilters(key)}>
Save
</Button>
<FilterDrawer
slot="body"
{filters}
{bindings}
{schemaFields}
datasource={{ type: "table", tableId }}
panel={AutomationBindingPanel}
fillWidth
on:change={e => (tempFilters = e.detail)}
{:else if value.type === "json"}
<Editor
editorHeight="250"
editorWidth="448"
mode="json"
value={inputData[key]?.value}
on:change={e => {
onChange(e, key)
}}
/>
</Drawer>
{:else if value.customType === "password"}
<Input
type="password"
on:change={e => onChange(e, key)}
value={inputData[key]}
/>
{:else if value.customType === "email"}
{#if isTestModal}
<ModalBindableInput
title={value.title}
value={inputData[key]}
panel={AutomationBindingPanel}
type="email"
on:change={e => onChange(e, key)}
{bindings}
fillWidth
updateOnChange={false}
/>
{:else}
<DrawerBindableInput
{:else if value.type === "boolean"}
<div style="margin-top: 10px">
<Checkbox
text={value.title}
value={inputData[key]}
on:change={e => onChange(e, key)}
/>
</div>
{:else if value.type === "date"}
<DrawerBindableSlot
fillWidth
title={value.title}
panel={AutomationBindingPanel}
type="email"
type={"date"}
value={inputData[key]}
on:change={e => onChange(e, key)}
{bindings}
allowJS={false}
allowJS={true}
updateOnChange={false}
drawerLeft="260px"
/>
{/if}
{:else if value.customType === "query"}
<QuerySelector
on:change={e => onChange(e, key)}
value={inputData[key]}
/>
{:else if value.customType === "cron"}
<CronBuilder
on:change={e => onChange(e, key)}
value={inputData[key]}
/>
{:else if value.customType === "queryParams"}
<QueryParamSelector
on:change={e => onChange(e, key)}
value={inputData[key]}
{bindings}
/>
{:else if value.customType === "table"}
<TableSelector
{isTrigger}
value={inputData[key]}
on:change={e => onChange(e, key)}
/>
{:else if value.customType === "row"}
<RowSelector
value={inputData[key]}
meta={inputData["meta"] || {}}
on:change={e => {
if (e.detail?.key) {
onChange(e, e.detail.key)
} else {
onChange(e, key)
}
}}
{bindings}
{isTestModal}
{isUpdateRow}
/>
{:else if value.customType === "webhookUrl"}
<WebhookDisplay
on:change={e => onChange(e, key)}
value={inputData[key]}
/>
{:else if value.customType === "fields"}
<FieldSelector
{block}
value={inputData[key]}
on:change={e => onChange(e, key)}
{bindings}
{isTestModal}
/>
{:else if value.customType === "triggerSchema"}
<SchemaSetup
on:change={e => onChange(e, key)}
value={inputData[key]}
/>
{:else if value.customType === "code"}
<CodeEditorModal>
{#if codeMode == EditorModes.JS}
<ActionButton
on:click={() => (codeBindingOpen = !codeBindingOpen)}
quiet
icon={codeBindingOpen ? "ChevronDown" : "ChevronRight"}
>
<Detail size="S">Bindings</Detail>
</ActionButton>
{#if codeBindingOpen}
<pre>{JSON.stringify(bindings, null, 2)}</pre>
{/if}
{/if}
<CodeEditor
value={inputData[key]}
on:change={e => {
// need to pass without the value inside
onChange({ detail: e.detail }, key)
inputData[key] = e.detail
}}
completions={stepCompletions}
mode={codeMode}
autocompleteEnabled={codeMode != EditorModes.JS}
height={500}
/>
<div class="messaging">
{#if codeMode == EditorModes.Handlebars}
<Icon name="FlashOn" />
<div class="messaging-wrap">
<div>
Add available bindings by typing <strong>
&#125;&#125;
</strong>
</div>
</div>
{/if}
</div>
</CodeEditorModal>
{:else if value.customType === "loopOption"}
<Select
on:change={e => onChange(e, key)}
autoWidth
value={inputData[key]}
options={["Array", "String"]}
defaultValue={"Array"}
/>
{:else if value.type === "string" || value.type === "number" || value.type === "integer"}
{#if isTestModal}
<ModalBindableInput
title={value.title}
value={inputData[key]}
panel={AutomationBindingPanel}
type={value.customType}
>
<DatePicker
value={inputData[key]}
on:change={e => onChange(e, key)}
/>
</DrawerBindableSlot>
{:else if value.customType === "column"}
<Select
on:change={e => onChange(e, key)}
{bindings}
updateOnChange={false}
value={inputData[key]}
options={Object.keys(table?.schema || {})}
/>
{:else}
<div class="test">
{:else if value.customType === "filters"}
<ActionButton on:click={drawer.show}>Define filters</ActionButton>
<Drawer bind:this={drawer} {fillWidth} title="Filtering">
<Button cta slot="buttons" on:click={() => saveFilters(key)}>
Save
</Button>
<FilterDrawer
slot="body"
{filters}
{bindings}
{schemaFields}
datasource={{ type: "table", tableId }}
panel={AutomationBindingPanel}
fillWidth
on:change={e => (tempFilters = e.detail)}
/>
</Drawer>
{:else if value.customType === "password"}
<Input
type="password"
on:change={e => onChange(e, key)}
value={inputData[key]}
/>
{:else if value.customType === "email"}
{#if isTestModal}
<ModalBindableInput
title={value.title}
value={inputData[key]}
panel={AutomationBindingPanel}
type="email"
on:change={e => onChange(e, key)}
{bindings}
fillWidth
updateOnChange={false}
/>
{:else}
<DrawerBindableInput
fillWidth={true}
fillWidth
title={value.title}
panel={AutomationBindingPanel}
type={value.customType}
type="email"
value={inputData[key]}
on:change={e => onChange(e, key)}
{bindings}
allowJS={false}
updateOnChange={false}
placeholder={value.customType === "queryLimit"
? queryLimit
: ""}
drawerLeft="260px"
/>
</div>
{/if}
{:else if value.customType === "query"}
<QuerySelector
on:change={e => onChange(e, key)}
value={inputData[key]}
/>
{:else if value.customType === "cron"}
<CronBuilder
on:change={e => onChange(e, key)}
value={inputData[key]}
/>
{:else if value.customType === "queryParams"}
<QueryParamSelector
on:change={e => onChange(e, key)}
value={inputData[key]}
{bindings}
/>
{:else if value.customType === "table"}
<TableSelector
{isTrigger}
value={inputData[key]}
on:change={e => onChange(e, key)}
/>
{:else if value.customType === "row"}
<RowSelector
value={inputData[key]}
meta={inputData["meta"] || {}}
on:change={e => {
if (e.detail?.key) {
onChange(e, e.detail.key)
} else {
onChange(e, key)
}
}}
{bindings}
{isTestModal}
{isUpdateRow}
/>
{:else if value.customType === "webhookUrl"}
<WebhookDisplay
on:change={e => onChange(e, key)}
value={inputData[key]}
/>
{:else if value.customType === "fields"}
<FieldSelector
{block}
value={inputData[key]}
on:change={e => onChange(e, key)}
{bindings}
{isTestModal}
/>
{:else if value.customType === "triggerSchema"}
<SchemaSetup
on:change={e => onChange(e, key)}
value={inputData[key]}
/>
{:else if value.customType === "code"}
<CodeEditorModal>
{#if codeMode == EditorModes.JS}
<ActionButton
on:click={() => (codeBindingOpen = !codeBindingOpen)}
quiet
icon={codeBindingOpen ? "ChevronDown" : "ChevronRight"}
>
<Detail size="S">Bindings</Detail>
</ActionButton>
{#if codeBindingOpen}
<pre>{JSON.stringify(bindings, null, 2)}</pre>
{/if}
{/if}
<CodeEditor
value={inputData[key]}
on:change={e => {
// need to pass without the value inside
onChange({ detail: e.detail }, key)
inputData[key] = e.detail
}}
completions={stepCompletions}
mode={codeMode}
autocompleteEnabled={codeMode != EditorModes.JS}
height={500}
/>
<div class="messaging">
{#if codeMode == EditorModes.Handlebars}
<Icon name="FlashOn" />
<div class="messaging-wrap">
<div>
Add available bindings by typing <strong>
&#125;&#125;
</strong>
</div>
</div>
{/if}
</div>
</CodeEditorModal>
{:else if value.customType === "loopOption"}
<Select
on:change={e => onChange(e, key)}
autoWidth
value={inputData[key]}
options={["Array", "String"]}
defaultValue={"Array"}
/>
{:else if value.type === "string" || value.type === "number" || value.type === "integer"}
{#if isTestModal}
<ModalBindableInput
title={value.title}
value={inputData[key]}
panel={AutomationBindingPanel}
type={value.customType}
on:change={e => onChange(e, key)}
{bindings}
updateOnChange={false}
/>
{:else}
<div class="test">
<DrawerBindableInput
fillWidth={true}
title={value.title}
panel={AutomationBindingPanel}
type={value.customType}
value={inputData[key]}
on:change={e => onChange(e, key)}
{bindings}
updateOnChange={false}
placeholder={value.customType === "queryLimit"
? queryLimit
: ""}
drawerLeft="260px"
/>
</div>
{/if}
{/if}
{/if}
</div>
</div>
{/if}
{/each}
@ -541,6 +558,10 @@
{/if}
<style>
.field-width {
width: 320px;
}
.messaging {
display: flex;
align-items: center;
@ -555,8 +576,13 @@
}
.block-field {
display: grid;
grid-gap: 5px;
display: flex; /* Use Flexbox */
justify-content: space-between;
align-items: center; /* Align label and field vertically */
flex-direction: row; /* Arrange label and field side by side */
gap: 10px; /* Space between label and field */
flex: 1;
}
.test :global(.drawer) {

View File

@ -23,7 +23,9 @@
</div>
</ModalContent>
</Modal>
<Button primary on:click={show}>Edit Code</Button>
<div class="center">
<Button primary on:click={show}>Edit Code</Button>
</div>
<style>
.container :global(section > header) {
@ -33,4 +35,9 @@
.container :global(textarea) {
min-height: 60px;
}
.center {
display: flex;
justify-content: center;
}
</style>

View File

@ -1,7 +1,7 @@
<script>
import { createEventDispatcher } from "svelte"
import { queries } from "stores/backend"
import { Select } from "@budibase/bbui"
import { Select, Label } from "@budibase/bbui"
import DrawerBindableInput from "../../common/bindings/DrawerBindableInput.svelte"
import AutomationBindingPanel from "../../common/bindings/ServerBindingPanel.svelte"
@ -27,42 +27,56 @@
$: if (value?.queryId == null) value = { queryId: "" }
</script>
<div class="block-field">
<Select
label="Query"
on:change={onChangeQuery}
value={value.queryId}
options={$queries.list}
getOptionValue={query => query._id}
getOptionLabel={query => query.name}
/>
<div class="schema-field">
<Label>Query</Label>
<div class="field-width">
<Select
on:change={onChangeQuery}
value={value.queryId}
options={$queries.list}
getOptionValue={query => query._id}
getOptionLabel={query => query.name}
/>
</div>
</div>
{#if parameters.length}
<div class="schema-fields">
{#each parameters as field}
<DrawerBindableInput
panel={AutomationBindingPanel}
extraThin
value={value[field.name]}
on:change={e => onChange(e, field)}
label={field.name}
type="string"
{bindings}
fillWidth={true}
updateOnChange={false}
/>
{/each}
</div>
{#each parameters as field}
<div class="schema-field">
<Label>{field.name}</Label>
<div class="field-width">
<DrawerBindableInput
panel={AutomationBindingPanel}
extraThin
value={value[field.name]}
on:change={e => onChange(e, field)}
type="string"
{bindings}
fillWidth={true}
updateOnChange={false}
/>
</div>
</div>
{/each}
{/if}
<style>
.schema-fields {
display: grid;
grid-gap: var(--spacing-xl);
margin-top: var(--spacing-xl);
.field-width {
width: 320px;
}
.schema-fields :global(label) {
.schema-field {
display: flex;
justify-content: space-between;
align-items: center;
flex-direction: row;
gap: 10px;
flex: 1;
margin-bottom: 10px;
}
.schema-field :global(label) {
text-transform: capitalize;
}
</style>

View File

@ -1,10 +1,11 @@
<script>
import { tables } from "stores/backend"
import { Select, Checkbox } from "@budibase/bbui"
import { Select, Checkbox, Label } from "@budibase/bbui"
import { createEventDispatcher } from "svelte"
import RowSelectorTypes from "./RowSelectorTypes.svelte"
import DrawerBindableSlot from "../../common/bindings/DrawerBindableSlot.svelte"
import AutomationBindingPanel from "../../common/bindings/ServerBindingPanel.svelte"
import { TableNames } from "constants"
const dispatch = createEventDispatcher()
@ -99,41 +100,25 @@
$: if (value?.tableId == null) value = { tableId: "" }
</script>
<Select
on:change={onChangeTable}
value={value.tableId}
options={$tables.list}
getOptionLabel={table => table.name}
getOptionValue={table => table._id}
/>
<div class="schema-fields">
<Label>Table</Label>
<div class="field-width">
<Select
on:change={onChangeTable}
value={value.tableId}
options={$tables.list.filter(table => table._id !== TableNames.USERS)}
getOptionLabel={table => table.name}
getOptionValue={table => table._id}
/>
</div>
</div>
{#if schemaFields.length}
<div class="schema-fields">
{#each schemaFields as [field, schema]}
{#if !schema.autocolumn && schema.type !== "attachment"}
{#if isTestModal}
<RowSelectorTypes
{isTestModal}
{field}
{schema}
bindings={parsedBindings}
{value}
{onChange}
/>
{:else}
<DrawerBindableSlot
fillWidth
title={value.title}
label={field}
panel={AutomationBindingPanel}
type={schema.type}
{schema}
value={value[field]}
on:change={e => onChange(e, field)}
{bindings}
allowJS={true}
updateOnChange={false}
drawerLeft="260px"
>
{#each schemaFields as [field, schema]}
{#if !schema.autocolumn && schema.type !== "attachment"}
<div class="schema-fields">
<Label>{field}</Label>
<div class="field-width">
{#if isTestModal}
<RowSelectorTypes
{isTestModal}
{field}
@ -142,28 +127,61 @@
{value}
{onChange}
/>
</DrawerBindableSlot>
{/if}
{/if}
{#if isUpdateRow && schema.type === "link"}
<div class="checkbox-field">
<Checkbox
value={meta.fields?.[field]?.clearRelationships}
text={"Clear relationships if empty?"}
size={"S"}
on:change={e => onChangeSetting(e, field)}
/>
{:else}
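<!-- Outside the test modal, the field is wrapped in a bindable drawer slot so
     its value can also be supplied from bindings or JS expressions. -->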
<DrawerBindableSlot
fillWidth
title={value.title}
panel={AutomationBindingPanel}
type={schema.type}
{schema}
value={value[field]}
on:change={e => onChange(e, field)}
{bindings}
allowJS={true}
updateOnChange={false}
drawerLeft="260px"
>
<RowSelectorTypes
{isTestModal}
{field}
{schema}
bindings={parsedBindings}
{value}
{onChange}
/>
</DrawerBindableSlot>
{/if}
{#if isUpdateRow && schema.type === "link"}
<div class="checkbox-field">
<Checkbox
value={meta.fields?.[field]?.clearRelationships}
text={"Clear relationships if empty?"}
size={"S"}
on:change={e => onChangeSetting(e, field)}
/>
</div>
{/if}
</div>
{/if}
{/each}
</div>
</div>
{/if}
{/each}
{/if}
<style>
.field-width {
width: 320px;
}
.schema-fields {
display: grid;
grid-gap: var(--spacing-s);
margin-top: var(--spacing-s);
display: flex;
justify-content: space-between;
align-items: center;
flex-direction: row;
gap: 10px;
flex: 1;
margin-bottom: 10px;
}
.schema-fields :global(label) {
text-transform: capitalize;

View File

@ -1,11 +1,5 @@
<script>
import {
Select,
DatePicker,
Multiselect,
TextArea,
Label,
} from "@budibase/bbui"
import { Select, DatePicker, Multiselect, TextArea } from "@budibase/bbui"
import LinkedRowSelector from "components/common/LinkedRowSelector.svelte"
import DrawerBindableInput from "../../common/bindings/DrawerBindableInput.svelte"
import ModalBindableInput from "../../common/bindings/ModalBindableInput.svelte"
@ -33,20 +27,14 @@
{#if schemaHasOptions(schema) && schema.type !== "array"}
<Select
on:change={e => onChange(e, field)}
label={field}
value={value[field]}
options={schema.constraints.inclusion}
/>
{:else if schema.type === "datetime"}
<DatePicker
label={field}
value={value[field]}
on:change={e => onChange(e, field)}
/>
<DatePicker value={value[field]} on:change={e => onChange(e, field)} />
{:else if schema.type === "boolean"}
<Select
on:change={e => onChange(e, field)}
label={field}
value={value[field]}
options={[
{ label: "True", value: "true" },
@ -56,19 +44,13 @@
{:else if schema.type === "array"}
<Multiselect
bind:value={value[field]}
label={field}
options={schema.constraints.inclusion}
on:change={e => onChange(e, field)}
/>
{:else if schema.type === "longform"}
<TextArea
label={field}
bind:value={value[field]}
on:change={e => onChange(e, field)}
/>
<TextArea bind:value={value[field]} on:change={e => onChange(e, field)} />
{:else if schema.type === "json"}
<span>
<Label>{field}</Label>
<Editor
editorHeight="150"
mode="json"
@ -85,6 +67,7 @@
bind:linkedRows={value[field]}
{schema}
on:change={e => onChange(e, field)}
useLabel={false}
/>
{:else if schema.type === "string" || schema.type === "number"}
<svelte:component
@ -92,7 +75,6 @@
panel={AutomationBindingPanel}
value={value[field]}
on:change={e => onChange(e, field)}
label={field}
type="string"
bindings={parsedBindings}
fillWidth={true}

View File

@ -22,7 +22,7 @@
<Select
on:change={onChange}
bind:value
options={filteredTables}
options={filteredTables.filter(table => table._id !== TableNames.USERS)}
getOptionLabel={table => table.name}
getOptionValue={table => table._id}
/>

View File

@ -16,7 +16,6 @@
$: linkedTable = $tables.list.find(table => table._id === linkedTableId)
$: schema = linkedTable?.schema
$: table = $tables.list.find(table => table._id === tableId)
$: type = table?.type
$: fetchData(tableId, rowId)
$: {
let rowLabel = row?.[table?.primaryDisplay]
@ -41,5 +40,5 @@
</script>
{#if row && row._id === rowId}
<Table {title} {schema} {data} {type} />
<Table {title} {schema} {data} />
{/if}

View File

@ -24,17 +24,23 @@
let selectedRows = []
let customRenderers = []
let parsedSchema = {}
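// Normalise the incoming schema: entries may be a bare type string or a full
// column definition object; coerce them all to objects and mark columns whose
// type can't be sorted as non-sortable.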
$: if (schema) {
parsedSchema = Object.keys(schema).reduce((acc, key) => {
acc[key] =
typeof schema[key] === "string" ? { type: schema[key] } : schema[key]
if (!canBeSortColumn(acc[key].type)) {
acc[key].sortable = false
}
return acc
}, {})
}
$: selectedRows, dispatch("selectionUpdated", selectedRows)
$: isUsersTable = tableId === TableNames.USERS
$: data && resetSelectedRows()
$: {
Object.values(schema || {}).forEach(col => {
if (!canBeSortColumn(col.type)) {
col.sortable = false
}
})
}
$: {
if (isUsersTable) {
customRenderers = [
@ -44,24 +50,24 @@
},
]
UNEDITABLE_USER_FIELDS.forEach(field => {
if (schema[field]) {
schema[field].editable = false
if (parsedSchema[field]) {
parsedSchema[field].editable = false
}
})
if (schema.email) {
schema.email.displayName = "Email"
if (parsedSchema.email) {
parsedSchema.email.displayName = "Email"
}
if (schema.roleId) {
schema.roleId.displayName = "Role"
if (parsedSchema.roleId) {
parsedSchema.roleId.displayName = "Role"
}
if (schema.firstName) {
schema.firstName.displayName = "First Name"
if (parsedSchema.firstName) {
parsedSchema.firstName.displayName = "First Name"
}
if (schema.lastName) {
schema.lastName.displayName = "Last Name"
if (parsedSchema.lastName) {
parsedSchema.lastName.displayName = "Last Name"
}
if (schema.status) {
schema.status.displayName = "Status"
if (parsedSchema.status) {
parsedSchema.status.displayName = "Status"
}
}
}
@ -97,7 +103,7 @@
<div class="table-wrapper">
<Table
{data}
{schema}
schema={parsedSchema}
{loading}
{customRenderers}
{rowCount}

Some files were not shown because too many files have changed in this diff.