Merge branch 'master' of github.com:Budibase/budibase into labday/sqs

mike12345567 2023-10-17 14:53:09 +01:00
commit b267e4ca28
168 changed files with 4443 additions and 4096 deletions

View File

@ -1,9 +1,14 @@
-packages/server/node_modules
-packages/builder
-packages/frontend-core
-packages/backend-core
-packages/worker/node_modules
-packages/cli
-packages/client
-packages/bbui
-packages/string-templates
+*
+!/packages/
+!/scripts/
+/packages/*/node_modules
+packages/server/scripts/
+!packages/server/scripts/integrations/oracle
+!nx.json
+!/hosting/single/
+!/hosting/letsencrypt/
+
+!package.json
+!yarn.lock
+!lerna.json
+!.yarnrc

View File

@ -10,7 +10,6 @@ on:
   push:
     branches:
       - master
-      - develop
   pull_request:
   workflow_dispatch:

@ -20,18 +19,12 @@ env:
   PERSONAL_ACCESS_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
   NX_BASE_BRANCH: origin/${{ github.base_ref }}
   USE_NX_AFFECTED: ${{ github.event_name == 'pull_request' && github.base_ref != 'master'}}
+  NX_CLOUD_ACCESS_TOKEN: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}

 jobs:
   lint:
     runs-on: ubuntu-latest
     steps:
-      - name: Maximize build space
-        uses: easimon/maximize-build-space@master
-        with:
-          root-reserve-mb: 35000
-          swap-size-mb: 1024
-          remove-android: "true"
-          remove-dotnet: "true"
       - name: Checkout repo and submodules
         uses: actions/checkout@v3
         if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase'
@ -268,20 +261,21 @@ jobs:
           branch="${{ github.base_ref || github.ref_name }}"
           echo "Running on branch '$branch' (base_ref=${{ github.base_ref }}, ref_name=${{ github.head_ref }})"
-          if [[ $branch == "master" ]]; then
-            base_commit=$(git rev-parse origin/master)
-          else
-            base_commit=$(git rev-parse origin/develop)
-          fi
-          echo "target_branch=$branch"
-          echo "target_branch=$branch" >> "$GITHUB_OUTPUT"
-          echo "pro_commit=$pro_commit"
-          echo "pro_commit=$pro_commit" >> "$GITHUB_OUTPUT"
-          echo "base_commit=$base_commit"
-          echo "base_commit=$base_commit" >> "$GITHUB_OUTPUT"
+          base_commit=$(git rev-parse origin/master)
+          if [[ ! -z $base_commit ]]; then
+            echo "target_branch=$branch"
+            echo "target_branch=$branch" >> "$GITHUB_OUTPUT"
+            echo "pro_commit=$pro_commit"
+            echo "pro_commit=$pro_commit" >> "$GITHUB_OUTPUT"
+            echo "base_commit=$base_commit"
+            echo "base_commit=$base_commit" >> "$GITHUB_OUTPUT"
+          else
+            echo "Nothing to do - branch to branch merge."
+          fi
-      - name: Check submodule merged to develop
+      - name: Check submodule merged to base branch
+        if: ${{ steps.get_pro_commits.outputs.base_commit != '' }}
         uses: actions/github-script@v4
         with:
           github-token: ${{ secrets.GITHUB_TOKEN }}

@ -290,7 +284,7 @@ jobs:
             const baseCommit = '${{ steps.get_pro_commits.outputs.base_commit }}';
             if (submoduleCommit !== baseCommit) {
-              console.error('Submodule commit does not match the latest commit on the "${{ steps.get_pro_commits.outputs.target_branch }}"" branch.');
+              console.error('Submodule commit does not match the latest commit on the "${{ steps.get_pro_commits.outputs.target_branch }}" branch.');
               console.error('Refer to the pro repo to merge your changes: https://github.com/Budibase/budibase-pro/blob/develop/docs/getting_started.md')
               process.exit(1);
             } else {
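The rewritten step relies on GitHub Actions' `$GITHUB_OUTPUT` file: the `run:` script appends `key=value` lines to it, and later steps read the values back through the `steps.<id>.outputs` context (here the follow-up check is skipped when `base_commit` is empty). A minimal bash sketch of the same pattern; the step id `get_pro_commits` is the one used above, everything else is illustrative:

```bash
#!/usr/bin/env bash
# Runs inside a workflow step declared with:
#   - id: get_pro_commits
#     run: ...
base_commit=$(git rev-parse origin/master)

# Each key=value line appended to $GITHUB_OUTPUT becomes
# ${{ steps.get_pro_commits.outputs.<key> }} for later steps.
echo "base_commit=$base_commit" >> "$GITHUB_OUTPUT"

# A later step can then guard on the value, e.g.:
#   if: ${{ steps.get_pro_commits.outputs.base_commit != '' }}
```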

View File

@ -1,29 +0,0 @@
name: check_unreleased_changes
on:
pull_request:
branches:
- master
jobs:
check_unreleased:
runs-on: ubuntu-latest
steps:
- name: Check for unreleased changes
env:
REPO: "Budibase/budibase"
TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
RELEASE_TIMESTAMP=$(curl -s -H "Authorization: token $TOKEN" \
"https://api.github.com/repos/$REPO/releases/latest" | \
jq -r .published_at)
COMMIT_TIMESTAMP=$(curl -s -H "Authorization: token $TOKEN" \
"https://api.github.com/repos/$REPO/commits/master" | \
jq -r .commit.committer.date)
RELEASE_SECONDS=$(date --date="$RELEASE_TIMESTAMP" "+%s")
COMMIT_SECONDS=$(date --date="$COMMIT_TIMESTAMP" "+%s")
if (( COMMIT_SECONDS > RELEASE_SECONDS )); then
echo "There are unreleased changes. Please release these changes before merging."
exit 1
fi
echo "No unreleased changes detected."

View File

@ -4,7 +4,13 @@ on:
   pull_request:
     types: [closed]
     branches:
-      - develop
+      - master
+  workflow_dispatch:
+    inputs:
+      BRANCH:
+        type: string
+        description: Which featurebranch branch to destroy?
+        required: true

 jobs:
   release:

@ -13,7 +19,7 @@ jobs:
       - uses: actions/checkout@v3
       - uses: passeidireto/trigger-external-workflow-action@main
         env:
-          PAYLOAD_BRANCH: ${{ github.head_ref }}
+          PAYLOAD_BRANCH: ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.BRANCH || github.head_ref }}
           PAYLOAD_PR_NUMBER: ${{ github.event.pull_request.number }}
         with:
           repository: budibase/budibase-deploys

View File

@ -3,7 +3,7 @@ name: deploy-featurebranch
 on:
   pull_request:
     branches:
-      - develop
+      - master

 jobs:
   release:

View File

@ -1,41 +0,0 @@
name: "deploy-preprod"
on:
workflow_dispatch:
workflow_call:
jobs:
deploy-to-legacy-preprod-env:
runs-on: ubuntu-latest
steps:
- name: Fail if not a tag
run: |
if [[ $GITHUB_REF != refs/tags/* ]]; then
echo "Workflow Dispatch can only be run on tags"
exit 1
fi
- uses: actions/checkout@v2
with:
fetch-depth: 0
- name: Fail if tag is not in master
run: |
if ! git merge-base --is-ancestor ${{ github.sha }} origin/master; then
echo "Tag is not in master. This pipeline can only execute tags that are present on the master branch"
exit 1
fi
- name: Get the latest budibase release version
id: version
run: |
release_version=$(cat lerna.json | jq -r '.version')
echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV
- uses: passeidireto/trigger-external-workflow-action@main
env:
PAYLOAD_VERSION: ${{ env.RELEASE_VERSION }}
with:
repository: budibase/budibase-deploys
event: budicloud-preprod-deploy
github_pat: ${{ secrets.GH_ACCESS_TOKEN }}

View File

@ -1,124 +0,0 @@
name: Budibase Prerelease
concurrency:
group: release-prerelease
cancel-in-progress: false
on:
push:
tags:
- "*-alpha.*"
workflow_dispatch:
env:
# Posthog token used by ui at build time
# disable unless needed for testing
# POSTHOG_TOKEN: phc_uDYOfnFt6wAbBAXkC6STjcrTpAFiWIhqgFcsC1UVO5F
INTERCOM_TOKEN: ${{ secrets.INTERCOM_TOKEN }}
PERSONAL_ACCESS_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
FEATURE_PREVIEW_URL: https://budirelease.live
jobs:
release-images:
runs-on: ubuntu-latest
steps:
- name: Fail if not a tag
run: |
if [[ $GITHUB_REF != refs/tags/* ]]; then
echo "Workflow Dispatch can only be run on tags"
exit 1
fi
- uses: actions/checkout@v2
with:
submodules: true
token: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
fetch-depth: 0
- name: Fail if tag is not develop
run: |
if ! git merge-base --is-ancestor ${{ github.sha }} origin/develop; then
echo "Tag is not in develop"
exit 1
fi
- uses: actions/setup-node@v1
with:
node-version: 18.x
- run: yarn install --frozen-lockfile
- name: Update versions
run: ./scripts/updateVersions.sh
- run: yarn build
- run: yarn build:sdk
- name: Publish budibase packages to NPM
env:
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
run: |
# setup the username and email.
git config --global user.name "Budibase Staging Release Bot"
git config --global user.email "<>"
git submodule foreach git commit -a -m 'Release process'
git commit -a -m 'Release process'
echo //registry.npmjs.org/:_authToken=${NPM_TOKEN} >> .npmrc
yarn release:develop
- name: Build/release Docker images
run: |
docker login -u $DOCKER_USER -p $DOCKER_PASSWORD
yarn build:docker:develop
env:
DOCKER_USER: ${{ secrets.DOCKER_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKER_API_KEY }}
release-helm-chart:
needs: [release-images]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Setup Helm
uses: azure/setup-helm@v1
id: helm-install
# due to helm repo index issue: https://github.com/helm/helm/issues/7363
# we need to create new package in a different dir, merge the index and move the package back
- name: Build and release helm chart
run: |
git config user.name "Budibase Helm Bot"
git config user.email "<>"
git reset --hard
git fetch
mkdir sync
echo "Packaging chart to sync dir"
helm package charts/budibase --version 0.0.0-develop --app-version develop --destination sync
echo "Packaging successful"
git checkout gh-pages
echo "Indexing helm repo"
helm repo index --merge docs/index.yaml sync
mv -f sync/* docs
rm -rf sync
echo "Pushing new helm release"
git add -A
git commit -m "Helm Release: develop"
git push
trigger-deploy-to-qa-env:
needs: [release-helm-chart]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Get the current budibase release version
id: version
run: |
release_version=$(cat lerna.json | jq -r '.version')
echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV
- uses: passeidireto/trigger-external-workflow-action@main
env:
PAYLOAD_VERSION: ${{ env.RELEASE_VERSION }}
with:
repository: budibase/budibase-deploys
event: budicloud-qa-deploy
github_pat: ${{ secrets.GH_ACCESS_TOKEN }}

View File

@ -110,19 +110,13 @@ jobs:
           git commit -m "Helm Release: ${{ env.RELEASE_VERSION }}"
           git push

-  deploy-to-legacy-preprod-env:
-    needs: [release-images]
-    uses: ./.github/workflows/deploy-preprod.yml
-    secrets: inherit
-
-  # Trigger deploy to new EKS preprod environment
-  trigger-deploy-to-preprod-env:
+  trigger-deploy-to-qa-env:
     needs: [release-helm-chart]
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v2
-      - name: Get the current budibase release version
+      - name: Get the latest budibase release version
         id: version
         run: |
           release_version=$(cat lerna.json | jq -r '.version')

@ -133,5 +127,5 @@ jobs:
           PAYLOAD_VERSION: ${{ env.RELEASE_VERSION }}
         with:
           repository: budibase/budibase-deploys
-          event: budicloud-preprod-deploy
+          event: budicloud-qa-deploy
           github_pat: ${{ secrets.GH_ACCESS_TOKEN }}

View File

@ -0,0 +1,69 @@
name: Test
on:
workflow_dispatch:
env:
CI: true
PERSONAL_ACCESS_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
REGISTRY_URL: registry.hub.docker.com
NX_CLOUD_ACCESS_TOKEN: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
jobs:
build:
name: "build"
runs-on: ubuntu-latest
strategy:
matrix:
node-version: [18.x]
steps:
- name: "Checkout"
uses: actions/checkout@v4
with:
submodules: true
token: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
- name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@v3
with:
node-version: ${{ matrix.node-version }}
cache: "yarn"
- name: Setup QEMU
uses: docker/setup-qemu-action@v3
- name: Setup Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v3
- name: Run Yarn
run: yarn
- name: Run Yarn Build
run: yarn build --scope @budibase/server --scope @budibase/worker
- name: Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_API_KEY }}
- name: Get the latest release version
id: version
run: |
release_version=$(cat lerna.json | jq -r '.version')
echo $release_version
echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV
- name: Tag and release Budibase service docker image
uses: docker/build-push-action@v5
with:
context: .
push: true
pull: true
platforms: linux/amd64,linux/arm64
tags: budibase/budibase-test:test
file: ./hosting/single/Dockerfile.v2
cache-from: type=registry,ref=budibase/budibase-test:test
cache-to: type=inline
- name: Tag and release Budibase Azure App Service docker image
uses: docker/build-push-action@v2
with:
context: .
push: true
platforms: linux/amd64
build-args: TARGETBUILD=aas
tags: budibase/budibase-test:aas
file: ./hosting/single/Dockerfile.v2

View File

@ -18,7 +18,7 @@ jobs:
       - name: Maximize build space
         uses: easimon/maximize-build-space@master
         with:
-          root-reserve-mb: 35000
+          root-reserve-mb: 30000
           swap-size-mb: 1024
           remove-android: 'true'
           remove-dotnet: 'true'

@ -33,14 +33,6 @@ jobs:
         with:
           submodules: true
           token: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
-          fetch-depth: 0
-      - name: Fail if tag is not in master
-        run: |
-          if ! git merge-base --is-ancestor ${{ github.sha }} origin/master; then
-            echo "Tag is not in master. This pipeline can only execute tags that are present on the master branch"
-            exit 1
-          fi
       - name: Use Node.js ${{ matrix.node-version }}
         uses: actions/setup-node@v1

@ -55,10 +47,6 @@
         run: yarn
       - name: Update versions
         run: ./scripts/updateVersions.sh
-      - name: Runt Yarn Lint
-        run: yarn lint
-      - name: Update versions
-        run: ./scripts/updateVersions.sh
       - name: Run Yarn Build
         run: yarn build:docker:pre
       - name: Login to Docker Hub

View File

@ -2,7 +2,7 @@ name: Close stale issues and PRs # https://github.com/actions/stale
 on:
   workflow_dispatch:
   schedule:
-    - cron: '*/30 * * * *' # Every 30 mins
+    - cron: "*/30 * * * *" # Every 30 mins

 jobs:
   stale:

@ -10,20 +10,37 @@ jobs:
     steps:
       - uses: actions/stale@v8
         with:
-          # stale rules
-          days-before-stale: 60
+          operations-per-run: 1
+          # stale rules for PRs
           days-before-pr-stale: 7
           stale-issue-label: stale
-          stale-issue-message: "This issue has been automatically marked as stale because it has not had any activity for 60 days."
-          # close rules
-          # days after being marked as stale to close
-          days-before-close: 30
-          close-issue-label: closed-stale
-          close-issue-message: This issue has been automatically closed it has not had any activity in 90 days."
-          days-before-pr-close: 7
-          # exemptions
           exempt-pr-labels: pinned,security,roadmap
+          days-before-pr-close: 7
+
+      - uses: actions/stale@v8
+        with:
+          operations-per-run: 3
+          # stale rules for high priority bugs
+          days-before-stale: 30
+          only-issue-labels: bug,High priority
+          stale-issue-label: warn
+      - uses: actions/stale@v8
+        with:
+          operations-per-run: 3
+          # stale rules for medium priority bugs
+          days-before-stale: 90
+          only-issue-labels: bug,Medium priority
+          stale-issue-label: warn
+      - uses: actions/stale@v8
+        with:
+          operations-per-run: 3
+          # stale rules for all bugs
+          days-before-stale: 180
+          stale-issue-label: stale
+          only-issue-labels: bug
+          stale-issue-message: "This issue has been automatically marked as stale because it has not had any activity for six months."
+          days-before-close: 30

View File

@ -1,42 +0,0 @@
name: Tag prerelease
concurrency:
group: tag-prerelease
cancel-in-progress: false
on:
push:
branches:
- develop
paths:
- ".aws/**"
- ".github/**"
- "charts/**"
- "packages/**"
- "scripts/**"
- "package.json"
- "yarn.lock"
workflow_dispatch:
jobs:
tag-prerelease:
runs-on: ubuntu-latest
steps:
- name: Fail if branch is not develop
if: github.ref != 'refs/heads/develop'
run: |
echo "Ref is not develop, you must run this job from develop."
exit 1
- uses: actions/checkout@v2
with:
submodules: true
token: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
- run: cd scripts && yarn
- name: Tag prerelease
run: |
cd scripts
# setup the username and email.
git config --global user.name "Budibase Staging Release Bot"
git config --global user.email "<>"
./versionCommit.sh prerelease

View File

@ -4,17 +4,6 @@ concurrency:
   cancel-in-progress: false

 on:
-  push:
-    branches:
-      - master
-    paths:
-      - ".aws/**"
-      - ".github/**"
-      - "charts/**"
-      - "packages/**"
-      - "scripts/**"
-      - "package.json"
-      - "yarn.lock"
   workflow_dispatch:
     inputs:
       versioning:

View File

@ -1 +1 @@
-network-timeout 100000
+network-timeout 1000000

View File

@ -138,6 +138,8 @@ To develop the Budibase platform you'll need [Docker](https://www.docker.com/) a
 `yarn setup` will check that all necessary components are installed and setup the repo for usage.

+If you have access to the `@budibase/pro` submodule then please follow the Pro section of this guide before running the above command.
+
 ##### Manual method

 The following commands can be executed to manually get Budibase up and running (assuming Docker/Docker Compose has been installed).

@ -146,6 +148,8 @@ The following commands can be executed to manually get Budibase up and running (
 `yarn build` will build all budibase packages.

+If you have access to the `@budibase/pro` submodule then please follow the Pro section of this guide before running the above commands.
+
 #### 4. Running

 To run the budibase server and builder in dev mode (i.e. with live reloading):
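The quick and manual paths described above reduce to a short shell session. A sketch assuming a fresh clone with Docker already running; the final `yarn dev` is an assumption, the exact dev scripts (`dev:server`, `dev:docker`, ...) live in the root package.json touched later in this commit:

```bash
# quick method: verify prerequisites and prepare the repo for usage
yarn setup

# manual method: install workspace dependencies, then build all packages
yarn
yarn build

# run the server and builder in dev mode (live reloading);
# `yarn dev` is assumed here -- see the root package.json for the
# dev:* variants (dev:server, dev:docker, etc.)
yarn dev
```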

View File

@ -12,14 +12,14 @@ RUN chmod +x /cleanup.sh
 WORKDIR /app
 ADD packages/server .
 COPY yarn.lock .
-RUN yarn install --production=true
+RUN yarn install --production=true --network-timeout 100000
 RUN /cleanup.sh

 # build worker
 WORKDIR /worker
 ADD packages/worker .
 COPY yarn.lock .
-RUN yarn install --production=true
+RUN yarn install --production=true --network-timeout 100000
 RUN /cleanup.sh

 FROM budibase/couchdb

View File

@ -0,0 +1,126 @@
FROM node:18-slim as build
# install node-gyp dependencies
RUN apt-get update && apt-get install -y --no-install-recommends g++ make python3 jq
# copy and install dependencies
WORKDIR /app
COPY package.json .
COPY yarn.lock .
COPY lerna.json .
COPY .yarnrc .
COPY packages/server/package.json packages/server/package.json
COPY packages/worker/package.json packages/worker/package.json
# string-templates does not get bundled during the esbuild process, so we want to use the local version
COPY packages/string-templates/package.json packages/string-templates/package.json
COPY scripts/removeWorkspaceDependencies.sh scripts/removeWorkspaceDependencies.sh
RUN chmod +x ./scripts/removeWorkspaceDependencies.sh
RUN ./scripts/removeWorkspaceDependencies.sh
# We will never want to sync pro, but the script is still required
RUN echo '' > scripts/syncProPackage.js
RUN jq 'del(.scripts.postinstall)' package.json > temp.json && mv temp.json package.json
RUN --mount=type=cache,target=/root/.yarn YARN_CACHE_FOLDER=/root/.yarn yarn install --production
# copy the actual code
COPY packages/server/dist packages/server/dist
COPY packages/server/pm2.config.js packages/server/pm2.config.js
COPY packages/server/client packages/server/client
COPY packages/server/builder packages/server/builder
COPY packages/worker/dist packages/worker/dist
COPY packages/worker/pm2.config.js packages/worker/pm2.config.js
COPY packages/string-templates packages/string-templates
FROM budibase/couchdb as runner
ARG TARGETARCH
ENV TARGETARCH $TARGETARCH
#TARGETBUILD can be set to single (for single docker image) or aas (for azure app service)
# e.g. docker build --build-arg TARGETBUILD=aas ....
ARG TARGETBUILD=single
ENV TARGETBUILD $TARGETBUILD
# install base dependencies
RUN apt-get update && \
apt-get install -y --no-install-recommends software-properties-common nginx uuid-runtime redis-server
# Install postgres client for pg_dump utils
RUN apt install software-properties-common apt-transport-https gpg -y \
&& curl -fsSl https://www.postgresql.org/media/keys/ACCC4CF8.asc | gpg --dearmor | tee /usr/share/keyrings/postgresql.gpg > /dev/null \
&& echo deb [arch=amd64,arm64,ppc64el signed-by=/usr/share/keyrings/postgresql.gpg] http://apt.postgresql.org/pub/repos/apt/ $(lsb_release -cs)-pgdg main | tee /etc/apt/sources.list.d/postgresql.list \
&& apt update -y \
&& apt install postgresql-client-15 -y \
&& apt remove software-properties-common apt-transport-https gpg -y
# install other dependencies, nodejs, oracle requirements, jdk8, redis, nginx
WORKDIR /nodejs
RUN curl -sL https://deb.nodesource.com/setup_18.x -o /tmp/nodesource_setup.sh && \
bash /tmp/nodesource_setup.sh && \
apt-get install -y --no-install-recommends libaio1 nodejs && \
npm install --global yarn pm2
# setup nginx
COPY hosting/single/nginx/nginx.conf /etc/nginx
COPY hosting/single/nginx/nginx-default-site.conf /etc/nginx/sites-enabled/default
RUN mkdir -p /var/log/nginx && \
touch /var/log/nginx/error.log && \
touch /var/run/nginx.pid && \
usermod -a -G tty www-data
WORKDIR /
RUN mkdir -p scripts/integrations/oracle
COPY packages/server/scripts/integrations/oracle scripts/integrations/oracle
RUN /bin/bash -e ./scripts/integrations/oracle/instantclient/linux/install.sh
# setup minio
WORKDIR /minio
COPY scripts/install-minio.sh ./install.sh
RUN chmod +x install.sh && ./install.sh
# setup runner file
WORKDIR /
COPY hosting/single/runner.sh .
RUN chmod +x ./runner.sh
COPY hosting/single/healthcheck.sh .
RUN chmod +x ./healthcheck.sh
# Script below sets the path for storing data based on $DATA_DIR
# For Azure App Service install SSH & point data locations to /home
COPY hosting/single/ssh/sshd_config /etc/
COPY hosting/single/ssh/ssh_setup.sh /tmp
RUN /build-target-paths.sh
# setup letsencrypt certificate
RUN apt-get install -y certbot python3-certbot-nginx
COPY hosting/letsencrypt /app/letsencrypt
RUN chmod +x /app/letsencrypt/certificate-request.sh /app/letsencrypt/certificate-renew.sh
COPY --from=build /app/node_modules /node_modules
COPY --from=build /app/package.json /package.json
COPY --from=build /app/packages/server /app
COPY --from=build /app/packages/worker /worker
COPY --from=build /app/packages/string-templates /string-templates
RUN cd /string-templates && yarn link && cd ../app && yarn link @budibase/string-templates && cd ../worker && yarn link @budibase/string-templates
EXPOSE 80
EXPOSE 443
# Expose port 2222 for SSH on Azure App Service build
EXPOSE 2222
VOLUME /data
HEALTHCHECK --interval=15s --timeout=15s --start-period=45s CMD "/healthcheck.sh"
# must set this just before running
ENV NODE_ENV=production
WORKDIR /
CMD ["./runner.sh"]

View File

@ -7,16 +7,16 @@ declare -a DOCKER_VARS=("APP_PORT" "APPS_URL" "ARCHITECTURE" "BUDIBASE_ENVIRONME
 [[ -z "${BUDIBASE_ENVIRONMENT}" ]] && export BUDIBASE_ENVIRONMENT=PRODUCTION
 [[ -z "${CLUSTER_PORT}" ]] && export CLUSTER_PORT=80
 [[ -z "${DEPLOYMENT_ENVIRONMENT}" ]] && export DEPLOYMENT_ENVIRONMENT=docker
-[[ -z "${MINIO_URL}" ]] && export MINIO_URL=http://localhost:9000
+[[ -z "${MINIO_URL}" ]] && export MINIO_URL=http://127.0.0.1:9000
 [[ -z "${NODE_ENV}" ]] && export NODE_ENV=production
 [[ -z "${POSTHOG_TOKEN}" ]] && export POSTHOG_TOKEN=phc_bIjZL7oh2GEUd2vqvTBH8WvrX0fWTFQMs6H5KQxiUxU
 [[ -z "${TENANT_FEATURE_FLAGS}" ]] && export TENANT_FEATURE_FLAGS="*:LICENSING,*:USER_GROUPS,*:ONBOARDING_TOUR"
 [[ -z "${ACCOUNT_PORTAL_URL}" ]] && export ACCOUNT_PORTAL_URL=https://account.budibase.app
-[[ -z "${REDIS_URL}" ]] && export REDIS_URL=localhost:6379
+[[ -z "${REDIS_URL}" ]] && export REDIS_URL=127.0.0.1:6379
 [[ -z "${SELF_HOSTED}" ]] && export SELF_HOSTED=1
 [[ -z "${WORKER_PORT}" ]] && export WORKER_PORT=4002
-[[ -z "${WORKER_URL}" ]] && export WORKER_URL=http://localhost:4002
+[[ -z "${WORKER_URL}" ]] && export WORKER_URL=http://127.0.0.1:4002
-[[ -z "${APPS_URL}" ]] && export APPS_URL=http://localhost:4001
+[[ -z "${APPS_URL}" ]] && export APPS_URL=http://127.0.0.1:4001
 [[ -z "${SERVER_TOP_LEVEL_PATH}" ]] && export SERVER_TOP_LEVEL_PATH=/app
 # export CUSTOM_DOMAIN=budi001.custom.com

@ -51,7 +51,7 @@ do
   fi
 done

 if [[ -z "${COUCH_DB_URL}" ]]; then
-  export COUCH_DB_URL=http://$COUCHDB_USER:$COUCHDB_PASSWORD@localhost:5984
+  export COUCH_DB_URL=http://$COUCHDB_USER:$COUCHDB_PASSWORD@127.0.0.1:5984
 fi
 if [ ! -f "${DATA_DIR}/.env" ]; then
   touch ${DATA_DIR}/.env

View File

@ -1,5 +1,5 @@
{ {
"version": "2.11.5-alpha.3", "version": "2.11.35",
"npmClient": "yarn", "npmClient": "yarn",
"packages": [ "packages": [
"packages/*" "packages/*"

View File

@ -8,5 +8,9 @@
       }
     }
   },
-  "targetDefaults": {}
+  "targetDefaults": {
+    "build": {
+      "inputs": ["{workspaceRoot}/scripts/build.js"]
+    }
+  }
 }

View File

@ -3,14 +3,11 @@
   "private": true,
   "devDependencies": {
     "@esbuild-plugins/tsconfig-paths": "^0.1.2",
-    "@nx/js": "16.4.3",
-    "@rollup/plugin-json": "^4.0.2",
     "@typescript-eslint/parser": "6.7.2",
     "esbuild": "^0.18.17",
     "esbuild-node-externals": "^1.8.0",
     "eslint": "^8.44.0",
     "husky": "^8.0.3",
-    "js-yaml": "^4.1.0",
     "kill-port": "^1.6.1",
     "lerna": "7.1.1",
     "madge": "^6.0.0",

@ -19,8 +16,6 @@
     "nx-cloud": "16.0.5",
     "prettier": "2.8.8",
     "prettier-plugin-svelte": "^2.3.0",
-    "rimraf": "^3.0.2",
-    "rollup-plugin-replace": "^2.2.0",
     "svelte": "3.49.0",
     "typescript": "5.2.2",
     "@babel/core": "^7.22.5",

@ -51,7 +46,7 @@
     "dev:noserver": "yarn run kill-builder && lerna run --stream dev:stack:up && lerna run --stream dev:builder --ignore @budibase/backend-core --ignore @budibase/server --ignore @budibase/worker",
     "dev:server": "yarn run kill-server && lerna run --stream dev:builder --scope @budibase/worker --scope @budibase/server",
     "dev:built": "yarn run kill-all && cd packages/server && yarn dev:stack:up && cd ../../ && lerna run --stream dev:built",
-    "dev:docker": "yarn build:docker:pre && docker-compose -f hosting/docker-compose.build.yaml -f hosting/docker-compose.dev.yaml --env-file hosting/.env up --build --scale proxy-service=0",
+    "dev:docker": "yarn build && docker-compose -f hosting/docker-compose.build.yaml -f hosting/docker-compose.dev.yaml --env-file hosting/.env up --build --scale proxy-service=0",
     "test": "lerna run --stream test --stream",
     "lint:eslint": "eslint packages qa-core --max-warnings=0",
     "lint:prettier": "prettier --check \"packages/**/*.{js,ts,svelte}\" && prettier --write \"examples/**/*.{js,ts,svelte}\" && prettier --check \"qa-core/**/*.{js,ts,svelte}\"",

@ -61,7 +56,6 @@
     "lint:fix": "yarn run lint:fix:prettier && yarn run lint:fix:eslint",
     "build:specs": "lerna run --stream specs",
     "build:docker": "lerna run --stream build:docker && yarn build:docker:proxy && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh $BUDIBASE_RELEASE_VERSION && cd -",
-    "build:docker:pre": "yarn build && lerna run --stream predocker",
     "build:docker:proxy": "docker build hosting/proxy -t proxy-service",
     "build:docker:selfhost": "lerna run --stream build:docker && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh latest && cd -",
     "build:docker:develop": "node scripts/pinVersions && lerna run --stream build:docker && yarn build:docker:proxy && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh develop && cd -",

@ -69,12 +63,10 @@
     "build:docker:airgap:single": "SINGLE_IMAGE=1 node hosting/scripts/airgapped/airgappedDockerBuild",
     "build:digitalocean": "cd hosting/digitalocean && ./build.sh && cd -",
     "build:docker:single:multiarch": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/single/Dockerfile -t budibase:latest .",
-    "build:docker:single:image": "docker build -f hosting/single/Dockerfile -t budibase:latest .",
-    "build:docker:single": "yarn build && lerna run --concurrency 1 predocker && yarn build:docker:single:image",
+    "build:docker:single": "./scripts/build-single-image.sh",
     "build:docker:dependencies": "docker build -f hosting/dependencies/Dockerfile -t budibase/dependencies:latest ./hosting",
     "publish:docker:couch": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/couchdb/Dockerfile -t budibase/couchdb:latest -t budibase/couchdb:v3.2.1 --push ./hosting/couchdb",
     "publish:docker:dependencies": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/dependencies/Dockerfile -t budibase/dependencies:latest -t budibase/dependencies:v3.2.1 --push ./hosting",
-    "build:docs": "lerna run --stream build:docs",
     "release:helm": "node scripts/releaseHelmChart",
     "env:multi:enable": "lerna run --stream env:multi:enable",
     "env:multi:disable": "lerna run --stream env:multi:disable",

View File

@ -26,7 +26,7 @@
     "@budibase/shared-core": "0.0.0",
     "@budibase/types": "0.0.0",
     "@techpass/passport-openidconnect": "0.3.2",
-    "aws-cloudfront-sign": "2.2.0",
+    "aws-cloudfront-sign": "3.0.2",
     "aws-sdk": "2.1030.0",
     "bcrypt": "5.1.0",
     "bcryptjs": "2.4.3",

@ -62,7 +62,7 @@
     "@trendyol/jest-testcontainers": "^2.1.1",
     "@types/chance": "1.1.3",
     "@types/cookies": "0.7.8",
-    "@types/jest": "29.5.3",
+    "@types/jest": "29.5.5",
     "@types/lodash": "4.14.180",
     "@types/node": "18.17.0",
     "@types/node-fetch": "2.6.4",

View File

@ -1,5 +1,10 @@
 import { prefixed, DocumentType } from "@budibase/types"

-export { SEPARATOR, UNICODE_MAX, DocumentType } from "@budibase/types"
+export {
+  SEPARATOR,
+  UNICODE_MAX,
+  DocumentType,
+  InternalTable,
+} from "@budibase/types"

 /**
  * Can be used to create a few different forms of querying a view.

@ -30,10 +35,6 @@ export const DeprecatedViews = {
   ],
 }

-export enum InternalTable {
-  USER_METADATA = "ta_users",
-}
-
 export const StaticDatabases = {
   GLOBAL: {
     name: "global-db",

View File

@ -45,6 +45,11 @@ export function generateGlobalUserID(id?: any) {
   return `${DocumentType.USER}${SEPARATOR}${id || newid()}`
 }

+const isGlobalUserIDRegex = new RegExp(`^${DocumentType.USER}${SEPARATOR}.+`)
+
+export function isGlobalUserID(id: string) {
+  return isGlobalUserIDRegex.test(id)
+}
+
 /**
  * Generates a new user ID based on the passed in global ID.
  * @param {string} globalId The ID of the global user.

View File

@ -1,5 +1,5 @@
 import env from "../environment"
-const cfsign = require("aws-cloudfront-sign")
+import * as cfsign from "aws-cloudfront-sign"

 let PRIVATE_KEY: string | undefined

@ -21,7 +21,7 @@ function getPrivateKey() {
 const getCloudfrontSignParams = () => {
   return {
-    keypairId: env.CLOUDFRONT_PUBLIC_KEY_ID,
+    keypairId: env.CLOUDFRONT_PUBLIC_KEY_ID!,
     privateKeyString: getPrivateKey(),
     expireTime: new Date().getTime() + 1000 * 60 * 60, // 1 hour
   }

View File

@ -14,13 +14,14 @@ import {
 } from "../db"
 import {
   BulkDocsResponse,
+  ContextUser,
+  SearchQuery,
+  SearchQueryOperators,
   SearchUsersRequest,
   User,
-  ContextUser,
 } from "@budibase/types"
-import { getGlobalDB } from "../context"
 import * as context from "../context"
-import { user as userCache } from "../cache"
+import { getGlobalDB } from "../context"

 type GetOpts = { cleanup?: boolean }

@ -39,6 +40,31 @@ function removeUserPassword(users: User | User[]) {
   return users
 }

+export const isSupportedUserSearch = (query: SearchQuery) => {
+  const allowed = [
+    { op: SearchQueryOperators.STRING, key: "email" },
+    { op: SearchQueryOperators.EQUAL, key: "_id" },
+  ]
+  for (let [key, operation] of Object.entries(query)) {
+    if (typeof operation !== "object") {
+      return false
+    }
+    const fields = Object.keys(operation || {})
+    // this filter doesn't contain options - ignore
+    if (fields.length === 0) {
+      continue
+    }
+    const allowedOperation = allowed.find(
+      allow =>
+        allow.op === key && fields.length === 1 && fields[0] === allow.key
+    )
+    if (!allowedOperation) {
+      return false
+    }
+  }
+  return true
+}
+
 export const bulkGetGlobalUsersById = async (
   userIds: string[],
   opts?: GetOpts

@ -211,8 +237,8 @@
 const PAGE_LIMIT = 8

 export const paginatedUsers = async ({
-  page,
-  email,
+  bookmark,
+  query,
   appId,
 }: SearchUsersRequest = {}) => {
   const db = getGlobalDB()

@ -222,18 +248,20 @@
     limit: PAGE_LIMIT + 1,
   }
   // add a startkey if the page was specified (anchor)
-  if (page) {
-    opts.startkey = page
+  if (bookmark) {
+    opts.startkey = bookmark
   }
   // property specifies what to use for the page/anchor
   let userList: User[],
     property = "_id",
     getKey
-  if (appId) {
+  if (query?.equal?._id) {
+    userList = [await getById(query.equal._id)]
+  } else if (appId) {
     userList = await searchGlobalUsersByApp(appId, opts)
     getKey = (doc: any) => getGlobalUserByAppPage(appId, doc)
-  } else if (email) {
-    userList = await searchGlobalUsersByEmail(email, opts)
+  } else if (query?.string?.email) {
+    userList = await searchGlobalUsersByEmail(query?.string?.email, opts)
     property = "email"
   } else {
     // no search, query allDocs

View File

@ -82,9 +82,9 @@
     "@spectrum-css/vars": "3.0.1",
     "dayjs": "^1.10.8",
     "easymde": "^2.16.1",
+    "svelte-dnd-action": "^0.9.8",
     "svelte-flatpickr": "3.2.3",
-    "svelte-portal": "^1.0.0",
-    "svelte-dnd-action": "^0.9.8"
+    "svelte-portal": "^1.0.0"
   },
   "resolutions": {
     "loader-utils": "1.4.1"

View File

@ -21,14 +21,6 @@
     "hsla(240, 90%, 75%, 0.3)",
     "hsla(320, 90%, 75%, 0.3)",
   ]

-  $: {
-    if (constraints.inclusion.length) {
-      options = constraints.inclusion.map(value => ({
-        name: value,
-        id: Math.random(),
-      }))
-    }
-  }
-
   const removeInput = idx => {
     delete optionColors[options[idx].name]
     constraints.inclusion = constraints.inclusion.filter((e, i) => i !== idx)

@ -80,6 +72,11 @@
     // Initialize anchor arrays on mount, assuming 'options' is already populated
     colorPopovers = constraints.inclusion.map(() => undefined)
     anchors = constraints.inclusion.map(() => undefined)
+    options = constraints.inclusion.map(value => ({
+      name: value,
+      id: Math.random(),
+    }))
   })
 </script>

View File

@ -1,8 +0,0 @@
const ncp = require("ncp").ncp
ncp("./dist", "../server/builder", function (err) {
if (err) {
return console.error(err)
}
console.log("Copied dist folder to ../server/builder")
})

View File

@ -85,8 +85,8 @@
     "@babel/core": "^7.12.14",
     "@babel/plugin-transform-runtime": "^7.13.10",
     "@babel/preset-env": "^7.13.12",
-    "@rollup/plugin-replace": "^2.4.2",
-    "@roxi/routify": "2.18.5",
+    "@rollup/plugin-replace": "^5.0.3",
+    "@roxi/routify": "2.18.12",
     "@sveltejs/vite-plugin-svelte": "1.0.1",
     "@testing-library/jest-dom": "5.17.0",
     "@testing-library/svelte": "^3.2.2",

@ -95,16 +95,18 @@
     "jest": "29.6.2",
     "jsdom": "^21.1.1",
     "ncp": "^2.0.0",
-    "rollup": "^2.44.0",
     "svelte": "^3.48.0",
     "svelte-jester": "^1.3.2",
-    "vite": "^3.0.8",
-    "vite-plugin-static-copy": "^0.16.0",
+    "vite": "^4.4.11",
+    "vite-plugin-static-copy": "^0.17.0",
     "vitest": "^0.29.2"
   },
   "nx": {
     "targets": {
       "build": {
+        "outputs": [
+          "{workspaceRoot}/packages/server/builder"
+        ],
         "dependsOn": [
           {
             "projects": [

View File

@ -948,12 +948,15 @@ export const buildFormSchema = (component, asset) => {
   if (component._component.endsWith("formblock")) {
     let schema = {}

     const datasource = getDatasourceForProvider(asset, component)
     const info = getSchemaForDatasource(component, datasource)
+    if (!info?.schema) {
+      return schema
+    }

     if (!component.fields) {
-      Object.values(info?.schema)
+      Object.values(info.schema)
         .filter(
           ({ autocolumn, name }) =>
             !autocolumn && !["_rev", "_id"].includes(name)

View File

@ -64,6 +64,7 @@ const INITIAL_FRONTEND_STATE = {
   },
   features: {
     componentValidation: false,
+    disableUserMetadata: false,
   },
   errors: [],
   hasAppPackage: false,

View File

@ -110,20 +110,7 @@
     <div class="schema-fields">
       {#each schemaFields as [field, schema]}
         {#if !schema.autocolumn && schema.type !== "attachment"}
-          <DrawerBindableSlot
-            fillWidth
-            title={value.title}
-            label={field}
-            panel={AutomationBindingPanel}
-            type={schema.type}
-            {schema}
-            value={value[field]}
-            on:change={e => onChange(e, field)}
-            {bindings}
-            allowJS={true}
-            updateOnChange={false}
-            drawerLeft="260px"
-          >
+          {#if isTestModal}
             <RowSelectorTypes
               {isTestModal}
               {field}

@ -132,7 +119,31 @@
               {value}
               {onChange}
             />
-          </DrawerBindableSlot>
+          {:else}
+            <DrawerBindableSlot
+              fillWidth
+              title={value.title}
+              label={field}
+              panel={AutomationBindingPanel}
+              type={schema.type}
+              {schema}
+              value={value[field]}
+              on:change={e => onChange(e, field)}
+              {bindings}
+              allowJS={true}
+              updateOnChange={false}
+              drawerLeft="260px"
+            >
+              <RowSelectorTypes
+                {isTestModal}
+                {field}
+                {schema}
+                bindings={parsedBindings}
+                {value}
+                {onChange}
+              />
+            </DrawerBindableSlot>
+          {/if}
         {/if}
         {#if isUpdateRow && schema.type === "link"}
           <div class="checkbox-field">

View File

@ -4,6 +4,7 @@
   import { TableNames } from "constants"
   import { Grid } from "@budibase/frontend-core"
   import { API } from "api"
+  import { store } from "builderStore"
   import GridAddColumnModal from "components/backend/DataTable/modals/grid/GridCreateColumnModal.svelte"
   import GridCreateEditRowModal from "components/backend/DataTable/modals/grid/GridCreateEditRowModal.svelte"
   import GridEditUserModal from "components/backend/DataTable/modals/grid/GridEditUserModal.svelte"

@ -17,11 +18,11 @@
   import GridUsersTableButton from "components/backend/DataTable/modals/grid/GridUsersTableButton.svelte"

   const userSchemaOverrides = {
-    firstName: { displayName: "First name" },
-    lastName: { displayName: "Last name" },
-    email: { displayName: "Email" },
-    roleId: { displayName: "Role" },
-    status: { displayName: "Status" },
+    firstName: { displayName: "First name", disabled: true },
+    lastName: { displayName: "Last name", disabled: true },
+    email: { displayName: "Email", disabled: true },
+    roleId: { displayName: "Role", disabled: true },
+    status: { displayName: "Status", disabled: true },
   }

   $: id = $tables.selected?._id

@ -60,14 +61,14 @@
   datasource={gridDatasource}
   canAddRows={!isUsersTable}
   canDeleteRows={!isUsersTable}
-  canEditRows={!isUsersTable}
-  canEditColumns={!isUsersTable}
+  canEditRows={!isUsersTable || !$store.features.disableUserMetadata}
+  canEditColumns={!isUsersTable || !$store.features.disableUserMetadata}
   schemaOverrides={isUsersTable ? userSchemaOverrides : null}
   showAvatars={false}
   on:updatedatasource={handleGridTableUpdate}
 >
   <svelte:fragment slot="filter">
-    {#if isUsersTable}
+    {#if isUsersTable && $store.features.disableUserMetadata}
       <GridUsersTableButton />
     {/if}
     <GridFilterButton />

View File

@ -13,7 +13,13 @@
   let modal

   $: tempValue = filters || []
-  $: schemaFields = Object.values(schema || {})
+  $: schemaFields = Object.entries(schema || {}).map(
+    ([fieldName, fieldSchema]) => ({
+      name: fieldName, // Using the key as name if not defined in the schema, for example in some autogenerated columns
+      ...fieldSchema,
+    })
+  )

   $: text = getText(filters)
   $: selected = tempValue.filter(x => !x.onEmptyFilter)?.length > 0
View File

@ -33,17 +33,16 @@
   import { getBindings } from "components/backend/DataTable/formula"
   import JSONSchemaModal from "./JSONSchemaModal.svelte"
   import { ValidColumnNameRegex } from "@budibase/shared-core"
-  import { FieldType } from "@budibase/types"
+  import { FieldType, FieldSubtype, SourceName } from "@budibase/types"
   import RelationshipSelector from "components/common/RelationshipSelector.svelte"

-  const AUTO_TYPE = "auto"
+  const AUTO_TYPE = FIELDS.AUTO.type
   const FORMULA_TYPE = FIELDS.FORMULA.type
   const LINK_TYPE = FIELDS.LINK.type
   const STRING_TYPE = FIELDS.STRING.type
   const NUMBER_TYPE = FIELDS.NUMBER.type
   const JSON_TYPE = FIELDS.JSON.type
   const DATE_TYPE = FIELDS.DATETIME.type
-  const USER_REFRENCE_TYPE = FIELDS.BB_REFERENCE_USER.compositeType

   const dispatch = createEventDispatcher()
   const PROHIBITED_COLUMN_NAMES = ["type", "_id", "_rev", "tableId"]

@ -52,7 +51,24 @@
   export let field

   let mounted = false
-  let fieldDefinitions = cloneDeep(FIELDS)
+  const fieldDefinitions = Object.values(FIELDS).reduce(
+    // Storing the fields by complex field id
+    (acc, field) => ({
+      ...acc,
+      [makeFieldId(field.type, field.subtype)]: field,
+    }),
+    {}
+  )
+
+  function makeFieldId(type, subtype, autocolumn) {
+    // don't make field IDs for auto types
+    if (type === AUTO_TYPE || autocolumn) {
+      return type.toUpperCase()
+    } else {
+      return `${type}${subtype || ""}`.toUpperCase()
+    }
+  }
+
   let originalName
   let linkEditDisabled
   let primaryDisplay

@ -72,8 +88,8 @@
   let jsonSchemaModal
   let allowedTypes = []
   let editableColumn = {
-    type: fieldDefinitions.STRING.type,
-    constraints: fieldDefinitions.STRING.constraints,
+    type: FIELDS.STRING.type,
+    constraints: FIELDS.STRING.constraints,
     // Initial value for column name in other table for linked records
     fieldName: $tables.selected.name,
   }

@ -139,9 +155,6 @@
       $tables.selected.primaryDisplay == null ||
       $tables.selected.primaryDisplay === editableColumn.name

-    if (editableColumn.type === FieldType.BB_REFERENCE) {
-      editableColumn.type = `${editableColumn.type}_${editableColumn.subtype}`
-    }
     // Here we are setting the relationship values based on the editableColumn
     // This part of the code is used when viewing an existing field hence the check
     // for the tableId

@ -172,7 +185,18 @@
       }
     }

-    allowedTypes = getAllowedTypes()
+    if (!savingColumn) {
+      editableColumn.fieldId = makeFieldId(
+        editableColumn.type,
+        editableColumn.subtype,
+        editableColumn.autocolumn
+      )
+
+      allowedTypes = getAllowedTypes().map(t => ({
+        fieldId: makeFieldId(t.type, t.subtype),
+        ...t,
+      }))
+    }
   }

   $: initialiseField(field, savingColumn)

@ -249,13 +273,7 @@
     let saveColumn = cloneDeep(editableColumn)

-    // Handle types on composite types
-    const definition = fieldDefinitions[saveColumn.type.toUpperCase()]
-    if (definition && saveColumn.type === definition.compositeType) {
-      saveColumn.type = definition.type
-      saveColumn.subtype = definition.subtype
-      delete saveColumn.compositeType
-    }
+    delete saveColumn.fieldId

     if (saveColumn.type === AUTO_TYPE) {
       saveColumn = buildAutoColumn(

@ -320,27 +338,33 @@
     }
   }

-  function handleTypeChange(event) {
+  function onHandleTypeChange(event) {
+    handleTypeChange(event.detail)
+  }
+
+  function handleTypeChange(type) {
     // remove any extra fields that may not be related to this type
     delete editableColumn.autocolumn
     delete editableColumn.subtype
     delete editableColumn.tableId
     delete editableColumn.relationshipType
     delete editableColumn.formulaType
+    delete editableColumn.constraints

     // Add in defaults and initial definition
-    const definition = fieldDefinitions[event.detail?.toUpperCase()]
+    const definition = fieldDefinitions[type?.toUpperCase()]
     if (definition?.constraints) {
       editableColumn.constraints = definition.constraints
     }

+    editableColumn.type = definition.type
+    editableColumn.subtype = definition.subtype
+
     // Default relationships many to many
     if (editableColumn.type === LINK_TYPE) {
       editableColumn.relationshipType = RelationshipType.MANY_TO_MANY
     } else if (editableColumn.type === FORMULA_TYPE) {
       editableColumn.formulaType = "dynamic"
-    } else if (editableColumn.type === USER_REFRENCE_TYPE) {
-      editableColumn.relationshipType = RelationshipType.ONE_TO_MANY
     }
   }

@ -381,10 +405,27 @@
       return ALLOWABLE_NUMBER_OPTIONS
     }

+    const isUsers =
+      editableColumn.type === FieldType.BB_REFERENCE &&
+      editableColumn.subtype === FieldSubtype.USERS
+
     if (!external) {
       return [
-        ...Object.values(fieldDefinitions),
-        { name: "Auto Column", type: AUTO_TYPE },
+        FIELDS.STRING,
+        FIELDS.BARCODEQR,
+        FIELDS.LONGFORM,
+        FIELDS.OPTIONS,
+        FIELDS.ARRAY,
+        FIELDS.NUMBER,
+        FIELDS.BIGINT,
+        FIELDS.BOOLEAN,
+        FIELDS.DATETIME,
+        FIELDS.ATTACHMENT,
+        FIELDS.LINK,
+        FIELDS.FORMULA,
+        FIELDS.JSON,
+        isUsers ? FIELDS.USERS : FIELDS.USER,
+        FIELDS.AUTO,
       ]
     } else {
       let fields = [

@ -397,7 +438,7 @@
         FIELDS.BOOLEAN,
         FIELDS.FORMULA,
         FIELDS.BIGINT,
-        FIELDS.BB_REFERENCE_USER,
+        isUsers ? FIELDS.USERS : FIELDS.USER,
       ]
       // no-sql or a spreadsheet
       if (!external || table.sql) {

@ -472,6 +513,13 @@
     return newError
   }

+  function isUsersColumn(column) {
+    return (
+      column.type === FieldType.BB_REFERENCE &&
+      [FieldSubtype.USER, FieldSubtype.USERS].includes(column.subtype)
+    )
+  }
+
   onMount(() => {
     mounted = true
   })

@ -489,14 +537,14 @@
   {/if}
   <Select
     disabled={!typeEnabled}
-    bind:value={editableColumn.type}
-    on:change={handleTypeChange}
+    bind:value={editableColumn.fieldId}
+    on:change={onHandleTypeChange}
     options={allowedTypes}
     getOptionLabel={field => field.name}
-    getOptionValue={field => field.compositeType || field.type}
+    getOptionValue={field => field.fieldId}
     getOptionIcon={field => field.icon}
     isOptionEnabled={option => {
-      if (option.type == AUTO_TYPE) {
+      if (option.type === AUTO_TYPE) {
        return availableAutoColumnKeys?.length > 0
      }
      return true

@ -555,7 +603,7 @@
         <DatePicker bind:value={editableColumn.constraints.datetime.latest} />
       </div>
     </div>
-    {#if datasource?.source !== "ORACLE" && datasource?.source !== "SQL_SERVER" && !editableColumn.dateOnly}
+    {#if datasource?.source !== SourceName.ORACLE && datasource?.source !== SourceName.SQL_SERVER && !editableColumn.dateOnly}
       <div>
         <div class="row">
           <Label>Time zones</Label>

@ -659,13 +707,16 @@
     <Button primary text on:click={openJsonSchemaEditor}
       >Open schema editor</Button
     >
-  {:else if editableColumn.type === USER_REFRENCE_TYPE}
+  {:else if isUsersColumn(editableColumn) && datasource?.source !== SourceName.GOOGLE_SHEETS}
     <Toggle
-      value={editableColumn.relationshipType === RelationshipType.MANY_TO_MANY}
+      value={editableColumn.subtype === FieldSubtype.USERS}
       on:change={e =>
-        (editableColumn.relationshipType = e.detail
-          ? RelationshipType.MANY_TO_MANY
-          : RelationshipType.ONE_TO_MANY)}
+        handleTypeChange(
+          makeFieldId(
+            FieldType.BB_REFERENCE,
+            e.detail ? FieldSubtype.USERS : FieldSubtype.USER
+          )
+        )}
      disabled={!isCreating}
      thin
      text="Allow multiple users"

View File

@ -13,6 +13,8 @@
   import { Helpers } from "@budibase/bbui"
   import { RelationshipErrorChecker } from "./relationshipErrors"
   import { onMount } from "svelte"
+  import RelationshipSelector from "components/common/RelationshipSelector.svelte"
+  import { PrettyRelationshipDefinitions } from "constants/backend"

   export let save
   export let datasource

@ -22,16 +24,21 @@
   export let selectedFromTable
   export let close

-  const relationshipTypes = [
-    {
-      label: "One to Many",
-      value: RelationshipType.MANY_TO_ONE,
-    },
-    {
-      label: "Many to Many",
-      value: RelationshipType.MANY_TO_MANY,
-    },
-  ]
+  let relationshipMap = {
+    [RelationshipType.MANY_TO_MANY]: {
+      part1: PrettyRelationshipDefinitions.MANY,
+      part2: PrettyRelationshipDefinitions.MANY,
+    },
+    [RelationshipType.MANY_TO_ONE]: {
+      part1: PrettyRelationshipDefinitions.ONE,
+      part2: PrettyRelationshipDefinitions.MANY,
+    },
+  }
+  let relationshipOpts1 = Object.values(PrettyRelationshipDefinitions)
+  let relationshipOpts2 = Object.values(PrettyRelationshipDefinitions)
+  let relationshipPart1 = PrettyRelationshipDefinitions.MANY
+  let relationshipPart2 = PrettyRelationshipDefinitions.ONE

   let originalFromColumnName = toRelationship.name,
     originalToColumnName = fromRelationship.name

@ -49,14 +56,32 @@
   )
   let errors = {}
   let fromPrimary, fromForeign, fromColumn, toColumn
-  let fromId, toId, throughId, throughToKey, throughFromKey
+  let throughId, throughToKey, throughFromKey
   let isManyToMany, isManyToOne, relationshipType
   let hasValidated = false

+  $: fromId = null
+  $: toId = null
+
   $: tableOptions = plusTables.map(table => ({
     label: table.name,
     value: table._id,
+    name: table.name,
+    _id: table._id,
   }))

+  $: {
+    // Determine the relationship type based on the selected values of both parts
+    relationshipType = Object.entries(relationshipMap).find(
+      ([_, parts]) =>
+        parts.part1 === relationshipPart1 && parts.part2 === relationshipPart2
+    )?.[0]
+
+    changed(() => {
+      hasValidated = false
+    })
+  }
+
   $: valid =
     getErrorCount(errors) === 0 && allRequiredAttributesSet(relationshipType)
   $: isManyToMany = relationshipType === RelationshipType.MANY_TO_MANY

@ -338,33 +363,34 @@
   onConfirm={saveRelationship}
   disabled={!valid}
 >
-  <Select
-    label="Relationship type"
-    options={relationshipTypes}
-    bind:value={relationshipType}
-    bind:error={errors.relationshipType}
-    on:change={() =>
-      changed(() => {
-        hasValidated = false
-      })}
-  />
   <div class="headings">
     <Detail>Tables</Detail>
   </div>
-  {#if !selectedFromTable}
-    <Select
-      label="Select from table"
-      options={tableOptions}
-      bind:value={fromId}
-      bind:error={errors.fromTable}
-      on:change={e =>
-        changed(() => {
-          const table = plusTables.find(tbl => tbl._id === e.detail)
-          fromColumn = table?.name || ""
-          fromPrimary = table?.primary?.[0]
-        })}
-    />
-  {/if}
+  <RelationshipSelector
+    bind:relationshipPart1
+    bind:relationshipPart2
+    bind:relationshipTableIdPrimary={fromId}
+    bind:relationshipTableIdSecondary={toId}
+    {relationshipOpts1}
+    {relationshipOpts2}
+    {tableOptions}
+    {errors}
+    primaryDisabled={selectedFromTable}
+    primaryTableChanged={e =>
+      changed(() => {
+        const table = plusTables.find(tbl => tbl._id === e.detail)
+        fromColumn = table?.name || ""
+        fromPrimary = table?.primary?.[0]
+      })}
+    secondaryTableChanged={e =>
+      changed(() => {
+        const table = plusTables.find(tbl => tbl._id === e.detail)
+        toColumn = table.name || ""
+        fromForeign = null
+      })}
+  />
 {#if isManyToOne && fromId}
<Select <Select
label={`Primary Key (${getTable(fromId).name})`} label={`Primary Key (${getTable(fromId).name})`}
@ -374,18 +400,6 @@
on:change={changed} on:change={changed}
/> />
{/if} {/if}
<Select
label={"Select to table"}
options={tableOptions}
bind:value={toId}
bind:error={errors.toTable}
on:change={e =>
changed(() => {
const table = plusTables.find(tbl => tbl._id === e.detail)
toColumn = table.name || ""
fromForeign = null
})}
/>
{#if isManyToMany} {#if isManyToMany}
<Select <Select
label={"Through"} label={"Through"}
View File
@ -57,7 +57,7 @@
{#if $store.error} {#if $store.error}
<InlineAlert <InlineAlert
type="error" type="error"
header={$store.error.title} header="Error fetching {tableType}"
message={$store.error.description} message={$store.error.description}
/> />
{/if} {/if}
View File
@ -1,6 +1,6 @@
import { derived, writable, get } from "svelte/store" import { derived, writable, get } from "svelte/store"
import { keepOpen, notifications } from "@budibase/bbui" import { keepOpen, notifications } from "@budibase/bbui"
import { datasources, ImportTableError, tables } from "stores/backend" import { datasources, tables } from "stores/backend"
export const createTableSelectionStore = (integration, datasource) => { export const createTableSelectionStore = (integration, datasource) => {
const tableNamesStore = writable([]) const tableNamesStore = writable([])
@ -30,12 +30,7 @@ export const createTableSelectionStore = (integration, datasource) => {
notifications.success(`Tables fetched successfully.`) notifications.success(`Tables fetched successfully.`)
await onComplete() await onComplete()
} catch (err) { } catch (err) {
if (err instanceof ImportTableError) { errorStore.set(err)
errorStore.set(err)
} else {
notifications.error("Error fetching tables.")
}
return keepOpen return keepOpen
} }
} }
View File
@ -49,6 +49,15 @@
label: "Long Form Text", label: "Long Form Text",
value: FIELDS.LONGFORM.type, value: FIELDS.LONGFORM.type,
}, },
{
label: "User",
value: `${FIELDS.USER.type}${FIELDS.USER.subtype}`,
},
{
label: "Users",
value: `${FIELDS.USERS.type}${FIELDS.USERS.subtype}`,
},
] ]
$: { $: {
@ -143,7 +152,7 @@
<div class="field"> <div class="field">
<span>{name}</span> <span>{name}</span>
<Select <Select
value={schema[name]?.type} value={`${schema[name]?.type}${schema[name]?.subtype || ""}`}
options={typeOptions} options={typeOptions}
placeholder={null} placeholder={null}
getOptionLabel={option => option.label} getOptionLabel={option => option.label}
View File
@ -3,6 +3,7 @@
import { FIELDS } from "constants/backend" import { FIELDS } from "constants/backend"
import { API } from "api" import { API } from "api"
import { parseFile } from "./utils" import { parseFile } from "./utils"
import { canBeDisplayColumn } from "@budibase/shared-core"
export let rows = [] export let rows = []
export let schema = {} export let schema = {}
@ -10,36 +11,82 @@
export let displayColumn = null export let displayColumn = null
export let promptUpload = false export let promptUpload = false
const typeOptions = [ const typeOptions = {
{ [FIELDS.STRING.type]: {
label: "Text", label: "Text",
value: FIELDS.STRING.type, value: FIELDS.STRING.type,
config: {
type: FIELDS.STRING.type,
constraints: FIELDS.STRING.constraints,
},
}, },
{ [FIELDS.NUMBER.type]: {
label: "Number", label: "Number",
value: FIELDS.NUMBER.type, value: FIELDS.NUMBER.type,
config: {
type: FIELDS.NUMBER.type,
constraints: FIELDS.NUMBER.constraints,
},
}, },
{ [FIELDS.DATETIME.type]: {
label: "Date", label: "Date",
value: FIELDS.DATETIME.type, value: FIELDS.DATETIME.type,
config: {
type: FIELDS.DATETIME.type,
constraints: FIELDS.DATETIME.constraints,
},
}, },
{ [FIELDS.OPTIONS.type]: {
label: "Options", label: "Options",
value: FIELDS.OPTIONS.type, value: FIELDS.OPTIONS.type,
config: {
type: FIELDS.OPTIONS.type,
constraints: FIELDS.OPTIONS.constraints,
},
}, },
{ [FIELDS.ARRAY.type]: {
label: "Multi-select", label: "Multi-select",
value: FIELDS.ARRAY.type, value: FIELDS.ARRAY.type,
config: {
type: FIELDS.ARRAY.type,
constraints: FIELDS.ARRAY.constraints,
},
}, },
{ [FIELDS.BARCODEQR.type]: {
label: "Barcode/QR", label: "Barcode/QR",
value: FIELDS.BARCODEQR.type, value: FIELDS.BARCODEQR.type,
config: {
type: FIELDS.BARCODEQR.type,
constraints: FIELDS.BARCODEQR.constraints,
},
}, },
{ [FIELDS.LONGFORM.type]: {
label: "Long Form Text", label: "Long Form Text",
value: FIELDS.LONGFORM.type, value: FIELDS.LONGFORM.type,
config: {
type: FIELDS.LONGFORM.type,
constraints: FIELDS.LONGFORM.constraints,
},
}, },
] user: {
label: "User",
value: "user",
config: {
type: FIELDS.USER.type,
subtype: FIELDS.USER.subtype,
constraints: FIELDS.USER.constraints,
},
},
users: {
label: "Users",
value: "users",
config: {
type: FIELDS.USERS.type,
subtype: FIELDS.USERS.subtype,
constraints: FIELDS.USERS.constraints,
},
},
}
let fileInput let fileInput
let error = null let error = null
@ -48,10 +95,16 @@
let validation = {} let validation = {}
let validateHash = "" let validateHash = ""
let errors = {} let errors = {}
let selectedColumnTypes = {}
$: displayColumnOptions = Object.keys(schema || {}).filter(column => { $: displayColumnOptions = Object.keys(schema || {}).filter(column => {
return validation[column] return validation[column] && canBeDisplayColumn(schema[column].type)
}) })
$: if (displayColumn && !canBeDisplayColumn(schema[displayColumn].type)) {
displayColumn = null
}
$: { $: {
// binding in consumer is causing double renders here // binding in consumer is causing double renders here
const newValidateHash = JSON.stringify(rows) + JSON.stringify(schema) const newValidateHash = JSON.stringify(rows) + JSON.stringify(schema)
@ -72,6 +125,13 @@
rows = response.rows rows = response.rows
schema = response.schema schema = response.schema
fileName = response.fileName fileName = response.fileName
selectedColumnTypes = Object.entries(response.schema).reduce(
(acc, [colName, fieldConfig]) => ({
...acc,
[colName]: fieldConfig.type,
}),
{}
)
} catch (e) { } catch (e) {
loading = false loading = false
error = e error = e
@ -98,8 +158,10 @@
} }
const handleChange = (name, e) => { const handleChange = (name, e) => {
schema[name].type = e.detail const { config } = typeOptions[e.detail]
schema[name].constraints = FIELDS[e.detail.toUpperCase()].constraints schema[name].type = config.type
schema[name].subtype = config.subtype
schema[name].constraints = config.constraints
} }
const openFileUpload = (promptUpload, fileInput) => { const openFileUpload = (promptUpload, fileInput) => {
@ -142,9 +204,9 @@
<div class="field"> <div class="field">
<span>{column.name}</span> <span>{column.name}</span>
<Select <Select
bind:value={column.type} bind:value={selectedColumnTypes[column.name]}
on:change={e => handleChange(name, e)} on:change={e => handleChange(name, e)}
options={typeOptions} options={Object.values(typeOptions)}
placeholder={null} placeholder={null}
getOptionLabel={option => option.label} getOptionLabel={option => option.label}
getOptionValue={option => option.value} getOptionValue={option => option.value}
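With handleChange copying the whole config onto the schema entry, picking the new Users option produces an entry like the following, per the FIELDS definitions later in this commit (column name illustrative; runtime strings inferred from the removed constants):

  // schema.owner = {
  //   type: "bb_reference",            // FieldType.BB_REFERENCE
  //   subtype: "users",                // FieldSubtype.USERS
  //   constraints: { type: "array" },
  // }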
View File
@ -102,7 +102,7 @@
</div> </div>
{/if} {/if}
<div class="text" title={showTooltip ? text : null}> <div class="text" title={showTooltip ? text : null}>
{text} <span title={text}>{text}</span>
{#if selectedBy} {#if selectedBy}
<UserAvatars size="XS" users={selectedBy} /> <UserAvatars size="XS" users={selectedBy} />
{/if} {/if}
@ -227,9 +227,6 @@
.text { .text {
font-weight: 600; font-weight: 600;
font-size: 12px; font-size: 12px;
white-space: nowrap;
overflow: hidden;
text-overflow: ellipsis;
flex: 1 1 auto; flex: 1 1 auto;
color: var(--spectrum-global-color-gray-900); color: var(--spectrum-global-color-gray-900);
order: 2; order: 2;
@ -238,6 +235,11 @@
align-items: center; align-items: center;
gap: 8px; gap: 8px;
} }
.text span {
white-space: nowrap;
overflow: hidden;
text-overflow: ellipsis;
}
.scrollable .text { .scrollable .text {
flex: 0 0 auto; flex: 0 0 auto;
max-width: 160px; max-width: 160px;
View File
@ -6,11 +6,14 @@
export let relationshipTableIdPrimary export let relationshipTableIdPrimary
export let relationshipTableIdSecondary export let relationshipTableIdSecondary
export let editableColumn export let editableColumn
export let linkEditDisabled export let linkEditDisabled = false
export let tableOptions export let tableOptions
export let errors export let errors
export let relationshipOpts1 export let relationshipOpts1
export let relationshipOpts2 export let relationshipOpts2
export let primaryTableChanged
export let secondaryTableChanged
export let primaryDisabled = true
</script> </script>
<div class="relationship-container"> <div class="relationship-container">
@ -19,16 +22,19 @@
disabled={linkEditDisabled} disabled={linkEditDisabled}
bind:value={relationshipPart1} bind:value={relationshipPart1}
options={relationshipOpts1} options={relationshipOpts1}
bind:error={errors.relationshipType}
/> />
</div> </div>
<div class="relationship-label">in</div> <div class="relationship-label">in</div>
<div class="relationship-part"> <div class="relationship-part">
<Select <Select
disabled disabled={primaryDisabled}
options={tableOptions} options={tableOptions}
getOptionLabel={table => table.name} getOptionLabel={table => table.name}
getOptionValue={table => table._id} getOptionValue={table => table._id}
bind:value={relationshipTableIdPrimary} bind:value={relationshipTableIdPrimary}
on:change={primaryTableChanged}
bind:error={errors.fromTable}
/> />
</div> </div>
</div> </div>
@ -46,20 +52,24 @@
<Select <Select
disabled={linkEditDisabled} disabled={linkEditDisabled}
bind:value={relationshipTableIdSecondary} bind:value={relationshipTableIdSecondary}
bind:error={errors.toTable}
options={tableOptions.filter( options={tableOptions.filter(
table => table._id !== relationshipTableIdPrimary table => table._id !== relationshipTableIdPrimary
)} )}
getOptionLabel={table => table.name} getOptionLabel={table => table.name}
getOptionValue={table => table._id} getOptionValue={table => table._id}
on:change={secondaryTableChanged}
/> />
</div> </div>
</div> </div>
<Input {#if editableColumn}
disabled={linkEditDisabled} <Input
label={`Column name in other table`} disabled={linkEditDisabled}
bind:value={editableColumn.fieldName} label={`Column name in other table`}
error={errors.relatedName} bind:value={editableColumn.fieldName}
/> error={errors.relatedName}
/>
{/if}
<style> <style>
.relationship-container { .relationship-container {
View File
@ -1,91 +0,0 @@
<script>
import { Button, ActionButton, Drawer } from "@budibase/bbui"
import { createEventDispatcher } from "svelte"
import ColumnDrawer from "./ColumnEditor/ColumnDrawer.svelte"
import { cloneDeep } from "lodash/fp"
import {
getDatasourceForProvider,
getSchemaForDatasource,
} from "builderStore/dataBinding"
import { currentAsset } from "builderStore"
import { getFields } from "helpers/searchFields"
export let componentInstance
export let value = []
export let allowCellEditing = true
export let subject = "Table"
const dispatch = createEventDispatcher()
let drawer
let boundValue
$: datasource = getDatasourceForProvider($currentAsset, componentInstance)
$: schema = getSchema($currentAsset, datasource)
$: options = allowCellEditing
? Object.keys(schema || {})
: enrichedSchemaFields?.map(field => field.name)
$: sanitisedValue = getValidColumns(value, options)
$: updateBoundValue(sanitisedValue)
$: enrichedSchemaFields = getFields(Object.values(schema || {}), {
allowLinks: true,
})
const getSchema = (asset, datasource) => {
const schema = getSchemaForDatasource(asset, datasource).schema
// Don't show ID and rev in tables
if (schema) {
delete schema._id
delete schema._rev
}
return schema
}
const updateBoundValue = value => {
boundValue = cloneDeep(value)
}
const getValidColumns = (columns, options) => {
if (!Array.isArray(columns) || !columns.length) {
return []
}
// We need to account for legacy configs which would just be an array
// of strings
if (typeof columns[0] === "string") {
columns = columns.map(col => ({
name: col,
displayName: col,
}))
}
return columns.filter(column => {
return options.includes(column.name)
})
}
const open = () => {
updateBoundValue(sanitisedValue)
drawer.show()
}
const save = () => {
dispatch("change", getValidColumns(boundValue, options))
drawer.hide()
}
</script>
<ActionButton on:click={open}>Configure columns</ActionButton>
<Drawer bind:this={drawer} title="{subject} Columns">
<svelte:fragment slot="description">
Configure the columns in your {subject.toLowerCase()}.
</svelte:fragment>
<Button cta slot="buttons" on:click={save}>Save</Button>
<ColumnDrawer
slot="body"
bind:columns={boundValue}
{options}
{schema}
{allowCellEditing}
/>
</Drawer>
View File
@ -37,7 +37,7 @@
} }
$: datasource = getDatasourceForProvider($currentAsset, componentInstance) $: datasource = getDatasourceForProvider($currentAsset, componentInstance)
$: resourceId = datasource.resourceId || datasource.tableId $: resourceId = datasource?.resourceId || datasource?.tableId
$: if (!isEqual(value, cachedValue)) { $: if (!isEqual(value, cachedValue)) {
cachedValue = cloneDeep(value) cachedValue = cloneDeep(value)
View File
@ -3,21 +3,23 @@
Body, Body,
Button, Button,
Combobox, Combobox,
Multiselect,
DatePicker, DatePicker,
DrawerContent, DrawerContent,
Icon, Icon,
Input, Input,
Layout,
Select,
Label, Label,
Layout,
Multiselect,
Select,
} from "@budibase/bbui" } from "@budibase/bbui"
import DrawerBindableInput from "components/common/bindings/DrawerBindableInput.svelte" import DrawerBindableInput from "components/common/bindings/DrawerBindableInput.svelte"
import ClientBindingPanel from "components/common/bindings/ClientBindingPanel.svelte" import ClientBindingPanel from "components/common/bindings/ClientBindingPanel.svelte"
import { generate } from "shortid" import { generate } from "shortid"
import { LuceneUtils, Constants } from "@budibase/frontend-core" import { Constants, LuceneUtils } from "@budibase/frontend-core"
import { getFields } from "helpers/searchFields" import { getFields } from "helpers/searchFields"
import { FieldType } from "@budibase/types"
import { createEventDispatcher, onMount } from "svelte" import { createEventDispatcher, onMount } from "svelte"
import FilterUsers from "./FilterUsers.svelte"
export let schemaFields export let schemaFields
export let filters = [] export let filters = []
@ -29,7 +31,6 @@
const dispatch = createEventDispatcher() const dispatch = createEventDispatcher()
const { OperatorOptions } = Constants const { OperatorOptions } = Constants
const { getValidOperatorsForType } = LuceneUtils
const KeyedFieldRegex = /\d[0-9]*:/g const KeyedFieldRegex = /\d[0-9]*:/g
const behaviourOptions = [ const behaviourOptions = [
{ value: "and", label: "Match all filters" }, { value: "and", label: "Match all filters" },
@ -120,22 +121,19 @@
return enrichedSchemaFields.find(field => field.name === filter.field) return enrichedSchemaFields.find(field => field.name === filter.field)
} }
const santizeTypes = filter => { const sanitizeTypes = filter => {
// Update type based on field // Update type based on field
const fieldSchema = enrichedSchemaFields.find(x => x.name === filter.field) const fieldSchema = enrichedSchemaFields.find(x => x.name === filter.field)
filter.type = fieldSchema?.type filter.type = fieldSchema?.type
filter.subtype = fieldSchema?.subtype
// Update external type based on field // Update external type based on field
filter.externalType = getSchema(filter)?.externalType filter.externalType = getSchema(filter)?.externalType
} }
const santizeOperator = filter => { const sanitizeOperator = filter => {
// Ensure a valid operator is selected // Ensure a valid operator is selected
const operators = getValidOperatorsForType( const operators = getValidOperatorsForType(filter).map(x => x.value)
filter.type,
filter.field,
datasource
).map(x => x.value)
if (!operators.includes(filter.operator)) { if (!operators.includes(filter.operator)) {
filter.operator = operators[0] ?? OperatorOptions.Equals.value filter.operator = operators[0] ?? OperatorOptions.Equals.value
} }
@ -148,7 +146,7 @@
filter.noValue = noValueOptions.includes(filter.operator) filter.noValue = noValueOptions.includes(filter.operator)
} }
const santizeValue = filter => { const sanitizeValue = (filter, previousType) => {
// Check if the operator allows a value at all // Check if the operator allows a value at all
if (filter.noValue) { if (filter.noValue) {
filter.value = null filter.value = null
@ -162,28 +160,47 @@
} }
} else if (filter.type === "array" && filter.valueType === "Value") { } else if (filter.type === "array" && filter.valueType === "Value") {
filter.value = [] filter.value = []
} else if (
previousType !== filter.type &&
(previousType === FieldType.BB_REFERENCE ||
filter.type === FieldType.BB_REFERENCE)
) {
filter.value = filter.type === "array" ? [] : null
} }
} }
const onFieldChange = filter => { const onFieldChange = filter => {
santizeTypes(filter) const previousType = filter.type
santizeOperator(filter) sanitizeTypes(filter)
santizeValue(filter) sanitizeOperator(filter)
sanitizeValue(filter, previousType)
} }
const onOperatorChange = filter => { const onOperatorChange = filter => {
santizeOperator(filter) sanitizeOperator(filter)
santizeValue(filter) sanitizeValue(filter, filter.type)
} }
const onValueTypeChange = filter => { const onValueTypeChange = filter => {
santizeValue(filter) sanitizeValue(filter)
} }
const getFieldOptions = field => { const getFieldOptions = field => {
const schema = enrichedSchemaFields.find(x => x.name === field) const schema = enrichedSchemaFields.find(x => x.name === field)
return schema?.constraints?.inclusion || [] return schema?.constraints?.inclusion || []
} }
const getValidOperatorsForType = filter => {
if (!filter?.field) {
return []
}
return LuceneUtils.getValidOperatorsForType(
{ type: filter.type, subtype: filter.subtype },
filter.field,
datasource
)
}
</script> </script>
<DrawerContent> <DrawerContent>
@ -228,11 +245,7 @@
/> />
<Select <Select
disabled={!filter.field} disabled={!filter.field}
options={getValidOperatorsForType( options={getValidOperatorsForType(filter)}
filter.type,
filter.field,
datasource
)}
bind:value={filter.operator} bind:value={filter.operator}
on:change={() => onOperatorChange(filter)} on:change={() => onOperatorChange(filter)}
placeholder={null} placeholder={null}
@ -285,6 +298,15 @@
timeOnly={getSchema(filter)?.timeOnly} timeOnly={getSchema(filter)?.timeOnly}
bind:value={filter.value} bind:value={filter.value}
/> />
{:else if filter.type === FieldType.BB_REFERENCE}
<FilterUsers
bind:value={filter.value}
multiselect={[
OperatorOptions.In.value,
OperatorOptions.ContainsAny.value,
].includes(filter.operator)}
disabled={filter.noValue}
/>
{:else} {:else}
<DrawerBindableInput disabled /> <DrawerBindableInput disabled />
{/if} {/if}
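The local getValidOperatorsForType wrapper above narrows the LuceneUtils call to the current filter; the underlying call now takes an object rather than a bare type string, e.g. (field name hypothetical):

  LuceneUtils.getValidOperatorsForType(
    { type: "bb_reference", subtype: "user" },
    "createdBy", // hypothetical field name
    datasource
  )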
View File
@ -0,0 +1,34 @@
<script>
import { Select, Multiselect } from "@budibase/bbui"
import { fetchData } from "@budibase/frontend-core"
import { API } from "api"
export let value = null
export let disabled
export let multiselect = false
$: fetch = fetchData({
API,
datasource: {
type: "user",
},
options: {
limit: 100,
},
})
$: options = $fetch.rows
$: component = multiselect ? Multiselect : Select
</script>
<svelte:component
this={component}
bind:value
autocomplete
{options}
getOptionLabel={option => option.email}
getOptionValue={option => option._id}
{disabled}
/>
View File
@ -20,9 +20,7 @@
const getSortableFields = schema => { const getSortableFields = schema => {
return Object.entries(schema || {}) return Object.entries(schema || {})
.filter( .filter(entry => !UNSORTABLE_TYPES.includes(entry[1].type))
entry => !UNSORTABLE_TYPES.includes(entry[1].type) && entry[1].sortable
)
.map(entry => entry[0]) .map(entry => entry[0])
} }
View File
@ -54,6 +54,7 @@
label="App export" label="App export"
on:change={e => { on:change={e => {
file = e.detail?.[0] file = e.detail?.[0]
encrypted = file?.name?.endsWith(".enc.tar.gz")
}} }}
/> />
<Toggle text="Encrypted" bind:value={encrypted} /> <Toggle text="Encrypted" bind:value={encrypted} />
View File
@ -1,133 +1,4 @@
export const FIELDS = { import { FieldType, FieldSubtype } from "@budibase/types"
STRING: {
name: "Text",
type: "string",
icon: "Text",
constraints: {
type: "string",
length: {},
presence: false,
},
},
BARCODEQR: {
name: "Barcode/QR",
type: "barcodeqr",
icon: "Camera",
constraints: {
type: "string",
length: {},
presence: false,
},
},
LONGFORM: {
name: "Long Form Text",
type: "longform",
icon: "TextAlignLeft",
constraints: {
type: "string",
length: {},
presence: false,
},
},
OPTIONS: {
name: "Options",
type: "options",
icon: "Dropdown",
constraints: {
type: "string",
presence: false,
inclusion: [],
},
},
ARRAY: {
name: "Multi-select",
type: "array",
icon: "Duplicate",
constraints: {
type: "array",
presence: false,
inclusion: [],
},
},
NUMBER: {
name: "Number",
type: "number",
icon: "123",
constraints: {
type: "number",
presence: false,
numericality: { greaterThanOrEqualTo: "", lessThanOrEqualTo: "" },
},
},
BIGINT: {
name: "BigInt",
type: "bigint",
icon: "TagBold",
},
BOOLEAN: {
name: "Boolean",
type: "boolean",
icon: "Boolean",
constraints: {
type: "boolean",
presence: false,
},
},
DATETIME: {
name: "Date/Time",
type: "datetime",
icon: "Calendar",
constraints: {
type: "string",
length: {},
presence: false,
datetime: {
latest: "",
earliest: "",
},
},
},
ATTACHMENT: {
name: "Attachment",
type: "attachment",
icon: "Folder",
constraints: {
type: "array",
presence: false,
},
},
LINK: {
name: "Relationship",
type: "link",
icon: "Link",
constraints: {
type: "array",
presence: false,
},
},
FORMULA: {
name: "Formula",
type: "formula",
icon: "Calculator",
constraints: {},
},
JSON: {
name: "JSON",
type: "json",
icon: "Brackets",
constraints: {
type: "object",
presence: false,
},
},
BB_REFERENCE_USER: {
name: "User",
type: "bb_reference",
subtype: "user",
compositeType: "bb_reference_user", // Used for working with the subtype on CreateEditColumn as is it was a primary type
icon: "User",
},
}
export const AUTO_COLUMN_SUB_TYPES = { export const AUTO_COLUMN_SUB_TYPES = {
AUTO_ID: "autoID", AUTO_ID: "autoID",
@ -145,6 +16,151 @@ export const AUTO_COLUMN_DISPLAY_NAMES = {
UPDATED_AT: "Updated At", UPDATED_AT: "Updated At",
} }
export const FIELDS = {
STRING: {
name: "Text",
type: FieldType.STRING,
icon: "Text",
constraints: {
type: "string",
length: {},
presence: false,
},
},
BARCODEQR: {
name: "Barcode/QR",
type: FieldType.BARCODEQR,
icon: "Camera",
constraints: {
type: "string",
length: {},
presence: false,
},
},
LONGFORM: {
name: "Long Form Text",
type: FieldType.LONGFORM,
icon: "TextAlignLeft",
constraints: {
type: "string",
length: {},
presence: false,
},
},
OPTIONS: {
name: "Options",
type: FieldType.OPTIONS,
icon: "Dropdown",
constraints: {
type: "string",
presence: false,
inclusion: [],
},
},
ARRAY: {
name: "Multi-select",
type: FieldType.ARRAY,
icon: "Duplicate",
constraints: {
type: "array",
presence: false,
inclusion: [],
},
},
NUMBER: {
name: "Number",
type: FieldType.NUMBER,
icon: "123",
constraints: {
type: "number",
presence: false,
numericality: { greaterThanOrEqualTo: "", lessThanOrEqualTo: "" },
},
},
BIGINT: {
name: "BigInt",
type: FieldType.BIGINT,
icon: "TagBold",
},
BOOLEAN: {
name: "Boolean",
type: FieldType.BOOLEAN,
icon: "Boolean",
constraints: {
type: "boolean",
presence: false,
},
},
DATETIME: {
name: "Date/Time",
type: FieldType.DATETIME,
icon: "Calendar",
constraints: {
type: "string",
length: {},
presence: false,
datetime: {
latest: "",
earliest: "",
},
},
},
ATTACHMENT: {
name: "Attachment",
type: FieldType.ATTACHMENT,
icon: "Folder",
constraints: {
type: "array",
presence: false,
},
},
LINK: {
name: "Relationship",
type: FieldType.LINK,
icon: "Link",
constraints: {
type: "array",
presence: false,
},
},
AUTO: {
name: "Auto Column",
type: FieldType.AUTO,
icon: "MagicWand",
constraints: {},
},
FORMULA: {
name: "Formula",
type: FieldType.FORMULA,
icon: "Calculator",
constraints: {},
},
JSON: {
name: "JSON",
type: FieldType.JSON,
icon: "Brackets",
constraints: {
type: "object",
presence: false,
},
},
USER: {
name: "User",
type: FieldType.BB_REFERENCE,
subtype: FieldSubtype.USER,
icon: "User",
},
USERS: {
name: "Users",
type: FieldType.BB_REFERENCE,
subtype: FieldSubtype.USERS,
icon: "User",
constraints: {
type: "array",
},
},
}
export const FILE_TYPES = { export const FILE_TYPES = {
IMAGE: ["png", "tiff", "gif", "raw", "jpg", "jpeg"], IMAGE: ["png", "tiff", "gif", "raw", "jpg", "jpeg"],
CODE: ["js", "rs", "py", "java", "rb", "hs", "yml"], CODE: ["js", "rs", "py", "java", "rb", "hs", "yml"],
View File
@ -3,16 +3,17 @@
* e.g. * e.g.
* name all names result * name all names result
* ------ ----------- -------- * ------ ----------- --------
* ("foo") ["foo"] "foo (1)" * ("foo") ["foo"] "foo 1"
* ("foo") ["foo", "foo (1)"] "foo (2)" * ("foo") ["foo", "foo 1"] "foo 2"
* ("foo (1)") ["foo", "foo (1)"] "foo (2)" * ("foo 1") ["foo", "foo 1"] "foo 2"
* ("foo") ["foo", "foo (2)"] "foo (1)" * ("foo") ["foo", "foo 2"] "foo 1"
* *
* Repl * Repl
*/ */
export const duplicateName = (name, allNames) => { export const duplicateName = (name, allNames) => {
const baseName = name.split(" (")[0] const duplicatePattern = new RegExp(`\\s(\\d+)$`)
const isDuplicate = new RegExp(`${baseName}\\s\\((\\d+)\\)$`) const baseName = name.split(duplicatePattern)[0]
const isDuplicate = new RegExp(`${baseName}\\s(\\d+)$`)
// get the sequence from matched names // get the sequence from matched names
const sequence = [] const sequence = []
@ -28,7 +29,6 @@ export const duplicateName = (name, allNames) => {
return false return false
}) })
sequence.sort((a, b) => a - b) sequence.sort((a, b) => a - b)
// get the next number in the sequence // get the next number in the sequence
let number let number
if (sequence.length === 0) { if (sequence.length === 0) {
@ -46,5 +46,5 @@ export const duplicateName = (name, allNames) => {
} }
} }
return `${baseName} (${number})` return `${baseName} ${number}`
} }
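Worked examples of the new naming scheme, matching the updated doc comment and the tests below:

  // duplicateName("foo", ["foo"])                   -> "foo 1"
  // duplicateName("foo", ["foo", "foo 1"])          -> "foo 2"
  // duplicateName("foo", ["foo", "foo 2", "foo 3"]) -> "foo 1" (fills the gap)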
View File
@ -9,34 +9,34 @@ describe("duplicate", () => {
const duplicate = duplicateName(name, names) const duplicate = duplicateName(name, names)
expect(duplicate).toBe("foo (1)") expect(duplicate).toBe("foo 1")
}) })
it("with multiple existing", async () => { it("with multiple existing", async () => {
const names = ["foo", "foo (1)", "foo (2)"] const names = ["foo", "foo 1", "foo 2"]
const name = "foo" const name = "foo"
const duplicate = duplicateName(name, names) const duplicate = duplicateName(name, names)
expect(duplicate).toBe("foo (3)") expect(duplicate).toBe("foo 3")
}) })
it("with mixed multiple existing", async () => { it("with mixed multiple existing", async () => {
const names = ["foo", "foo (1)", "foo (2)", "bar", "bar (1)", "bar (2)"] const names = ["foo", "foo 1", "foo 2", "bar", "bar 1", "bar 2"]
const name = "foo" const name = "foo"
const duplicate = duplicateName(name, names) const duplicate = duplicateName(name, names)
expect(duplicate).toBe("foo (3)") expect(duplicate).toBe("foo 3")
}) })
it("with incomplete sequence", async () => { it("with incomplete sequence", async () => {
const names = ["foo", "foo (2)", "foo (3)"] const names = ["foo", "foo 2", "foo 3"]
const name = "foo" const name = "foo"
const duplicate = duplicateName(name, names) const duplicate = duplicateName(name, names)
expect(duplicate).toBe("foo (1)") expect(duplicate).toBe("foo 1")
}) })
}) })
}) })
View File
@ -118,7 +118,7 @@
} }
const getOperatorOptions = condition => { const getOperatorOptions = condition => {
return LuceneUtils.getValidOperatorsForType(condition.valueType) return LuceneUtils.getValidOperatorsForType({ type: condition.valueType })
} }
const onOperatorChange = (condition, newOperator) => { const onOperatorChange = (condition, newOperator) => {
@ -137,9 +137,9 @@
condition.referenceValue = null condition.referenceValue = null
// Ensure a valid operator is set // Ensure a valid operator is set
const validOperators = LuceneUtils.getValidOperatorsForType(newType).map( const validOperators = LuceneUtils.getValidOperatorsForType({
x => x.value type: newType,
) }).map(x => x.value)
if (!validOperators.includes(condition.operator)) { if (!validOperators.includes(condition.operator)) {
condition.operator = condition.operator =
validOperators[0] ?? Constants.OperatorOptions.Equals.value validOperators[0] ?? Constants.OperatorOptions.Equals.value
View File
@ -13,7 +13,7 @@
import ExportAppModal from "components/start/ExportAppModal.svelte" import ExportAppModal from "components/start/ExportAppModal.svelte"
import ImportAppModal from "components/start/ImportAppModal.svelte" import ImportAppModal from "components/start/ImportAppModal.svelte"
$: filteredApps = $apps.filter(app => app.devId == $store.appId) $: filteredApps = $apps.filter(app => app.devId === $store.appId)
$: app = filteredApps.length ? filteredApps[0] : {} $: app = filteredApps.length ? filteredApps[0] : {}
$: appDeployed = app?.status === AppStatus.DEPLOYED $: appDeployed = app?.status === AppStatus.DEPLOYED
View File
@ -123,7 +123,10 @@
prevUserSearch = search prevUserSearch = search
try { try {
userPageInfo.loading() userPageInfo.loading()
await users.search({ userPage, email: search }) await users.search({
bookmark: userPage,
query: { string: { email: search } },
})
userPageInfo.fetched($users.hasNextPage, $users.nextPage) userPageInfo.fetched($users.hasNextPage, $users.nextPage)
} catch (error) { } catch (error) {
notifications.error("Error getting user list") notifications.error("Error getting user list")
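The search call now passes a pagination bookmark and a structured query instead of bare page/email arguments; the shape, per the endpoint change later in this commit (the `string` operator does a starts-with match, as the old JSDoc described):

  await users.search({
    bookmark: userPage, // pagination cursor, previously `page`
    query: { string: { email: search } }, // starts-with match on email
  })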
View File
@ -31,7 +31,10 @@
prevSearch = search prevSearch = search
try { try {
pageInfo.loading() pageInfo.loading()
await users.search({ page, email: search }) await users.search({
bookmark: page,
query: { string: { email: search } },
})
pageInfo.fetched($users.hasNextPage, $users.nextPage) pageInfo.fetched($users.hasNextPage, $users.nextPage)
} catch (error) { } catch (error) {
notifications.error("Error getting user list") notifications.error("Error getting user list")
View File
@ -9,15 +9,19 @@ import { API } from "api"
import { DatasourceFeature } from "@budibase/types" import { DatasourceFeature } from "@budibase/types"
import { TableNames } from "constants" import { TableNames } from "constants"
export class ImportTableError extends Error { class TableImportError extends Error {
constructor(message) { constructor(errors) {
super(message) super()
const [title, description] = message.split(" - ") this.name = "TableImportError"
this.errors = errors
}
this.name = "TableSelectionError" get description() {
// Capitalize the first character of both the title and description let message = ""
this.title = title[0].toUpperCase() + title.substr(1) for (const key in this.errors) {
this.description = description[0].toUpperCase() + description.substr(1) message += `${key}: ${this.errors[key]}\n`
}
return message
} }
} }
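A sketch of what the new description getter yields for a typical errors map (table names and messages are hypothetical):

  // const err = new TableImportError({
  //   users: "unsupported column type",
  //   orders: "no primary key",
  // })
  // err.description
  // -> "users: unsupported column type\norders: no primary key\n"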
@ -25,7 +29,6 @@ export function createDatasourcesStore() {
const store = writable({ const store = writable({
list: [], list: [],
selectedDatasourceId: null, selectedDatasourceId: null,
schemaError: null,
}) })
const derivedStore = derived([store, tables], ([$store, $tables]) => { const derivedStore = derived([store, tables], ([$store, $tables]) => {
@ -75,18 +78,13 @@ export function createDatasourcesStore() {
store.update(state => ({ store.update(state => ({
...state, ...state,
selectedDatasourceId: id, selectedDatasourceId: id,
// Remove any possible schema error
schemaError: null,
})) }))
} }
const updateDatasource = response => { const updateDatasource = response => {
const { datasource, error } = response const { datasource, errors } = response
if (error) { if (errors && Object.keys(errors).length > 0) {
store.update(state => ({ throw new TableImportError(errors)
...state,
schemaError: error,
}))
} }
replaceDatasource(datasource._id, datasource) replaceDatasource(datasource._id, datasource)
select(datasource._id) select(datasource._id)
@ -94,20 +92,11 @@ export function createDatasourcesStore() {
} }
const updateSchema = async (datasource, tablesFilter) => { const updateSchema = async (datasource, tablesFilter) => {
try { const response = await API.buildDatasourceSchema({
const response = await API.buildDatasourceSchema({ datasourceId: datasource?._id,
datasourceId: datasource?._id, tablesFilter,
tablesFilter, })
}) updateDatasource(response)
updateDatasource(response)
} catch (e) {
// buildDatasourceSchema call returns user presentable errors with two parts divided with a " - ".
if (e.message.split(" - ").length === 2) {
throw new ImportTableError(e.message)
} else {
throw e
}
}
} }
const sourceCount = source => { const sourceCount = source => {
@ -172,12 +161,6 @@ export function createDatasourcesStore() {
replaceDatasource(datasource._id, null) replaceDatasource(datasource._id, null)
} }
const removeSchemaError = () => {
store.update(state => {
return { ...state, schemaError: null }
})
}
const replaceDatasource = (datasourceId, datasource) => { const replaceDatasource = (datasourceId, datasource) => {
if (!datasourceId) { if (!datasourceId) {
return return
@ -230,7 +213,6 @@ export function createDatasourcesStore() {
create, create,
update, update,
delete: deleteDatasource, delete: deleteDatasource,
removeSchemaError,
replaceDatasource, replaceDatasource,
getTableNames, getTableNames,
} }
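Since updateSchema no longer swallows errors into schemaError state, consumers catch them directly, as the table selection store earlier in this commit now does. A minimal sketch of the consumer-side flow:

  try {
    await datasources.updateSchema(datasource, tablesFilter)
  } catch (err) {
    // err is a TableImportError; err.description lists per-table failures
    errorStore.set(err)
  }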
View File
@ -4,7 +4,7 @@ export { views } from "./views"
export { viewsV2 } from "./viewsV2" export { viewsV2 } from "./viewsV2"
export { permissions } from "./permissions" export { permissions } from "./permissions"
export { roles } from "./roles" export { roles } from "./roles"
export { datasources, ImportTableError } from "./datasources" export { datasources } from "./datasources"
export { integrations } from "./integrations" export { integrations } from "./integrations"
export { sortedIntegrations } from "./sortedIntegrations" export { sortedIntegrations } from "./sortedIntegrations"
export { queries } from "./queries" export { queries } from "./queries"
View File
@ -57,7 +57,8 @@ export async function checkDockerConfigured() {
"docker/docker-compose has not been installed, please follow instructions at: https://docs.budibase.com/docs/docker-compose" "docker/docker-compose has not been installed, please follow instructions at: https://docs.budibase.com/docs/docker-compose"
const docker = await lookpath("docker") const docker = await lookpath("docker")
const compose = await lookpath("docker-compose") const compose = await lookpath("docker-compose")
if (!docker || !compose) { const composeV2 = await lookpath("docker compose")
if (!docker || (!compose && !composeV2)) {
throw error throw error
} }
} }
View File
@ -12,6 +12,10 @@ if (!process.argv[0].includes("node")) {
checkForBinaries() checkForBinaries()
} }
function localPrebuildPath() {
return join(process.execPath, "..", PREBUILDS)
}
function checkForBinaries() { function checkForBinaries() {
const readDir = join(__filename, "..", "..", "..", "cli", PREBUILDS, ARCH) const readDir = join(__filename, "..", "..", "..", "cli", PREBUILDS, ARCH)
if (fs.existsSync(PREBUILD_DIR) || !fs.existsSync(readDir)) { if (fs.existsSync(PREBUILD_DIR) || !fs.existsSync(readDir)) {
@ -19,17 +23,21 @@ function checkForBinaries() {
} }
const natives = fs.readdirSync(readDir) const natives = fs.readdirSync(readDir)
if (fs.existsSync(readDir)) { if (fs.existsSync(readDir)) {
const writePath = join(process.execPath, PREBUILDS, ARCH) const writePath = join(localPrebuildPath(), ARCH)
fs.mkdirSync(writePath, { recursive: true }) fs.mkdirSync(writePath, { recursive: true })
for (let native of natives) { for (let native of natives) {
const filename = `${native.split(".fake")[0]}.node` const filename = `${native.split(".fake")[0]}.node`
fs.cpSync(join(readDir, native), join(writePath, filename)) fs.cpSync(join(readDir, native), join(writePath, filename))
} }
console.log("copied something")
} }
} }
function cleanup(evt?: number) { function cleanup(evt?: number) {
// cleanup prebuilds first
const path = localPrebuildPath()
if (fs.existsSync(path)) {
fs.rmSync(path, { recursive: true })
}
if (evt && !isNaN(evt)) { if (evt && !isNaN(evt)) {
return return
} }
@ -41,10 +49,6 @@ function cleanup(evt?: number) {
) )
console.error(error(evt)) console.error(error(evt))
} }
const path = join(process.execPath, PREBUILDS)
if (fs.existsSync(path)) {
fs.rmSync(path, { recursive: true })
}
} }
const events = ["exit", "SIGINT", "SIGUSR1", "SIGUSR2", "uncaughtException"] const events = ["exit", "SIGINT", "SIGUSR1", "SIGUSR2", "uncaughtException"]
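localPrebuildPath resolves next to the packaged binary, so cleanup now removes exactly what checkForBinaries wrote. An illustrative resolution, assuming PREBUILDS === "prebuilds" (the constant's value and the paths below are assumptions, not taken from this diff):

  // process.execPath    -> "/usr/local/bin/budi"
  // localPrebuildPath() -> "/usr/local/bin/prebuilds"
  // writePath           -> "/usr/local/bin/prebuilds/<ARCH>"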
View File
@ -3419,6 +3419,17 @@
"value": "custom" "value": "custom"
} }
}, },
{
"type": "event",
"label": "On change",
"key": "onChange",
"context": [
{
"label": "Field Value",
"key": "value"
}
]
},
{ {
"type": "validation/string", "type": "validation/string",
"label": "Validation", "label": "Validation",
@ -5598,6 +5609,21 @@
} }
] ]
}, },
{
"type": "event",
"label": "On row click",
"key": "onRowClick",
"context": [
{
"label": "Clicked row",
"key": "row"
}
],
"dependsOn": {
"setting": "allowEditRows",
"value": false
}
},
{ {
"type": "boolean", "type": "boolean",
"label": "Add rows", "label": "Add rows",
@ -5673,11 +5699,6 @@
"label": "Validation", "label": "Validation",
"key": "validation" "key": "validation"
}, },
{
"type": "filter/relationship",
"label": "Filtering",
"key": "filter"
},
{ {
"type": "boolean", "type": "boolean",
"label": "Search", "label": "Search",
View File
@ -14,12 +14,14 @@
export let initialSortOrder = null export let initialSortOrder = null
export let fixedRowHeight = null export let fixedRowHeight = null
export let columns = null export let columns = null
export let onRowClick = null
const component = getContext("component") const component = getContext("component")
const { styleable, API, builderStore, notificationStore } = getContext("sdk") const { styleable, API, builderStore, notificationStore } = getContext("sdk")
$: columnWhitelist = columns?.map(col => col.name) $: columnWhitelist = columns?.map(col => col.name)
$: schemaOverrides = getSchemaOverrides(columns) $: schemaOverrides = getSchemaOverrides(columns)
$: handleRowClick = allowEditRows ? undefined : onRowClick
const getSchemaOverrides = columns => { const getSchemaOverrides = columns => {
let overrides = {} let overrides = {}
@ -56,6 +58,7 @@
showControls={false} showControls={false}
notifySuccess={notificationStore.actions.success} notifySuccess={notificationStore.actions.success}
notifyError={notificationStore.actions.error} notifyError={notificationStore.actions.error}
on:rowclick={e => handleRowClick?.({ row: e.detail })}
/> />
</div> </div>
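The handler is deliberately gated on edit mode, mirroring the manifest's dependsOn rule above. In effect:

  // allowEditRows === true  -> handleRowClick is undefined, clicks ignored
  // allowEditRows === false -> handleRowClick === onRowClick, and the grid's
  //                            rowclick event invokes it with { row: e.detail }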
View File
@ -63,7 +63,7 @@
// Ensure a valid operator is set // Ensure a valid operator is set
const validOperators = LuceneUtils.getValidOperatorsForType( const validOperators = LuceneUtils.getValidOperatorsForType(
expression.type, { type: expression.type },
expression.field, expression.field,
datasource datasource
).map(x => x.value) ).map(x => x.value)
@ -125,7 +125,7 @@
<Select <Select
disabled={!filter.field} disabled={!filter.field}
options={LuceneUtils.getValidOperatorsForType( options={LuceneUtils.getValidOperatorsForType(
filter.type, { type: filter.type, subtype: filter.subtype },
filter.field, filter.field,
datasource datasource
)} )}
View File
@ -1,9 +1,28 @@
<script> <script>
import RelationshipField from "./RelationshipField.svelte" import RelationshipField from "./RelationshipField.svelte"
import { sdk } from "@budibase/shared-core"
export let defaultValue
function updateUserIDs(value) {
if (Array.isArray(value)) {
return value.map(val => sdk.users.getGlobalUserID(val))
} else {
return sdk.users.getGlobalUserID(value)
}
}
function updateReferences(value) {
if (sdk.users.containsUserID(value)) {
return updateUserIDs(value)
}
return value
}
</script> </script>
<RelationshipField <RelationshipField
{...$$props} {...$$props}
datasourceType={"user"} datasourceType={"user"}
primaryDisplay={"email"} primaryDisplay={"email"}
defaultValue={updateReferences(defaultValue)}
/> />
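updateReferences normalises any row-scoped user ids in the default value to global user ids before they reach the field. An illustrative sketch; the id shapes below are assumptions, not taken from this diff:

  // updateReferences("ro_ta_users_us_123")         -> "us_123"
  // updateReferences(["ro_ta_users_us_1", "us_2"]) -> ["us_1", "us_2"]
  // values without user ids pass through unchanged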
View File
@ -128,6 +128,7 @@
<div class="manual-input"> <div class="manual-input">
<Input <Input
bind:value bind:value
updateOnChange={false}
on:change={() => { on:change={() => {
dispatch("change", value) dispatch("change", value)
}} }}
View File
@ -105,19 +105,25 @@
} }
} }
$: fetchRows(searchTerm, primaryDisplay) $: fetchRows(searchTerm, primaryDisplay, defaultValue)
const fetchRows = (searchTerm, primaryDisplay) => { const fetchRows = async (searchTerm, primaryDisplay, defaultVal) => {
const allRowsFetched = const allRowsFetched =
$fetch.loaded && $fetch.loaded &&
!Object.keys($fetch.query?.string || {}).length && !Object.keys($fetch.query?.string || {}).length &&
!$fetch.hasNextPage !$fetch.hasNextPage
// Don't request until we have the primary display // Don't request until we have the primary display or default value has been fetched
if (!allRowsFetched && primaryDisplay) { if (allRowsFetched || !primaryDisplay) {
fetch.update({ return
query: { string: { [primaryDisplay]: searchTerm } }, }
if (defaultVal && !optionsObj[defaultVal]) {
await fetch.update({
query: { equal: { _id: defaultVal } },
}) })
} }
await fetch.update({
query: { string: { [primaryDisplay]: searchTerm } },
})
} }
const flatten = values => { const flatten = values => {
@ -160,7 +166,9 @@
const handleChange = value => { const handleChange = value => {
const changed = fieldApi.setValue(value) const changed = fieldApi.setValue(value)
if (onChange && changed) { if (onChange && changed) {
onChange({ value }) onChange({
value,
})
} }
} }
View File
@ -10,24 +10,28 @@ export const buildUserEndpoints = API => ({
/** /**
* Gets a list of users in the current tenant. * Gets a list of users in the current tenant.
* @param {string} page The page to retrieve * @param {string} bookmark The page to retrieve
* @param {string} search The starts with string to search username/email by. * @param {object} query search filters for lookup by user (all operators not supported).
* @param {string} appId Facilitate app/role based user searching * @param {string} appId Facilitate app/role based user searching
* @param {boolean} paginated Allow the disabling of pagination * @param {boolean} paginate Allow the disabling of pagination
* @param {number} limit How many users to retrieve in a single search
*/ */
searchUsers: async ({ paginated, page, email, appId } = {}) => { searchUsers: async ({ paginate, bookmark, query, appId, limit } = {}) => {
const opts = {} const opts = {}
if (page) { if (bookmark) {
opts.page = page opts.bookmark = bookmark
} }
if (email) { if (query) {
opts.email = email opts.query = query
} }
if (appId) { if (appId) {
opts.appId = appId opts.appId = appId
} }
if (typeof paginated === "boolean") { if (typeof paginate === "boolean") {
opts.paginated = paginated opts.paginate = paginate
}
if (limit) {
opts.limit = limit
} }
return await API.post({ return await API.post({
url: `/api/global/users/search`, url: `/api/global/users/search`,
View File
@ -1,7 +1,7 @@
<script> <script>
import { getContext } from "svelte" import { getContext } from "svelte"
import RelationshipCell from "./RelationshipCell.svelte" import RelationshipCell from "./RelationshipCell.svelte"
import { FieldSubtype } from "@budibase/types" import { FieldSubtype, RelationshipType } from "@budibase/types"
export let api export let api
@ -12,10 +12,14 @@
...$$props.schema, ...$$props.schema,
// This is not really used, just adding some content to be able to render the relationship cell // This is not really used, just adding some content to be able to render the relationship cell
tableId: "external", tableId: "external",
relationshipType:
subtype === FieldSubtype.USER
? RelationshipType.ONE_TO_MANY
: RelationshipType.MANY_TO_MANY,
} }
async function searchFunction(searchParams) { async function searchFunction(searchParams) {
if (subtype !== FieldSubtype.USER) { if (subtype !== FieldSubtype.USER && subtype !== FieldSubtype.USERS) {
throw `Search for '${subtype}' not implemented` throw `Search for '${subtype}' not implemented`
} }
@ -23,7 +27,7 @@
const email = Object.values(searchParams.query.string)[0] const email = Object.values(searchParams.query.string)[0]
const results = await API.searchUsers({ const results = await API.searchUsers({
email, query: { string: { email } },
}) })
// Mapping to the expected data within RelationshipCell // Mapping to the expected data within RelationshipCell
View File
@ -17,13 +17,24 @@
const { config, dispatch, selectedRows } = getContext("grid") const { config, dispatch, selectedRows } = getContext("grid")
const svelteDispatch = createEventDispatcher() const svelteDispatch = createEventDispatcher()
const select = () => { const select = e => {
e.stopPropagation()
svelteDispatch("select") svelteDispatch("select")
const id = row?._id const id = row?._id
if (id) { if (id) {
selectedRows.actions.toggleRow(id) selectedRows.actions.toggleRow(id)
} }
} }
const bulkDelete = e => {
e.stopPropagation()
dispatch("request-bulk-delete")
}
const expand = e => {
e.stopPropagation()
svelteDispatch("expand")
}
</script> </script>
<GridCell <GridCell
@ -56,7 +67,7 @@
{/if} {/if}
{/if} {/if}
{#if rowSelected && $config.canDeleteRows} {#if rowSelected && $config.canDeleteRows}
<div class="delete" on:click={() => dispatch("request-bulk-delete")}> <div class="delete" on:click={bulkDelete}>
<Icon <Icon
name="Delete" name="Delete"
size="S" size="S"
@ -65,12 +76,7 @@
</div> </div>
{:else} {:else}
<div class="expand" class:visible={$config.canExpandRows && expandable}> <div class="expand" class:visible={$config.canExpandRows && expandable}>
<Icon <Icon size="S" name="Maximize" hoverable on:click={expand} />
size="S"
name="Maximize"
hoverable
on:click={() => svelteDispatch("expand")}
/>
</div> </div>
{/if} {/if}
</div> </div>
View File
@ -1,7 +1,8 @@
<script> <script>
import { getContext, onMount, tick } from "svelte" import { getContext, onMount, tick } from "svelte"
import GridCell from "./GridCell.svelte" import { canBeDisplayColumn } from "@budibase/shared-core"
import { Icon, Popover, Menu, MenuItem, clickOutside } from "@budibase/bbui" import { Icon, Popover, Menu, MenuItem, clickOutside } from "@budibase/bbui"
import GridCell from "./GridCell.svelte"
import { getColumnIcon } from "../lib/utils" import { getColumnIcon } from "../lib/utils"
export let column export let column
@ -24,14 +25,6 @@
datasource, datasource,
} = getContext("grid") } = getContext("grid")
const bannedDisplayColumnTypes = [
"link",
"array",
"attachment",
"boolean",
"json",
]
let anchor let anchor
let open = false let open = false
let editIsOpen = false let editIsOpen = false
@ -231,8 +224,7 @@
<MenuItem <MenuItem
icon="Label" icon="Label"
on:click={makeDisplayColumn} on:click={makeDisplayColumn}
disabled={idx === "sticky" || disabled={idx === "sticky" || !canBeDisplayColumn(column.schema.type)}
bannedDisplayColumnTypes.includes(column.schema.type)}
> >
Use as display column Use as display column
</MenuItem> </MenuItem>
View File
@ -35,7 +35,7 @@
</script> </script>
<div bind:this={body} class="grid-body"> <div bind:this={body} class="grid-body">
<GridScrollWrapper scrollHorizontally scrollVertically wheelInteractive> <GridScrollWrapper scrollHorizontally scrollVertically attachHandlers>
{#each $renderedRows as row, idx} {#each $renderedRows as row, idx}
<GridRow <GridRow
{row} {row}
View File
@ -17,6 +17,7 @@
columnHorizontalInversionIndex, columnHorizontalInversionIndex,
contentLines, contentLines,
isDragging, isDragging,
dispatch,
} = getContext("grid") } = getContext("grid")
$: rowSelected = !!$selectedRows[row._id] $: rowSelected = !!$selectedRows[row._id]
@ -30,6 +31,7 @@
on:focus on:focus
on:mouseenter={$isDragging ? null : () => ($hoveredRowId = row._id)} on:mouseenter={$isDragging ? null : () => ($hoveredRowId = row._id)}
on:mouseleave={$isDragging ? null : () => ($hoveredRowId = null)} on:mouseleave={$isDragging ? null : () => ($hoveredRowId = null)}
on:click={() => dispatch("rowclick", row)}
> >
{#each $renderedColumns as column, columnIdx (column.name)} {#each $renderedColumns as column, columnIdx (column.name)}
{@const cellId = `${row._id}-${column.name}`} {@const cellId = `${row._id}-${column.name}`}
View File
@ -17,7 +17,11 @@
export let scrollVertically = false export let scrollVertically = false
export let scrollHorizontally = false export let scrollHorizontally = false
export let wheelInteractive = false export let attachHandlers = false
// Used for tracking touch events
let initialTouchX
let initialTouchY
$: style = generateStyle($scroll, $rowHeight, $hiddenColumnsWidth) $: style = generateStyle($scroll, $rowHeight, $hiddenColumnsWidth)
@ -27,17 +31,47 @@
return `transform: translate3d(${offsetX}px, ${offsetY}px, 0);` return `transform: translate3d(${offsetX}px, ${offsetY}px, 0);`
} }
// Handles a wheel even and updates the scroll offsets // Handles a mouse wheel event and updates scroll state
const handleWheel = e => { const handleWheel = e => {
e.preventDefault() e.preventDefault()
debouncedHandleWheel(e.deltaX, e.deltaY, e.clientY) updateScroll(e.deltaX, e.deltaY, e.clientY)
// If a context menu was visible, hide it // If a context menu was visible, hide it
if ($menu.visible) { if ($menu.visible) {
menu.actions.close() menu.actions.close()
} }
} }
const debouncedHandleWheel = domDebounce((deltaX, deltaY, clientY) => {
// Handles touch start events
const handleTouchStart = e => {
if (!e.touches?.[0]) return
initialTouchX = e.touches[0].clientX
initialTouchY = e.touches[0].clientY
}
// Handles touch move events and updates scroll state
const handleTouchMove = e => {
if (!e.touches?.[0]) return
e.preventDefault()
// Compute delta from previous event, and update scroll
const deltaX = initialTouchX - e.touches[0].clientX
const deltaY = initialTouchY - e.touches[0].clientY
updateScroll(deltaX, deltaY)
// Store position to reference in next event
initialTouchX = e.touches[0].clientX
initialTouchY = e.touches[0].clientY
// If a context menu was visible, hide it
if ($menu.visible) {
menu.actions.close()
}
}
// Updates the scroll offset by a certain delta, and ensure scrolling
// stays within sensible bounds. Debounced for performance.
const updateScroll = domDebounce((deltaX, deltaY, clientY) => {
const { top, left } = $scroll const { top, left } = $scroll
// Calculate new scroll top // Calculate new scroll top
@ -55,15 +89,19 @@
}) })
// Hover row under cursor // Hover row under cursor
const y = clientY - $bounds.top + (newScrollTop % $rowHeight) if (clientY != null) {
const hoveredRow = $renderedRows[Math.floor(y / $rowHeight)] const y = clientY - $bounds.top + (newScrollTop % $rowHeight)
hoveredRowId.set(hoveredRow?._id) const hoveredRow = $renderedRows[Math.floor(y / $rowHeight)]
hoveredRowId.set(hoveredRow?._id)
}
}) })
</script> </script>
<div <div
class="outer" class="outer"
on:wheel={wheelInteractive ? handleWheel : null} on:wheel={attachHandlers ? handleWheel : null}
on:touchstart={attachHandlers ? handleTouchStart : null}
on:touchmove={attachHandlers ? handleTouchMove : null}
on:click|self={() => ($focusedCellId = null)} on:click|self={() => ($focusedCellId = null)}
> >
<div {style} class="inner"> <div {style} class="inner">
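One detail worth spelling out in handleTouchMove: the delta is computed as initial minus current, so content follows the finger:

  // finger drags down 20px  ->  clientY grows by 20
  // deltaY = initialTouchY - e.touches[0].clientY = -20
  // updateScroll(0, -20) lowers scroll top, moving content down with
  // the finger (wheel deltas already arrive with the opposite sign)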
View File
@ -205,7 +205,7 @@
{/if} {/if}
</div> </div>
<div class="normal-columns" transition:fade|local={{ duration: 130 }}> <div class="normal-columns" transition:fade|local={{ duration: 130 }}>
<GridScrollWrapper scrollHorizontally wheelInteractive> <GridScrollWrapper scrollHorizontally attachHandlers>
<div class="row"> <div class="row">
{#each $renderedColumns as column, columnIdx} {#each $renderedColumns as column, columnIdx}
{@const cellId = `new-${column.name}`} {@const cellId = `new-${column.name}`}
View File
@ -64,7 +64,7 @@
</div> </div>
<div class="content" on:mouseleave={() => ($hoveredRowId = null)}> <div class="content" on:mouseleave={() => ($hoveredRowId = null)}>
<GridScrollWrapper scrollVertically wheelInteractive> <GridScrollWrapper scrollVertically attachHandlers>
{#each $renderedRows as row, idx} {#each $renderedRows as row, idx}
{@const rowSelected = !!$selectedRows[row._id]} {@const rowSelected = !!$selectedRows[row._id]}
{@const rowHovered = $hoveredRowId === row._id} {@const rowHovered = $hoveredRowId === row._id}
@ -74,6 +74,7 @@
class="row" class="row"
on:mouseenter={$isDragging ? null : () => ($hoveredRowId = row._id)} on:mouseenter={$isDragging ? null : () => ($hoveredRowId = row._id)}
on:mouseleave={$isDragging ? null : () => ($hoveredRowId = null)} on:mouseleave={$isDragging ? null : () => ($hoveredRowId = null)}
on:click={() => dispatch("rowclick", row)}
> >
<GutterCell {row} {rowFocused} {rowHovered} {rowSelected} /> <GutterCell {row} {rowFocused} {rowHovered} {rowSelected} />
{#if $stickyColumn} {#if $stickyColumn}
View File
@ -21,6 +21,7 @@ const TypeIconMap = {
bigint: "TagBold", bigint: "TagBold",
bb_reference: { bb_reference: {
user: "User", user: "User",
users: "UserGroup",
}, },
} }
View File
@@ -53,18 +53,27 @@
  }
}

+  const getLocation = e => {
+    return {
+      y: e.touches?.[0]?.clientY ?? e.clientY,
+      x: e.touches?.[0]?.clientX ?? e.clientX,
+    }
+  }
+
// V scrollbar drag handlers
const startVDragging = e => {
  e.preventDefault()
-  initialMouse = e.clientY
+  initialMouse = getLocation(e).y
  initialScroll = $scrollTop
  document.addEventListener("mousemove", moveVDragging)
+  document.addEventListener("touchmove", moveVDragging)
  document.addEventListener("mouseup", stopVDragging)
+  document.addEventListener("touchend", stopVDragging)
  isDraggingV = true
  closeMenu()
}
const moveVDragging = domDebounce(e => {
-  const delta = e.clientY - initialMouse
+  const delta = getLocation(e).y - initialMouse
  const weight = delta / availHeight
  const newScrollTop = initialScroll + weight * $maxScrollTop
  scroll.update(state => ({
@@ -74,22 +83,26 @@
  })
const stopVDragging = () => {
  document.removeEventListener("mousemove", moveVDragging)
+  document.removeEventListener("touchmove", moveVDragging)
  document.removeEventListener("mouseup", stopVDragging)
+  document.removeEventListener("touchend", stopVDragging)
  isDraggingV = false
}
// H scrollbar drag handlers
const startHDragging = e => {
  e.preventDefault()
-  initialMouse = e.clientX
+  initialMouse = getLocation(e).x
  initialScroll = $scrollLeft
  document.addEventListener("mousemove", moveHDragging)
+  document.addEventListener("touchmove", moveHDragging)
  document.addEventListener("mouseup", stopHDragging)
+  document.addEventListener("touchend", stopHDragging)
  isDraggingH = true
  closeMenu()
}
const moveHDragging = domDebounce(e => {
-  const delta = e.clientX - initialMouse
+  const delta = getLocation(e).x - initialMouse
  const weight = delta / availWidth
  const newScrollLeft = initialScroll + weight * $maxScrollLeft
  scroll.update(state => ({
@@ -99,7 +112,9 @@
  })
const stopHDragging = () => {
  document.removeEventListener("mousemove", moveHDragging)
+  document.removeEventListener("touchmove", moveHDragging)
  document.removeEventListener("mouseup", stopHDragging)
+  document.removeEventListener("touchend", stopHDragging)
  isDraggingH = false
}
</script>
@@ -109,6 +124,7 @@
  class="v-scrollbar"
  style="--size:{ScrollBarSize}px; top:{barTop}px; height:{barHeight}px;"
  on:mousedown={startVDragging}
+  on:touchstart={startVDragging}
  class:dragging={isDraggingV}
/>
{/if}
@@ -117,6 +133,7 @@
  class="h-scrollbar"
  style="--size:{ScrollBarSize}px; left:{barLeft}px; width:{barWidth}px;"
  on:mousedown={startHDragging}
+  on:touchstart={startHDragging}
  class:dragging={isDraggingH}
/>
{/if}
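`getLocation` is what lets one set of drag handlers serve both mouse and touch input: touch events carry coordinates on `e.touches[0]`, mouse events on the event itself. An equivalent standalone helper:

```ts
// Pointer-agnostic coordinate lookup, equivalent to getLocation above
const getLocation = (e: MouseEvent | TouchEvent) => {
  if ("touches" in e && e.touches.length > 0) {
    return { x: e.touches[0].clientX, y: e.touches[0].clientY }
  }
  return { x: (e as MouseEvent).clientX, y: (e as MouseEvent).clientY }
}

// Both handler families then share one code path, e.g.:
// const delta = getLocation(e).y - initialMouse
```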

View File

@@ -1,4 +1,5 @@
import { writable, get } from "svelte/store"
+ import { Helpers } from "@budibase/bbui"
export const createStores = () => {
  const copiedCell = writable(null)
@@ -12,7 +13,16 @@ export const createActions = context => {
  const { copiedCell, focusedCellAPI } = context
  const copy = () => {
-    copiedCell.set(get(focusedCellAPI)?.getValue())
+    const value = get(focusedCellAPI)?.getValue()
+    copiedCell.set(value)
+
+    // Also copy a stringified version to the clipboard
+    let stringified = ""
+    if (value != null && value !== "") {
+      // Only conditionally stringify to avoid redundant quotes around text
+      stringified = typeof value === "object" ? JSON.stringify(value) : value
+    }
+    Helpers.copyToClipboard(stringified)
  }
  const paste = () => {
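The copy handler now mirrors the value to the system clipboard, stringifying only objects so plain text does not pick up JSON quotes (`JSON.stringify("hello")` would put `"hello"`, quotes included, on the clipboard). The conversion, extracted as a standalone helper:

```ts
// Objects are JSON-stringified; primitives pass through unquoted
const toClipboardString = (value: unknown): string => {
  if (value == null || value === "") {
    return ""
  }
  return typeof value === "object" ? JSON.stringify(value) : String(value)
}

// toClipboardString("hello") -> hello   (not "hello")
// toClipboardString([1, 2])  -> [1,2]
// toClipboardString(null)    -> ""
```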

View File

@@ -1,6 +1,7 @@
import { get } from "svelte/store"
import DataFetch from "./DataFetch.js"
import { TableNames } from "../constants"
+ import { LuceneUtils } from "../utils"
export default class UserFetch extends DataFetch {
  constructor(opts) {
@@ -27,16 +28,25 @@ export default class UserFetch extends DataFetch {
  }
  async getData() {
+    const { limit, paginate } = this.options
    const { cursor, query } = get(this.store)
+
+    let finalQuery
+    // convert old format to new one - we now allow use of the lucene format
+    const { appId, paginated, ...rest } = query
+    if (!LuceneUtils.hasFilters(query) && rest.email) {
+      finalQuery = { string: { email: rest.email } }
+    } else {
+      finalQuery = rest
+    }
+
    try {
-      // "query" normally contains a lucene query, but users uses a non-standard
-      // search endpoint so we use query uniquely here
-      const res = await this.API.searchUsers({
-        page: cursor,
-        email: query.email,
-        appId: query.appId,
-        paginated: query.paginated,
-      })
+      const opts = {
+        bookmark: cursor,
+        query: finalQuery,
+        appId: appId,
+        paginate: paginated || paginate,
+        limit,
+      }
+      const res = await this.API.searchUsers(opts)
      return {
        rows: res?.data || [],
        hasNextPage: res?.hasNextPage || false,
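`getData` now speaks the Lucene-style search contract while staying backwards compatible: a legacy `{ email, appId, paginated }` query is rewritten into a `string` filter, and pagination options are lifted into the request. A sketch of just that conversion, with a `hasFilters` flag standing in for `LuceneUtils.hasFilters`:

```ts
// Sketch of the legacy-to-Lucene conversion; helper names are illustrative
const buildUserSearchOpts = (
  query: Record<string, any>,
  hasFilters: boolean,
  cursor: string | null,
  limit: number,
  paginate: boolean
) => {
  const { appId, paginated, ...rest } = query
  const finalQuery =
    !hasFilters && rest.email ? { string: { email: rest.email } } : rest
  return {
    bookmark: cursor,
    query: finalQuery,
    appId,
    paginate: paginated || paginate,
    limit,
  }
}
```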

@@ -1 +1 @@
- Subproject commit 30385682141e5ba9d98de7d71d5be1672109cd15
+ Subproject commit 044bec6447066b215932d6726c437e7ec5a9e42e

View File

@@ -11,16 +11,14 @@
  "scripts": {
    "prebuild": "rimraf dist/",
    "build": "node ./scripts/build.js",
-    "postbuild": "copyfiles -f ../client/dist/budibase-client.js ../client/manifest.json client && copyfiles -f ../../yarn.lock ./dist/",
    "check:types": "tsc -p tsconfig.json --noEmit --paths null",
+    "postbuild": "copyfiles -f ../client/dist/budibase-client.js ../client/manifest.json client",
    "build:dev": "yarn prebuild && tsc --build --watch --preserveWatchOutput",
    "debug": "yarn build && node --expose-gc --inspect=9222 dist/index.js",
    "test": "bash scripts/test.sh",
    "test:memory": "jest --maxWorkers=2 --logHeapUsage --forceExit",
    "test:watch": "jest --watch",
-    "predocker": "copyfiles -f ../client/dist/budibase-client.js ../client/manifest.json client && yarn build && cp ../../yarn.lock ./dist/",
-    "build:docker": "yarn predocker && docker build . -t app-service --label version=$BUDIBASE_RELEASE_VERSION",
-    "build:docs": "node ./scripts/docs/generate.js open",
+    "build:docker": "yarn build && docker build . -t app-service --label version=$BUDIBASE_RELEASE_VERSION",
    "run:docker": "node dist/index.js",
    "run:docker:cluster": "pm2-runtime start pm2.config.js",
    "dev:stack:up": "node scripts/dev/manage.js up",
@@ -55,7 +53,7 @@
    "@bull-board/api": "3.7.0",
    "@bull-board/koa": "3.9.4",
    "@elastic/elasticsearch": "7.10.0",
-    "@google-cloud/firestore": "5.0.2",
+    "@google-cloud/firestore": "6.8.0",
    "@koa/router": "8.0.8",
    "@sentry/node": "6.17.7",
    "@socket.io/redis-adapter": "^8.2.1",
@@ -71,7 +69,6 @@
    "curlconverter": "3.21.0",
    "dd-trace": "3.13.2",
    "dotenv": "8.2.0",
-    "fix-path": "3.0.0",
    "form-data": "4.0.0",
    "global-agent": "3.0.0",
    "google-auth-library": "7.12.0",
@@ -97,12 +94,11 @@
    "object-sizeof": "2.6.1",
    "open": "8.4.0",
    "openai": "^3.2.1",
-    "openapi-types": "9.3.1",
    "pg": "8.10.0",
-    "posthog-node": "1.3.0",
    "pouchdb": "7.3.0",
-    "pouchdb-all-dbs": "1.0.2",
+    "pouchdb-all-dbs": "1.1.1",
    "pouchdb-find": "7.2.2",
-    "pouchdb-replication-stream": "1.2.9",
    "redis": "4",
    "server-destroy": "1.0.1",
    "snowflake-promise": "^4.5.0",
@@ -114,10 +110,9 @@
    "to-json-schema": "0.2.5",
    "uuid": "3.3.2",
    "validate.js": "0.13.1",
-    "vm2": "3.9.17",
+    "vm2": "^3.9.19",
    "worker-farm": "1.7.0",
-    "xml2js": "0.5.0",
-    "yargs": "13.2.4"
+    "xml2js": "0.5.0"
  },
  "devDependencies": {
    "@babel/core": "7.17.4",
@@ -127,7 +122,7 @@
    "@trendyol/jest-testcontainers": "2.1.1",
    "@types/global-agent": "2.1.1",
    "@types/google-spreadsheet": "3.1.5",
-    "@types/jest": "29.5.3",
+    "@types/jest": "29.5.5",
    "@types/koa": "2.13.4",
    "@types/koa__router": "8.0.8",
    "@types/lodash": "4.14.180",
@@ -147,7 +142,6 @@
    "jest-runner": "29.6.2",
    "jest-serial-runner": "1.2.1",
    "nodemon": "2.0.15",
-    "openapi-types": "9.3.1",
    "openapi-typescript": "5.2.0",
    "path-to-regexp": "6.2.0",
    "rimraf": "3.0.2",
@@ -157,7 +151,8 @@
    "ts-node": "10.8.1",
    "tsconfig-paths": "4.0.0",
    "typescript": "5.2.2",
-    "update-dotenv": "1.1.1"
+    "update-dotenv": "1.1.1",
+    "yargs": "13.2.4"
  },
  "optionalDependencies": {
    "oracledb": "5.3.0"
@@ -174,6 +169,22 @@
        "target": "build"
      }
    ]
+    },
+    "build": {
+      "outputs": [
+        "{projectRoot}/builder",
+        "{projectRoot}/client",
+        "{projectRoot}/dist"
+      ],
+      "dependsOn": [
+        {
+          "projects": [
+            "@budibase/client",
+            "@budibase/builder"
+          ],
+          "target": "build"
+        }
+      ]
    }
  }
}

View File

@@ -1,31 +0,0 @@
### Documentation
This directory contains the scripts required to generate the APIDoc based documentation.
You can find the docs about comment structure at the [APIDocs page](https://apidocjs.com/).
In general most API endpoints will look like:
```js
/**
* @api {post} /api/:param/url Give it a name
* @apiName Give it a name
* @apiGroup group
* @apiPermission permission
* @apiDescription Describe what the endpoint does, any special cases the user
* should be aware of.
*
* @apiParam {string} param describe a URL parameter.
*
* @apiParam (Body) input describe a field on the body.
*
* @apiSuccess {object} output describe the output.
*/
```
There are a few key points to note when writing API docs:
1. Don't use `@apiBody` - this isn't currently supported by our swagger converter.
2. Make sure to always have an `@api` definition at the start, which must always have the
HTTP verb, the endpoint URL and the name.
3. There are three ways you can specify parameters used as inputs for your endpoint,
`@apiParam` for a URL param, `@apiParam (Body)` for a field on the request body and `@apiParam (Query)`
for query string parameters.
4. The `@apiGroup` should be the same for all API Doc comments in a route file.

View File

@@ -1,74 +0,0 @@
const fs = require("fs")
const { join } = require("path")
const { createDoc } = require("apidoc")
const packageJson = require("../../package.json")
const toSwagger = require("./toSwagger")
const open = require("open")
const config = {
name: "Budibase API",
version: packageJson.version,
description: "Documenting the Budibase backend API",
title: "Budibase app service API",
}
const shouldOpen = process.argv[2]
const disallowed = []
function filter(parsedRouteFiles) {
const tagToSearch = "url"
for (let routeFile of parsedRouteFiles) {
for (let route of routeFile) {
let routeInfo = route["local"]
if (disallowed.includes(routeInfo[tagToSearch])) {
const idx = routeFile.indexOf(route)
routeFile.splice(idx, 1)
}
}
}
}
async function generate() {
// start by writing a config file
const configPath = join(__dirname, "config.json")
fs.writeFileSync(configPath, JSON.stringify(config))
const mainPath = join(__dirname, "..", "..")
const srcPath = join(mainPath, "src", "api", "routes")
const assetsPath = join(mainPath, "builder", "assets", "docs")
if (!fs.existsSync(assetsPath)) {
fs.mkdirSync(assetsPath, { recursive: true })
}
const options = {
src: [srcPath],
dest: assetsPath,
filters: {
main: {
postFilter: filter,
},
},
config: configPath,
}
const doc = createDoc(options)
if (typeof doc !== "boolean") {
const swagger = toSwagger(JSON.parse(doc.data), JSON.parse(doc.project))
fs.writeFileSync(join(assetsPath, "swagger.json"), JSON.stringify(swagger))
fs.writeFileSync(join(assetsPath, "apidoc.json"), doc.data)
fs.writeFileSync(join(assetsPath, "project.json"), doc.project)
console.log(
`Docs generated successfully, find in ${assetsPath}, swagger.json, apidoc.json and project.json`
)
} else {
throw "Unable to generate docs."
}
// delete the temporary config file
fs.unlinkSync(configPath)
setTimeout(async () => {
if (shouldOpen === "open") {
await open(join(assetsPath, "index.html"), { wait: false })
}
}, 2000)
}
generate().catch(err => {
console.error(err)
})

View File

@@ -1,320 +0,0 @@
let _ = require("lodash")
let { pathToRegexp } = require("path-to-regexp")
/********************************************************
* Based on: https://github.com/fsbahman/apidoc-swagger *
********************************************************/
let swagger = {
swagger: "2.0",
info: {},
paths: {},
definitions: {},
}
function toSwagger(apidocJson, projectJson) {
swagger.info = addInfo(projectJson)
swagger.paths = extractPaths(apidocJson)
return swagger
}
let tagsRegex = /(<([^>]+)>)/gi
// Removes <p> </p> tags from text
function removeTags(text) {
return text ? text.replace(tagsRegex, "") : text
}
function addInfo(projectJson) {
let info = {}
info["title"] = projectJson.title || projectJson.name
info["version"] = projectJson.version
info["description"] = projectJson.description
return info
}
/**
* Extracts paths provided in json format
* post, patch, put request parameters are extracted in body
* get and delete are extracted to path parameters
* @param apidocJson
* @returns {{}}
*/
function extractPaths(apidocJson) {
let apiPaths = groupByUrl(apidocJson)
let paths = {}
for (let i = 0; i < apiPaths.length; i++) {
let verbs = apiPaths[i].verbs
let url = verbs[0].url
let pattern = pathToRegexp(url, null)
let matches = pattern.exec(url)
// Surrounds URL parameters with curly brackets -> :email with {email}
let pathKeys = []
for (let j = 1; j < matches.length; j++) {
let key = matches[j].slice(1)
url = url.replace(matches[j], "{" + key + "}")
pathKeys.push(key)
}
for (let j = 0; j < verbs.length; j++) {
let verb = verbs[j]
let type = verb.type
let obj = (paths[url] = paths[url] || {})
if (type === "post" || type === "patch" || type === "put") {
_.extend(
obj,
createPostPushPutOutput(verb, swagger.definitions, pathKeys)
)
} else {
_.extend(obj, createGetDeleteOutput(verb, swagger.definitions))
}
}
}
return paths
}
function createPostPushPutOutput(verbs, definitions, pathKeys) {
let pathItemObject = {}
let verbDefinitionResult = createVerbDefinitions(verbs, definitions)
let params = []
let pathParams = createPathParameters(verbs, pathKeys)
pathParams = _.filter(pathParams, function (param) {
let hasKey = pathKeys.indexOf(param.name) !== -1
return !(param.in === "path" && !hasKey)
})
params = params.concat(pathParams)
let required =
verbs.parameter &&
verbs.parameter.fields &&
verbs.parameter.fields.Parameter &&
verbs.parameter.fields.Parameter.length > 0
params.push({
in: "body",
name: "body",
description: removeTags(verbs.description),
required: required,
schema: {
$ref: "#/definitions/" + verbDefinitionResult.topLevelParametersRef,
},
})
pathItemObject[verbs.type] = {
tags: [verbs.group],
summary: removeTags(verbs.description),
consumes: ["application/json"],
produces: ["application/json"],
parameters: params,
}
if (verbDefinitionResult.topLevelSuccessRef) {
pathItemObject[verbs.type].responses = {
200: {
description: "successful operation",
schema: {
type: verbDefinitionResult.topLevelSuccessRefType,
items: {
$ref: "#/definitions/" + verbDefinitionResult.topLevelSuccessRef,
},
},
},
}
}
return pathItemObject
}
function createVerbDefinitions(verbs, definitions) {
let result = {
topLevelParametersRef: null,
topLevelSuccessRef: null,
topLevelSuccessRefType: null,
}
let defaultObjectName = verbs.name
let fieldArrayResult = {}
if (verbs && verbs.parameter && verbs.parameter.fields) {
fieldArrayResult = createFieldArrayDefinitions(
verbs.parameter.fields.Parameter,
definitions,
verbs.name,
defaultObjectName
)
result.topLevelParametersRef = fieldArrayResult.topLevelRef
}
if (verbs && verbs.success && verbs.success.fields) {
fieldArrayResult = createFieldArrayDefinitions(
verbs.success.fields["Success 200"],
definitions,
verbs.name,
defaultObjectName
)
result.topLevelSuccessRef = fieldArrayResult.topLevelRef
result.topLevelSuccessRefType = fieldArrayResult.topLevelRefType
}
return result
}
function createFieldArrayDefinitions(
fieldArray,
definitions,
topLevelRef,
defaultObjectName
) {
let result = {
topLevelRef: topLevelRef,
topLevelRefType: null,
}
if (!fieldArray) {
return result
}
for (let i = 0; i < fieldArray.length; i++) {
let parameter = fieldArray[i]
let nestedName = createNestedName(parameter.field)
let objectName = nestedName.objectName
if (!objectName) {
objectName = defaultObjectName
}
let type = parameter.type
if (i === 0) {
result.topLevelRefType = type
if (parameter.type === "Object") {
objectName = nestedName.propertyName
nestedName.propertyName = null
} else if (parameter.type === "Array") {
objectName = nestedName.propertyName
nestedName.propertyName = null
result.topLevelRefType = "array"
}
result.topLevelRef = objectName
}
definitions[objectName] = definitions[objectName] || {
properties: {},
required: [],
}
if (nestedName.propertyName) {
let prop = {
type: (parameter.type || "").toLowerCase(),
description: removeTags(parameter.description),
}
if (parameter.type === "Object") {
prop.$ref = "#/definitions/" + parameter.field
}
let typeIndex = type.indexOf("[]")
if (typeIndex !== -1 && typeIndex === type.length - 2) {
prop.type = "array"
prop.items = {
type: type.slice(0, type.length - 2),
}
}
definitions[objectName]["properties"][nestedName.propertyName] = prop
if (!parameter.optional) {
let arr = definitions[objectName]["required"]
if (arr.indexOf(nestedName.propertyName) === -1) {
arr.push(nestedName.propertyName)
}
}
}
}
return result
}
function createNestedName(field) {
let propertyName = field
let objectName
let propertyNames = field.split(".")
if (propertyNames && propertyNames.length > 1) {
propertyName = propertyNames[propertyNames.length - 1]
propertyNames.pop()
objectName = propertyNames.join(".")
}
return {
propertyName: propertyName,
objectName: objectName,
}
}
/**
* Generate get, delete method output
* @param verbs
* @param definitions
* @returns {{}}
*/
function createGetDeleteOutput(verbs, definitions) {
let pathItemObject = {}
verbs.type = verbs.type === "del" ? "delete" : verbs.type
let verbDefinitionResult = createVerbDefinitions(verbs, definitions)
pathItemObject[verbs.type] = {
tags: [verbs.group],
summary: removeTags(verbs.description),
consumes: ["application/json"],
produces: ["application/json"],
parameters: createPathParameters(verbs),
}
if (verbDefinitionResult.topLevelSuccessRef) {
pathItemObject[verbs.type].responses = {
200: {
description: "successful operation",
schema: {
type: verbDefinitionResult.topLevelSuccessRefType,
items: {
$ref: "#/definitions/" + verbDefinitionResult.topLevelSuccessRef,
},
},
},
}
}
return pathItemObject
}
/**
* Iterate through all method parameters and create array of parameter objects which are stored as path parameters
* @param verbs
* @returns {Array}
*/
function createPathParameters(verbs) {
let pathItemObject = []
if (verbs.parameter && verbs.parameter.fields.Parameter) {
for (let i = 0; i < verbs.parameter.fields.Parameter.length; i++) {
let param = verbs.parameter.fields.Parameter[i]
let field = param.field
let type = param.type
pathItemObject.push({
name: field,
in: type === "file" ? "formData" : "path",
required: !param.optional,
type: param.type.toLowerCase(),
description: removeTags(param.description),
})
}
}
return pathItemObject
}
function groupByUrl(apidocJson) {
return _.chain(apidocJson)
.groupBy("url")
.toPairs()
.map(function (element) {
return _.zipObject(["url", "verbs"], element)
})
.value()
}
module.exports = toSwagger

View File

@@ -2,7 +2,7 @@ version: "3.8"
services:
  db:
    container_name: postgres
-    image: postgres
+    image: postgres:15
    restart: unless-stopped
    environment:
      POSTGRES_USER: root
@@ -25,4 +25,4 @@ services:
      - "5050:80"
volumes:
  pg_data:

View File

@@ -859,7 +859,8 @@
  "json",
  "internal",
  "barcodeqr",
-  "bigint"
+  "bigint",
+  "bb_reference"
],
"description": "Defines the type of the column, most explain themselves, a link column is a relationship."
},
@@ -1064,7 +1065,8 @@
  "json",
  "internal",
  "barcodeqr",
-  "bigint"
+  "bigint",
+  "bb_reference"
],
"description": "Defines the type of the column, most explain themselves, a link column is a relationship."
},
@@ -1280,7 +1282,8 @@
  "json",
  "internal",
  "barcodeqr",
-  "bigint"
+  "bigint",
+  "bb_reference"
],
"description": "Defines the type of the column, most explain themselves, a link column is a relationship."
},

View File

@@ -782,6 +782,7 @@ components:
  - internal
  - barcodeqr
  - bigint
+  - bb_reference
  description: Defines the type of the column, most explain themselves, a link
    column is a relationship.
  constraints:
@@ -946,6 +947,7 @@ components:
  - internal
  - barcodeqr
  - bigint
+  - bb_reference
  description: Defines the type of the column, most explain themselves, a link
    column is a relationship.
  constraints:
@@ -1117,6 +1119,7 @@ components:
  - internal
  - barcodeqr
  - bigint
+  - bb_reference
  description: Defines the type of the column, most explain themselves, a link
    column is a relationship.
  constraints:

View File

@@ -289,6 +289,7 @@ async function performAppCreate(ctx: UserCtx) {
    },
    features: {
      componentValidation: true,
+      disableUserMetadata: true,
    },
  }
@@ -310,10 +311,13 @@
    }
  })
-  // Keep existing validation setting
+  // Keep existing feature flags
  if (!existing.features?.componentValidation) {
    newApplication.features!.componentValidation = false
  }
+  if (!existing.features?.disableUserMetadata) {
+    newApplication.features!.disableUserMetadata = false
+  }
  // Migrate navigation settings and screens if required
  if (existing) {
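The pattern here: brand-new apps get new feature flags switched on, while overwriting an existing app must not silently enable behaviour the app was created without, so each flag is downgraded to the existing app's value. A generic sketch of that merge (the flag names are the real ones above; the helper itself is illustrative):

```ts
type Features = {
  componentValidation: boolean
  disableUserMetadata: boolean
}

// New apps default every flag on; an overwrite keeps the old app's flags
const resolveFeatures = (existing?: Partial<Features>): Features => ({
  componentValidation: existing ? !!existing.componentValidation : true,
  disableUserMetadata: existing ? !!existing.disableUserMetadata : true,
})
```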

View File

@@ -5,7 +5,6 @@ import {
  getTableParams,
} from "../../db/utils"
import { destroy as tableDestroy } from "./table/internal"
- import { BuildSchemaErrors, InvalidColumns } from "../../constants"
import { getIntegration } from "../../integrations"
import { invalidateDynamicVariables } from "../../threads/utils"
import { context, db as dbCore, events } from "@budibase/backend-core"
@@ -14,10 +13,13 @@ import {
  CreateDatasourceResponse,
  Datasource,
  DatasourcePlus,
+  ExternalTable,
  FetchDatasourceInfoRequest,
  FetchDatasourceInfoResponse,
  IntegrationBase,
+  Schema,
  SourceName,
+  Table,
  UpdateDatasourceResponse,
  UserCtx,
  VerifyDatasourceRequest,
@@ -27,23 +29,6 @@ import sdk from "../../sdk"
import { builderSocket } from "../../websockets"
import { setupCreationAuth as googleSetupCreationAuth } from "../../integrations/googlesheets"
- function getErrorTables(errors: any, errorType: string) {
-   return Object.entries(errors)
-     .filter(entry => entry[1] === errorType)
-     .map(([name]) => name)
- }
-
- function updateError(error: any, newError: any, tables: string[]) {
-   if (!error) {
-     error = ""
-   }
-   if (error.length > 0) {
-     error += "\n"
-   }
-   error += `${newError} ${tables.join(", ")}`
-   return error
- }
async function getConnector(
  datasource: Datasource
): Promise<IntegrationBase | DatasourcePlus> {
@@ -71,48 +56,36 @@ async function getAndMergeDatasource(datasource: Datasource) {
  return await sdk.datasources.enrich(enrichedDatasource)
}
- async function buildSchemaHelper(datasource: Datasource) {
+ async function buildSchemaHelper(datasource: Datasource): Promise<Schema> {
  const connector = (await getConnector(datasource)) as DatasourcePlus
-   await connector.buildSchema(datasource._id!, datasource.entities!)
-
-   const errors = connector.schemaErrors
-   let error = null
-   if (errors && Object.keys(errors).length > 0) {
-     const noKey = getErrorTables(errors, BuildSchemaErrors.NO_KEY)
-     const invalidCol = getErrorTables(errors, BuildSchemaErrors.INVALID_COLUMN)
-     if (noKey.length) {
-       error = updateError(
-         error,
-         "No primary key constraint found for the following:",
-         noKey
-       )
-     }
-     if (invalidCol.length) {
-       const invalidCols = Object.values(InvalidColumns).join(", ")
-       error = updateError(
-         error,
-         `Cannot use columns ${invalidCols} found in following:`,
-         invalidCol
-       )
-     }
-   }
-   return { tables: connector.tables, error }
+   return await connector.buildSchema(
+     datasource._id!,
+     datasource.entities! as Record<string, ExternalTable>
+   )
}
- async function buildFilteredSchema(datasource: Datasource, filter?: string[]) {
-   let { tables, error } = await buildSchemaHelper(datasource)
-   let finalTables = tables
-   if (filter) {
-     finalTables = {}
-     for (let key in tables) {
-       if (
-         filter.some((filter: any) => filter.toLowerCase() === key.toLowerCase())
-       ) {
-         finalTables[key] = tables[key]
-       }
-     }
-   }
-   return { tables: finalTables, error }
+ async function buildFilteredSchema(
+   datasource: Datasource,
+   filter?: string[]
+ ): Promise<Schema> {
+   let schema = await buildSchemaHelper(datasource)
+   if (!filter) {
+     return schema
+   }
+
+   let filteredSchema: Schema = { tables: {}, errors: {} }
+   for (let key in schema.tables) {
+     if (filter.some(filter => filter.toLowerCase() === key.toLowerCase())) {
+       filteredSchema.tables[key] = schema.tables[key]
+     }
+   }
+
+   for (let key in schema.errors) {
+     if (filter.some(filter => filter.toLowerCase() === key.toLowerCase())) {
+       filteredSchema.errors[key] = schema.errors[key]
+     }
+   }
+   return filteredSchema
}
export async function fetch(ctx: UserCtx) {
@@ -156,7 +129,7 @@ export async function buildSchemaFromDb(ctx: UserCtx) {
  const tablesFilter = ctx.request.body.tablesFilter
  const datasource = await sdk.datasources.get(ctx.params.datasourceId)
-  const { tables, error } = await buildFilteredSchema(datasource, tablesFilter)
+  const { tables, errors } = await buildFilteredSchema(datasource, tablesFilter)
  datasource.entities = tables
  setDefaultDisplayColumns(datasource)
@@ -164,13 +137,11 @@ export async function buildSchemaFromDb(ctx: UserCtx) {
    sdk.tables.populateExternalTableSchemas(datasource)
  )
  datasource._rev = dbResp.rev
-  const cleanedDatasource = await sdk.datasources.removeSecretSingle(datasource)
-
-  const res: any = { datasource: cleanedDatasource }
-  if (error) {
-    res.error = error
-  }
-  ctx.body = res
+  ctx.body = {
+    datasource: await sdk.datasources.removeSecretSingle(datasource),
+    errors,
+  }
}
/**
@@ -298,15 +269,12 @@ export async function save(
    type: plus ? DocumentType.DATASOURCE_PLUS : DocumentType.DATASOURCE,
  }
-  let schemaError = null
+  let errors: Record<string, string> = {}
  if (fetchSchema) {
-    const { tables, error } = await buildFilteredSchema(
-      datasource,
-      tablesFilter
-    )
-    schemaError = error
-    datasource.entities = tables
+    const schema = await buildFilteredSchema(datasource, tablesFilter)
+    datasource.entities = schema.tables
    setDefaultDisplayColumns(datasource)
+    errors = schema.errors
  }
  if (preSaveAction[datasource.source]) {
@@ -327,13 +295,10 @@
    }
  }
-  const response: CreateDatasourceResponse = {
+  ctx.body = {
    datasource: await sdk.datasources.removeSecretSingle(datasource),
+    errors,
  }
-  if (schemaError) {
-    response.error = schemaError
-  }
-  ctx.body = response
  builderSocket?.emitDatasourceUpdate(ctx, datasource)
}
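Schema errors are now returned per table (`Record<string, string>`) instead of one concatenated message, and the same case-insensitive filter is applied to both `tables` and `errors`. The filtering logic, extracted as a standalone sketch (the `Schema` shape below is simplified from `@budibase/types`):

```ts
type Schema = {
  tables: Record<string, unknown>
  errors: Record<string, string>
}

const filterSchema = (schema: Schema, filter?: string[]): Schema => {
  if (!filter) {
    return schema
  }
  const wanted = new Set(filter.map(name => name.toLowerCase()))
  // Keep only entries whose key matches the filter, ignoring case
  const pick = <T>(source: Record<string, T>): Record<string, T> =>
    Object.fromEntries(
      Object.entries(source).filter(([key]) => wanted.has(key.toLowerCase()))
    ) as Record<string, T>
  return { tables: pick(schema.tables), errors: pick(schema.errors) }
}
```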

View File

@@ -5,8 +5,11 @@ import {
  FieldType,
  FilterType,
  IncludeRelationship,
+  OneToManyRelationshipFieldMetadata,
  Operation,
  PaginationJson,
+  RelationshipFieldMetadata,
+  RelationshipsJson,
  RelationshipType,
  Row,
  SearchFilters,
@@ -22,16 +25,21 @@ import {
  isSQL,
} from "../../../integrations/utils"
import {
-  generateIdForRow,
  buildExternalRelationships,
  buildSqlFieldList,
+  generateIdForRow,
  sqlOutputProcessing,
+  squashRelationshipColumns,
+  updateRelationshipColumns,
+  fixArrayTypes,
+  isManyToMany,
} from "./utils"
import { getDatasourceAndQuery } from "../../../sdk/app/rows/utils"
import { AutoFieldSubTypes, FieldTypes } from "../../../constants"
import { processObjectSync } from "@budibase/string-templates"
import { cloneDeep } from "lodash/fp"
import { db as dbCore } from "@budibase/backend-core"
+ import { processDates, processFormulas } from "../../../utilities/rowProcessor"
import sdk from "../../../sdk"
export interface ManyRelationship {
@@ -161,7 +169,60 @@ function getEndpoint(tableId: string | undefined, operation: string) {
  }
}
- function isOneSide(field: FieldSchema) {
+ // need to handle table name + field or just field, depending on if relationships used
+ function extractFieldValue({
+   row,
+   tableName,
+   fieldName,
+   isLinked,
+ }: {
+   row: Row
+   tableName: string
+   fieldName: string
+   isLinked: boolean
+ }) {
+   let value = row[`${tableName}.${fieldName}`]
+   if (value == null && !isLinked) {
+     value = row[fieldName]
+   }
+   return value
+ }
+
+ function basicProcessing({
+   row,
+   table,
+   isLinked,
+ }: {
+   row: Row
+   table: Table
+   isLinked: boolean
+ }): Row {
+   const thisRow: Row = {}
+   // filter the row down to what is actually the row (not joined)
+   for (let field of Object.values(table.schema)) {
+     const fieldName = field.name
+     const value = extractFieldValue({
+       row,
+       tableName: table.name,
+       fieldName,
+       isLinked,
+     })
+     // all responses include "select col as table.col" so that overlaps are handled
+     if (value != null) {
+       thisRow[fieldName] = value
+     }
+   }
+   thisRow._id = generateIdForRow(row, table, isLinked)
+   thisRow.tableId = table._id
+   thisRow._rev = "rev"
+   return processFormulas(table, thisRow)
+ }
+
+ function isOneSide(
+   field: RelationshipFieldMetadata
+ ): field is OneToManyRelationshipFieldMetadata {
  return (
    field.relationshipType && field.relationshipType.split("-")[0] === "one"
  )
@@ -259,11 +320,11 @@ export class ExternalRequest<T extends Operation> {
      }
    }
    // many to many
-    else if (field.through) {
+    else if (isManyToMany(field)) {
      // we're not inserting a doc, will be a bunch of update calls
      const otherKey: string = field.throughFrom || linkTablePrimary
      const thisKey: string = field.throughTo || tablePrimary
-      row[key].forEach((relationship: any) => {
+      for (const relationship of row[key]) {
        manyRelationships.push({
          tableId: field.through || field.tableId,
          isUpdate: false,
@@ -272,14 +333,14 @@
          // leave the ID for enrichment later
          [thisKey]: `{{ literal ${tablePrimary} }}`,
        })
-      })
+      }
    }
    // many to one
    else {
      const thisKey: string = "id"
      // @ts-ignore
      const otherKey: string = field.fieldName
-      row[key].forEach((relationship: any) => {
+      for (const relationship of row[key]) {
        manyRelationships.push({
          tableId: field.tableId,
          isUpdate: true,
@@ -288,7 +349,7 @@
          // leave the ID for enrichment later
          [otherKey]: `{{ literal ${tablePrimary} }}`,
        })
-      })
+      }
    }
  }
}
  // we return the relationships that may need to be created in the through table
@@ -297,6 +358,57 @@
  return { row: newRow, manyRelationships }
}
+ outputProcessing(
+   rows: Row[] = [],
+   table: Table,
+   relationships: RelationshipsJson[]
+ ) {
+   if (!rows || rows.length === 0 || rows[0].read === true) {
+     return []
+   }
+   const tableMap = this.tables
+   let finalRows: { [key: string]: Row } = {}
+   for (let row of rows) {
+     const rowId = generateIdForRow(row, table)
+     row._id = rowId
+     // this is a relationship of some sort
+     if (finalRows[rowId]) {
+       finalRows = updateRelationshipColumns(
+         table,
+         tableMap,
+         row,
+         finalRows,
+         relationships
+       )
+       continue
+     }
+     const thisRow = fixArrayTypes(
+       basicProcessing({ row, table, isLinked: false }),
+       table
+     )
+     if (thisRow._id == null) {
+       throw "Unable to generate row ID for SQL rows"
+     }
+     finalRows[thisRow._id] = thisRow
+     // do this at end once its been added to the final rows
+     finalRows = updateRelationshipColumns(
+       table,
+       tableMap,
+       row,
+       finalRows,
+       relationships
+     )
+   }
+
+   // Process some additional data types
+   let finalRowArray = Object.values(finalRows)
+   finalRowArray = processDates(table, finalRowArray)
+   finalRowArray = processFormulas(table, finalRowArray) as Row[]
+
+   return finalRowArray.map((row: Row) =>
+     squashRelationshipColumns(table, tableMap, row, relationships)
+   )
+ }
/**
 * This is a cached lookup, of relationship records, this is mainly for creating/deleting junction
 * information.
@@ -312,7 +424,7 @@
  const primaryKey = table.primary[0]
  // make a new request to get the row with all its relationships
  // we need this to work out if any relationships need removed
-  for (let field of Object.values(table.schema) as FieldSchema[]) {
+  for (const field of Object.values(table.schema)) {
    if (
      field.type !== FieldTypes.LINK ||
      !field.fieldName ||
@@ -325,9 +437,9 @@
    const { tableName: relatedTableName } = breakExternalTableId(tableId)
    // @ts-ignore
    const linkPrimaryKey = this.tables[relatedTableName].primary[0]
-    const manyKey = field.throughTo || primaryKey
    const lookupField = isMany ? primaryKey : field.foreignKey
-    const fieldName = isMany ? manyKey : field.fieldName
+    const fieldName = isMany ? field.throughTo || primaryKey : field.fieldName
    if (!lookupField || !row[lookupField]) {
      continue
    }
@@ -384,7 +496,7 @@
    linkPrimary,
    linkSecondary,
  }: {
-    row: { [key: string]: any }
+    row: Record<string, any>
    linkPrimary: string
    linkSecondary?: string
  }) {
@@ -446,41 +558,6 @@
    await Promise.all(promises)
  }
- /**
-  * This function is a bit crazy, but the exact purpose of it is to protect against the scenario in which
-  * you have column overlap in relationships, e.g. we join a few different tables and they all have the
-  * concept of an ID, but for some of them it will be null (if they say don't have a relationship).
-  * Creating the specific list of fields that we desire, and excluding the ones that are no use to us
-  * is more performant and has the added benefit of protecting against this scenario.
-  */
- buildFields(table: Table, includeRelations: boolean) {
-   function extractRealFields(table: Table, existing: string[] = []) {
-     return Object.entries(table.schema)
-       .filter(
-         column =>
-           column[1].type !== FieldTypes.LINK &&
-           column[1].type !== FieldTypes.FORMULA &&
-           !existing.find((field: string) => field === column[0])
-       )
-       .map(column => `${table.name}.${column[0]}`)
-   }
-
-   let fields = extractRealFields(table)
-   for (let field of Object.values(table.schema)) {
-     if (field.type !== FieldTypes.LINK || !includeRelations) {
-       continue
-     }
-     const { tableName: linkTableName } = breakExternalTableId(field.tableId)
-     if (linkTableName) {
-       const linkTable = this.tables[linkTableName]
-       if (linkTable) {
-         const linkedFields = extractRealFields(linkTable, fields)
-         fields = fields.concat(linkedFields)
-       }
-     }
-   }
-   return fields
- }
  async run(config: RunConfig): Promise<ExternalRequestReturnType<T>> {
    const { operation, tableId } = this
    let { datasourceId, tableName } = breakExternalTableId(tableId)
@@ -580,11 +657,10 @@
      relationships
    )
    // if reading it'll just be an array of rows, return whole thing
-    const result = (
+    return (
      operation === Operation.READ && Array.isArray(response)
        ? output
        : { row: output[0], table }
    ) as ExternalRequestReturnType<T>
-    return result
  }
}
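The new `outputProcessing` method folds the flat rows a SQL join returns back into one logical row per record: rows are keyed by a generated ID, the first hit creates the row, and every hit contributes its relationship columns. A skeleton of that merge, with the Budibase-specific steps abstracted behind callbacks:

```ts
type Row = { _id?: string; [key: string]: any }

// Skeleton only; generateId and mergeRelationships stand in for
// generateIdForRow and updateRelationshipColumns above.
const mergeJoinedRows = (
  rows: Row[],
  generateId: (row: Row) => string,
  mergeRelationships: (
    rows: Record<string, Row>,
    row: Row
  ) => Record<string, Row>
): Row[] => {
  let finalRows: Record<string, Row> = {}
  for (const row of rows) {
    const id = generateId(row)
    // First occurrence of an ID creates the row
    if (!finalRows[id]) {
      finalRows[id] = { ...row, _id: id }
    }
    // Every occurrence (including the first) adds its relationship columns
    finalRows = mergeRelationships(finalRows, row)
  }
  return Object.values(finalRows)
}
```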

View File

@@ -167,7 +167,10 @@ export async function destroy(ctx: UserCtx) {
  }
  const table = await sdk.tables.getTable(row.tableId)
  // update the row to include full relationships before deleting them
-  row = await outputProcessing(table, row, { squash: false })
+  row = await outputProcessing(table, row, {
+    squash: false,
+    skipBBReferences: true,
+  })
  // now remove the relationships
  await linkRows.updateLinks({
    eventType: linkRows.EventType.ROW_DELETE,
@@ -201,6 +204,7 @@ export async function bulkDestroy(ctx: UserCtx) {
  // they need to be the full rows (including previous relationships) for automations
  const processedRows = (await outputProcessing(table, rows, {
    squash: false,
+    skipBBReferences: true,
  })) as Row[]
  // remove the relationships first

View File

@@ -1,4 +1,12 @@
- import { FieldType, RelationshipsJson, Row, Table } from "@budibase/types"
+ import {
+   FieldType,
+   ManyToManyRelationshipFieldMetadata,
+   OneToManyRelationshipFieldMetadata,
+   RelationshipFieldMetadata,
+   RelationshipsJson,
+   Row,
+   Table,
+ } from "@budibase/types"
import { processFormulas } from "../../../../utilities/rowProcessor"
import {
  breakExternalTableId,
@@ -9,6 +17,12 @@ import { generateJunctionTableID } from "../../../../db/utils"
type TableMap = Record<string, Table>
+ export function isManyToMany(
+   field: RelationshipFieldMetadata
+ ): field is ManyToManyRelationshipFieldMetadata {
+   return !!(field as ManyToManyRelationshipFieldMetadata).through
+ }
export function squashRelationshipColumns(
  table: Table,
  tables: TableMap,
@@ -88,7 +102,7 @@ export function updateRelationshipColumns(
    columns[relationship.column] = linked
  }
  for (let [column, related] of Object.entries(columns)) {
-    let rowId: string = row._id!
+    let rowId = row._id
    if (opts?.internal) {
      const { _id } = basicProcessing({
        row,
@@ -137,15 +151,16 @@ export function buildExternalRelationships(
    if (!table.primary || !linkTable.primary) {
      continue
    }
-    const definition: any = {
+    const foreignKey = (field as OneToManyRelationshipFieldMetadata).foreignKey
+    const definition: RelationshipsJson = {
      // if no foreign key specified then use the name of the field in other table
-      from: field.foreignKey || table.primary[0],
+      from: foreignKey || table.primary[0],
      to: field.fieldName,
      tableName: linkTableName,
      // need to specify where to put this back into
      column: fieldName,
    }
-    if (field.through) {
+    if (isManyToMany(field) && field.through) {
      const { tableName: throughTableName } = breakExternalTableId(
        field.through
      )
@@ -168,6 +183,9 @@ export function buildInternalRelationships(table: Table): RelationshipsJson[] {
  )
  const tableId = table._id!
  for (let link of links) {
+    if (link.type !== FieldType.LINK) {
+      continue
+    }
    const linkTableId = link.tableId!
    const junctionTableId = generateJunctionTableID(tableId, linkTableId)
    const isFirstTable = tableId > linkTableId
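The exported `isManyToMany` is a TypeScript user-defined type guard: after the check, the compiler narrows the field to the many-to-many variant, so `through` and `throughTo` are usable without casts. A self-contained illustration of the pattern (types simplified; the real ones live in `@budibase/types`):

```ts
type OneToMany = { relationshipType: "one-to-many"; foreignKey: string }
type ManyToMany = { relationshipType: "many-to-many"; through: string }
type Relationship = OneToMany | ManyToMany

function isManyToMany(field: Relationship): field is ManyToMany {
  return (field as ManyToMany).through !== undefined
}

const rel: Relationship = {
  relationshipType: "many-to-many",
  through: "jt_a_b",
}
if (isManyToMany(rel)) {
  // Narrowed: rel.through is typed as string here, no cast required
  console.log(rel.through)
}
```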

View File

@@ -1,4 +1,4 @@
- import { FieldTypes, FormulaTypes } from "../../../constants"
+ import { FormulaTypes } from "../../../constants"
import { clearColumns } from "./utils"
import { doesContainStrings } from "@budibase/string-templates"
import { cloneDeep } from "lodash/fp"
@@ -6,12 +6,20 @@ import isEqual from "lodash/isEqual"
import uniq from "lodash/uniq"
import { updateAllFormulasInTable } from "../row/staticFormula"
import { context } from "@budibase/backend-core"
- import { FieldSchema, Table } from "@budibase/types"
+ import {
+   FieldSchema,
+   FieldType,
+   FormulaFieldMetadata,
+   Table,
+ } from "@budibase/types"
import sdk from "../../../sdk"
+ import { isRelationshipColumn } from "../../../db/utils"
- function isStaticFormula(column: FieldSchema) {
+ function isStaticFormula(
+   column: FieldSchema
+ ): column is FormulaFieldMetadata & { formulaType: FormulaTypes.STATIC } {
  return (
-    column.type === FieldTypes.FORMULA &&
+    column.type === FieldType.FORMULA &&
    column.formulaType === FormulaTypes.STATIC
  )
}
@@ -56,8 +64,9 @@ async function checkIfFormulaNeedsCleared(
  for (let removed of removedColumns) {
    let tableToUse: Table | undefined = table
    // if relationship, get the related table
-    if (removed.type === FieldTypes.LINK) {
-      tableToUse = tables.find(table => table._id === removed.tableId)
+    if (removed.type === FieldType.LINK) {
+      const removedTableId = removed.tableId
+      tableToUse = tables.find(table => table._id === removedTableId)
    }
    if (!tableToUse) {
      continue
@@ -73,17 +82,18 @@
  }
  for (let relatedTableId of table.relatedFormula) {
    const relatedColumns = Object.values(table.schema).filter(
-      column => column.tableId === relatedTableId
+      column =>
+        column.type === FieldType.LINK && column.tableId === relatedTableId
    )
    const relatedTable = tables.find(table => table._id === relatedTableId)
    // look to see if the column was used in a relationship formula,
    // relationships won't be used for this
-    if (relatedTable && relatedColumns && removed.type !== FieldTypes.LINK) {
+    if (relatedTable && relatedColumns && removed.type !== FieldType.LINK) {
      let relatedFormulaToRemove: string[] = []
      for (let column of relatedColumns) {
        relatedFormulaToRemove = relatedFormulaToRemove.concat(
          getFormulaThatUseColumn(relatedTable, [
-            column.fieldName!,
+            (column as any).fieldName!,
            removed.name,
          ])
        )
@@ -116,7 +126,7 @@ async function updateRelatedFormulaLinksOnTables(
  const initialTables = cloneDeep(tables)
  // first find the related column names
  const relatedColumns = Object.values(table.schema).filter(
-    col => col.type === FieldTypes.LINK
+    isRelationshipColumn
  )
  // we start by removing the formula field from all tables
  for (let otherTable of tables) {
@@ -135,6 +145,7 @@
    if (!columns || columns.length === 0) {
      continue
    }
+
    const relatedTable = tables.find(
      related => related._id === relatedCol.tableId
    )

View File

@@ -15,11 +15,16 @@ import { handleRequest } from "../row/external"
import { context, events } from "@budibase/backend-core"
import { isRows, isSchema, parse } from "../../../utilities/schema"
import {
-  AutoReason,
+  BulkImportRequest,
+  BulkImportResponse,
  Datasource,
  FieldSchema,
+  ManyToManyRelationshipFieldMetadata,
+  ManyToOneRelationshipFieldMetadata,
+  OneToManyRelationshipFieldMetadata,
  Operation,
  QueryJson,
+  RelationshipFieldMetadata,
  RelationshipType,
  RenameColumn,
  SaveTableRequest,
@@ -74,10 +79,13 @@ function cleanupRelationships(
      schema.type === FieldTypes.LINK &&
      (!oldTable || table.schema[key] == null)
    ) {
+      const schemaTableId = schema.tableId
      const relatedTable = Object.values(tables).find(
-        table => table._id === schema.tableId
+        table => table._id === schemaTableId
      )
-      const foreignKey = schema.foreignKey
+      const foreignKey =
+        schema.relationshipType !== RelationshipType.MANY_TO_MANY &&
+        schema.foreignKey
      if (!relatedTable || !foreignKey) {
        continue
      }
@@ -116,7 +124,7 @@ function otherRelationshipType(type?: string) {
function generateManyLinkSchema(
  datasource: Datasource,
-  column: FieldSchema,
+  column: ManyToManyRelationshipFieldMetadata,
  table: Table,
  relatedTable: Table
): Table {
@@ -151,10 +159,12 @@ function generateManyLinkSchema(
}
function generateLinkSchema(
-  column: FieldSchema,
+  column:
+    | OneToManyRelationshipFieldMetadata
+    | ManyToOneRelationshipFieldMetadata,
  table: Table,
  relatedTable: Table,
-  type: RelationshipType
+  type: RelationshipType.ONE_TO_MANY | RelationshipType.MANY_TO_ONE
) {
  if (!table.primary || !relatedTable.primary) {
    throw new Error("Unable to generate link schema, no primary keys")
@@ -170,20 +180,22 @@
}
function generateRelatedSchema(
-  linkColumn: FieldSchema,
+  linkColumn: RelationshipFieldMetadata,
  table: Table,
  relatedTable: Table,
  columnName: string
) {
  // generate column for other table
  const relatedSchema = cloneDeep(linkColumn)
+  const isMany2Many =
+    linkColumn.relationshipType === RelationshipType.MANY_TO_MANY
  // swap them from the main link
-  if (linkColumn.foreignKey) {
+  if (!isMany2Many && linkColumn.foreignKey) {
    relatedSchema.fieldName = linkColumn.foreignKey
    relatedSchema.foreignKey = linkColumn.fieldName
  }
  // is many to many
-  else {
+  else if (isMany2Many) {
    // don't need to copy through, already got it
    relatedSchema.fieldName = linkColumn.throughTo
    relatedSchema.throughTo = linkColumn.throughFrom
@@ -197,8 +209,8 @@ function generateRelatedSchema(
  table.schema[columnName] = relatedSchema
}
- function isRelationshipSetup(column: FieldSchema) {
-   return column.foreignKey || column.through
+ function isRelationshipSetup(column: RelationshipFieldMetadata) {
+   return (column as any).foreignKey || (column as any).through
}
export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
@@ -257,14 +269,15 @@ export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
    if (schema.type !== FieldTypes.LINK || isRelationshipSetup(schema)) {
      continue
    }
+    const schemaTableId = schema.tableId
    const relatedTable = Object.values(tables).find(
-      table => table._id === schema.tableId
+      table => table._id === schemaTableId
    )
    if (!relatedTable) {
      continue
    }
    const relatedColumnName = schema.fieldName!
-    const relationType = schema.relationshipType!
+    const relationType = schema.relationshipType
    if (relationType === RelationshipType.MANY_TO_MANY) {
      const junctionTable = generateManyLinkSchema(
        datasource,
@@ -374,10 +387,12 @@ export async function destroy(ctx: UserCtx) {
  return tableToDelete
}
- export async function bulkImport(ctx: UserCtx) {
+ export async function bulkImport(
+   ctx: UserCtx<BulkImportRequest, BulkImportResponse>
+ ) {
  const table = await sdk.tables.getTable(ctx.params.tableId)
-  const { rows }: { rows: unknown } = ctx.request.body
-  const schema: unknown = table.schema
+  const { rows } = ctx.request.body
+  const schema = table.schema
  if (!rows || !isRows(rows) || !isSchema(schema)) {
    ctx.throw(400, "Provided data import information is invalid.")

View File

@@ -8,6 +8,8 @@ import {
 import { isExternalTable, isSQL } from "../../../integrations/utils"
 import { events } from "@budibase/backend-core"
 import {
+  BulkImportRequest,
+  BulkImportResponse,
   FetchTablesResponse,
   SaveTableRequest,
   SaveTableResponse,
@@ -18,7 +20,7 @@ import {
 import sdk from "../../../sdk"
 import { jsonFromCsvString } from "../../../utilities/csv"
 import { builderSocket } from "../../../websockets"
-import { cloneDeep } from "lodash"
+import { cloneDeep, isEqual } from "lodash"

 function pickApi({ tableId, table }: { tableId?: string; table?: Table }) {
   if (table && !tableId) {
@@ -97,9 +99,17 @@ export async function destroy(ctx: UserCtx) {
   builderSocket?.emitTableDeletion(ctx, deletedTable)
 }

-export async function bulkImport(ctx: UserCtx) {
+export async function bulkImport(
+  ctx: UserCtx<BulkImportRequest, BulkImportResponse>
+) {
   const tableId = ctx.params.tableId
-  await pickApi({ tableId }).bulkImport(ctx)
+  let tableBefore = await sdk.tables.getTable(tableId)
+  let tableAfter = await pickApi({ tableId }).bulkImport(ctx)
+
+  if (!isEqual(tableBefore, tableAfter)) {
+    await sdk.tables.saveTable(tableAfter)
+  }
+
   // right now we don't trigger anything for bulk import because it
   // can only be done in the builder, but in the future we may need to
   // think about events for bulk items
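
The reworked handler captures the table before the provider-specific import runs and writes it back only when the import mutated it. A minimal sketch of that compare-then-save pattern, using lodash's isEqual and a hypothetical table shape and save callback (not the real sdk.tables API surface):

import { isEqual } from "lodash"

// Hypothetical table shape; the real Table type lives in @budibase/types.
interface TableDoc {
  _id: string
  schema: Record<string, { type: string }>
}

// Persist only when the import actually changed the document, so an
// unchanged table does not get a redundant new revision on every import.
async function saveIfChanged(
  before: TableDoc,
  after: TableDoc,
  save: (t: TableDoc) => Promise<void>
): Promise<void> {
  if (!isEqual(before, after)) {
    await save(after)
  }
}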

View File

@@ -10,6 +10,8 @@ import {
 } from "../../../utilities/rowProcessor"
 import { runStaticFormulaChecks } from "./bulkFormula"
 import {
+  BulkImportRequest,
+  BulkImportResponse,
   RenameColumn,
   SaveTableRequest,
   SaveTableResponse,
@@ -78,10 +80,10 @@ export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
   // make sure that types don't change of a column, have to remove
   // the column if you want to change the type
   if (oldTable && oldTable.schema) {
-    for (let propKey of Object.keys(tableToSave.schema)) {
+    for (const propKey of Object.keys(tableToSave.schema)) {
       let oldColumn = oldTable.schema[propKey]
       if (oldColumn && oldColumn.type === FieldTypes.INTERNAL) {
-        oldColumn.type = FieldTypes.AUTO
+        oldTable.schema[propKey].type = FieldTypes.AUTO
       }
     }
   }
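
The new assignment writes through oldTable.schema[propKey] rather than the local oldColumn; both paths alias the same object, so behaviour is unchanged, but the explicit path makes the write-back to the stored schema obvious. A tiny demonstration of the aliasing:

// `col` and `schema.age` reference the same object, so a mutation
// through either binding is visible through both.
const schema: Record<string, { type: string }> = { age: { type: "internal" } }
const col = schema.age
col.type = "auto"
console.log(schema.age.type) // "auto"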
@@ -206,7 +208,9 @@ export async function destroy(ctx: any) {
   return tableToDelete
 }

-export async function bulkImport(ctx: any) {
+export async function bulkImport(
+  ctx: UserCtx<BulkImportRequest, BulkImportResponse>
+) {
   const table = await sdk.tables.getTable(ctx.params.tableId)
   const { rows, identifierFields } = ctx.request.body
   await handleDataImport(ctx.user, table, rows, identifierFields)
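
Both handlers now type the body against BulkImportRequest; from the fields the diff reads off ctx.request.body, the request shape is presumably along these lines (an assumption — the canonical definition lives in @budibase/types):

// Assumed request contract, reconstructed from the destructuring above.
interface BulkImportRequest {
  rows: Record<string, any>[] // parsed rows to import
  identifierFields?: string[] // columns used to match existing rows
}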

View File

@@ -20,8 +20,14 @@ import viewTemplate from "../view/viewBuilder"
 import { cloneDeep } from "lodash/fp"
 import { quotas } from "@budibase/pro"
 import { events, context } from "@budibase/backend-core"
-import { ContextUser, Datasource, SourceName, Table } from "@budibase/types"
 import { addTableToSqlite } from "./sqlite"
+import {
+  ContextUser,
+  Datasource,
+  Row,
+  SourceName,
+  Table,
+} from "@budibase/types"

 export async function clearColumns(table: any, columnNames: any) {
   const db = context.getAppDB()
@@ -145,12 +151,12 @@ export async function importToRows(
 }

 export async function handleDataImport(
-  user: any,
-  table: any,
-  rows: any,
+  user: ContextUser,
+  table: Table,
+  rows: Row[],
   identifierFields: Array<string> = []
 ) {
-  const schema: unknown = table.schema
+  const schema = table.schema
   if (!rows || !isRows(rows) || !isSchema(schema)) {
     return table
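
Tightening handleDataImport from any to concrete types turns argument mix-ups into compile errors. The contract after this change, sketched as a declaration (return type inferred from the `return table` paths in the diff):

import { ContextUser, Row, Table } from "@budibase/types"

// Signature as typed in this change; the body is elided here.
declare function handleDataImport(
  user: ContextUser,
  table: Table,
  rows: Row[],
  identifierFields?: string[]
): Promise<Table>

// A call with swapped arguments, e.g. handleDataImport(table, user, rows),
// now fails to compile instead of misbehaving at runtime.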

View File

@@ -43,3 +43,7 @@ export enum Format {
 export function isFormat(format: any): format is Format {
   return Object.values(Format).includes(format as Format)
 }
+
+export function parseCsvExport<T>(value: string) {
+  return JSON.parse(value?.replace(/'/g, '"')) as T
+}
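
parseCsvExport reverses the single-quote convention used when JSON values are embedded in a CSV export. Note the blanket quote replacement means it is only safe on values the exporter itself produced; apostrophes inside user strings would be corrupted. A usage sketch:

// Round-trip: the export stores JSON with single quotes; parseCsvExport
// swaps them back before handing the string to JSON.parse.
const exported = "{'attachments': ['a.png', 'b.png']}"
const parsed = parseCsvExport<{ attachments: string[] }>(exported)
console.log(parsed.attachments[0]) // "a.png"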

View File

@@ -23,7 +23,10 @@ describe("/applications/:appId/import", () => {
       .set(config.defaultHeaders())
       .expect("Content-Type", /json/)
       .expect(200)
-    expect(res.body.message).toBe("app updated")
+    const appPackage = await config.api.application.get(appId!)
+    expect(appPackage.navigation?.links?.length).toBe(2)
+    expect(appPackage.navigation?.links?.[0].url).toBe("/blank")
+    expect(appPackage.navigation?.links?.[1].url).toBe("/derp")
     const screens = await config.api.screen.list()
     expect(screens.length).toBe(2)
     expect(screens[0].routing.route).toBe("/derp")

View File

@@ -37,7 +37,7 @@ describe("/datasources", () => {
       .expect(200)

     expect(res.body.datasource.name).toEqual("Test")
-    expect(res.body.errors).toBeUndefined()
+    expect(res.body.errors).toEqual({})
     expect(events.datasource.created).toBeCalledTimes(1)
   })
 })

Some files were not shown because too many files have changed in this diff