Merge branch 'master' into feature/buttongroup-component

commit e1ba96c5d0
Michael Drury, 2023-10-17 14:08:26 +01:00 (committed by GitHub)
71 changed files with 2515 additions and 2180 deletions

View File

@@ -1,9 +1,14 @@
-packages/server/node_modules
-packages/builder
-packages/frontend-core
-packages/backend-core
-packages/worker/node_modules
-packages/cli
-packages/client
-packages/bbui
-packages/string-templates
+*
+!/packages/
+!/scripts/
+/packages/*/node_modules
+packages/server/scripts/
+!packages/server/scripts/integrations/oracle
+!nx.json
+!/hosting/single/
+!/hosting/letsencrypt/
+!package.json
+!yarn.lock
+!lerna.json
+!.yarnrc

View File

@@ -10,7 +10,6 @@ on:
   push:
     branches:
       - master
-      - develop
   pull_request:
   workflow_dispatch:

@@ -262,11 +261,7 @@ jobs:
           branch="${{ github.base_ref || github.ref_name }}"
           echo "Running on branch '$branch' (base_ref=${{ github.base_ref }}, ref_name=${{ github.head_ref }})"
-          if [[ $branch == "master" ]]; then
-            base_commit=$(git rev-parse origin/master)
-          elif [[ $branch == "develop" ]]; then
-            base_commit=$(git rev-parse origin/develop)
-          fi
+          base_commit=$(git rev-parse origin/master)
           if [[ ! -z $base_commit ]]; then
             echo "target_branch=$branch"

View File

@@ -4,7 +4,13 @@ on:
   pull_request:
     types: [closed]
     branches:
-      - develop
+      - master
+  workflow_dispatch:
+    inputs:
+      BRANCH:
+        type: string
+        description: Which featurebranch branch to destroy?
+        required: true

 jobs:
   release:

@@ -13,8 +19,8 @@ jobs:
       - uses: actions/checkout@v3
       - uses: passeidireto/trigger-external-workflow-action@main
        env:
-          PAYLOAD_BRANCH: ${{ github.head_ref }}
-          PAYLOAD_PR_NUMBER: ${{ github.ref }}
+          PAYLOAD_BRANCH: ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.BRANCH || github.head_ref }}
+          PAYLOAD_PR_NUMBER: ${{ github.event.pull_request.number }}
        with:
          repository: budibase/budibase-deploys
          event: featurebranch-qa-close
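
Note: the new PAYLOAD_BRANCH value leans on GitHub Actions' JavaScript-like expression semantics, where `a && b || c` short-circuits. A minimal TypeScript sketch of the same fallback (function and variable names are illustrative, not part of the workflow):

    // Mirrors: ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.BRANCH || github.head_ref }}
    function payloadBranch(
      eventName: string,
      inputBranch: string | undefined, // BRANCH input, only present on manual runs
      headRef: string // PR head ref, present on pull_request events
    ): string {
      // "&&" yields the manual BRANCH input for workflow_dispatch runs;
      // "||" falls back to the PR head ref for everything else
      return (eventName === "workflow_dispatch" && inputBranch) || headRef
    }

The PR-number payload is also corrected here: `github.ref` is a ref string, while `github.event.pull_request.number` is the actual number.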

View File

@@ -3,7 +3,6 @@ name: deploy-featurebranch
 on:
   pull_request:
     branches:
-      - develop
       - master
 jobs:

View File

@@ -1,41 +0,0 @@
-name: "deploy-preprod"
-on:
-  workflow_dispatch:
-  workflow_call:
-jobs:
-  deploy-to-legacy-preprod-env:
-    runs-on: ubuntu-latest
-    steps:
-      - name: Fail if not a tag
-        run: |
-          if [[ $GITHUB_REF != refs/tags/* ]]; then
-            echo "Workflow Dispatch can only be run on tags"
-            exit 1
-          fi
-      - uses: actions/checkout@v2
-        with:
-          fetch-depth: 0
-      - name: Fail if tag is not in master
-        run: |
-          if ! git merge-base --is-ancestor ${{ github.sha }} origin/master; then
-            echo "Tag is not in master. This pipeline can only execute tags that are present on the master branch"
-            exit 1
-          fi
-      - name: Get the latest budibase release version
-        id: version
-        run: |
-          release_version=$(cat lerna.json | jq -r '.version')
-          echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV
-      - uses: passeidireto/trigger-external-workflow-action@main
-        env:
-          PAYLOAD_VERSION: ${{ env.RELEASE_VERSION }}
-        with:
-          repository: budibase/budibase-deploys
-          event: budicloud-preprod-deploy
-          github_pat: ${{ secrets.GH_ACCESS_TOKEN }}

View File

@@ -1,124 +0,0 @@
-name: Budibase Prerelease
-concurrency:
-  group: release-prerelease
-  cancel-in-progress: false
-
-on:
-  push:
-    tags:
-      - "*-alpha.*"
-  workflow_dispatch:
-
-env:
-  # Posthog token used by ui at build time
-  # disable unless needed for testing
-  # POSTHOG_TOKEN: phc_uDYOfnFt6wAbBAXkC6STjcrTpAFiWIhqgFcsC1UVO5F
-  INTERCOM_TOKEN: ${{ secrets.INTERCOM_TOKEN }}
-  PERSONAL_ACCESS_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
-  FEATURE_PREVIEW_URL: https://budirelease.live
-
-jobs:
-  release-images:
-    runs-on: ubuntu-latest
-    steps:
-      - name: Fail if not a tag
-        run: |
-          if [[ $GITHUB_REF != refs/tags/* ]]; then
-            echo "Workflow Dispatch can only be run on tags"
-            exit 1
-          fi
-      - uses: actions/checkout@v2
-        with:
-          submodules: true
-          token: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
-          fetch-depth: 0
-
-      - name: Fail if tag is not develop
-        run: |
-          if ! git merge-base --is-ancestor ${{ github.sha }} origin/develop; then
-            echo "Tag is not in develop"
-            exit 1
-          fi
-
-      - uses: actions/setup-node@v1
-        with:
-          node-version: 18.x
-
-      - run: yarn install --frozen-lockfile
-      - name: Update versions
-        run: ./scripts/updateVersions.sh
-      - run: yarn build
-      - run: yarn build:sdk
-
-      - name: Publish budibase packages to NPM
-        env:
-          NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
-        run: |
-          # setup the username and email.
-          git config --global user.name "Budibase Staging Release Bot"
-          git config --global user.email "<>"
-          git submodule foreach git commit -a -m 'Release process'
-          git commit -a -m 'Release process'
-          echo //registry.npmjs.org/:_authToken=${NPM_TOKEN} >> .npmrc
-          yarn release:develop
-
-      - name: Build/release Docker images
-        run: |
-          docker login -u $DOCKER_USER -p $DOCKER_PASSWORD
-          yarn build:docker:develop
-        env:
-          DOCKER_USER: ${{ secrets.DOCKER_USERNAME }}
-          DOCKER_PASSWORD: ${{ secrets.DOCKER_API_KEY }}
-
-  release-helm-chart:
-    needs: [release-images]
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v2
-
-      - name: Setup Helm
-        uses: azure/setup-helm@v1
-        id: helm-install
-
-      # due to helm repo index issue: https://github.com/helm/helm/issues/7363
-      # we need to create new package in a different dir, merge the index and move the package back
-      - name: Build and release helm chart
-        run: |
-          git config user.name "Budibase Helm Bot"
-          git config user.email "<>"
-          git reset --hard
-          git fetch
-          mkdir sync
-          echo "Packaging chart to sync dir"
-          helm package charts/budibase --version 0.0.0-develop --app-version develop --destination sync
-          echo "Packaging successful"
-          git checkout gh-pages
-          echo "Indexing helm repo"
-          helm repo index --merge docs/index.yaml sync
-          mv -f sync/* docs
-          rm -rf sync
-          echo "Pushing new helm release"
-          git add -A
-          git commit -m "Helm Release: develop"
-          git push
-
-  trigger-deploy-to-qa-env:
-    needs: [release-helm-chart]
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v2
-      - name: Get the current budibase release version
-        id: version
-        run: |
-          release_version=$(cat lerna.json | jq -r '.version')
-          echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV
-
-      - uses: passeidireto/trigger-external-workflow-action@main
-        env:
-          PAYLOAD_VERSION: ${{ env.RELEASE_VERSION }}
-        with:
-          repository: budibase/budibase-deploys
-          event: budicloud-qa-deploy
-          github_pat: ${{ secrets.GH_ACCESS_TOKEN }}

View File

@@ -110,19 +110,13 @@ jobs:
           git commit -m "Helm Release: ${{ env.RELEASE_VERSION }}"
           git push

-  deploy-to-legacy-preprod-env:
-    needs: [release-images]
-    uses: ./.github/workflows/deploy-preprod.yml
-    secrets: inherit
-
-  # Trigger deploy to new EKS preprod environment
-  trigger-deploy-to-preprod-env:
+  trigger-deploy-to-qa-env:
     needs: [release-helm-chart]
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v2
-      - name: Get the current budibase release version
+      - name: Get the latest budibase release version
         id: version
         run: |
           release_version=$(cat lerna.json | jq -r '.version')

@@ -133,5 +127,5 @@ jobs:
           PAYLOAD_VERSION: ${{ env.RELEASE_VERSION }}
         with:
           repository: budibase/budibase-deploys
-          event: budicloud-preprod-deploy
+          event: budicloud-qa-deploy
           github_pat: ${{ secrets.GH_ACCESS_TOKEN }}

View File

@@ -0,0 +1,69 @@
+name: Test
+on:
+  workflow_dispatch:
+env:
+  CI: true
+  PERSONAL_ACCESS_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
+  REGISTRY_URL: registry.hub.docker.com
+  NX_CLOUD_ACCESS_TOKEN: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
+jobs:
+  build:
+    name: "build"
+    runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        node-version: [18.x]
+    steps:
+      - name: "Checkout"
+        uses: actions/checkout@v4
+        with:
+          submodules: true
+          token: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
+      - name: Use Node.js ${{ matrix.node-version }}
+        uses: actions/setup-node@v3
+        with:
+          node-version: ${{ matrix.node-version }}
+          cache: "yarn"
+      - name: Setup QEMU
+        uses: docker/setup-qemu-action@v3
+      - name: Setup Docker Buildx
+        id: buildx
+        uses: docker/setup-buildx-action@v3
+      - name: Run Yarn
+        run: yarn
+      - name: Run Yarn Build
+        run: yarn build --scope @budibase/server --scope @budibase/worker
+      - name: Login to Docker Hub
+        uses: docker/login-action@v2
+        with:
+          username: ${{ secrets.DOCKER_USERNAME }}
+          password: ${{ secrets.DOCKER_API_KEY }}
+      - name: Get the latest release version
+        id: version
+        run: |
+          release_version=$(cat lerna.json | jq -r '.version')
+          echo $release_version
+          echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV
+      - name: Tag and release Budibase service docker image
+        uses: docker/build-push-action@v5
+        with:
+          context: .
+          push: true
+          pull: true
+          platforms: linux/amd64,linux/arm64
+          tags: budibase/budibase-test:test
+          file: ./hosting/single/Dockerfile.v2
+          cache-from: type=registry,ref=budibase/budibase-test:test
+          cache-to: type=inline
+      - name: Tag and release Budibase Azure App Service docker image
+        uses: docker/build-push-action@v2
+        with:
+          context: .
+          push: true
+          platforms: linux/amd64
+          build-args: TARGETBUILD=aas
+          tags: budibase/budibase-test:aas
+          file: ./hosting/single/Dockerfile.v2

View File

@@ -1,42 +0,0 @@
-name: Tag prerelease
-concurrency:
-  group: tag-prerelease
-  cancel-in-progress: false
-
-on:
-  push:
-    branches:
-      - develop
-    paths:
-      - ".aws/**"
-      - ".github/**"
-      - "charts/**"
-      - "packages/**"
-      - "scripts/**"
-      - "package.json"
-      - "yarn.lock"
-  workflow_dispatch:
-
-jobs:
-  tag-prerelease:
-    runs-on: ubuntu-latest
-    steps:
-      - name: Fail if branch is not develop
-        if: github.ref != 'refs/heads/develop'
-        run: |
-          echo "Ref is not develop, you must run this job from develop."
-          exit 1
-      - uses: actions/checkout@v2
-        with:
-          submodules: true
-          token: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
-      - run: cd scripts && yarn
-      - name: Tag prerelease
-        run: |
-          cd scripts
-          # setup the username and email.
-          git config --global user.name "Budibase Staging Release Bot"
-          git config --global user.email "<>"
-          ./versionCommit.sh prerelease

View File

@@ -4,17 +4,6 @@ concurrency:
   cancel-in-progress: false
 on:
-  push:
-    branches:
-      - master
-    paths:
-      - ".aws/**"
-      - ".github/**"
-      - "charts/**"
-      - "packages/**"
-      - "scripts/**"
-      - "package.json"
-      - "yarn.lock"
   workflow_dispatch:
     inputs:
       versioning:

View File

@@ -1 +1 @@
-network-timeout 100000
+network-timeout 1000000

View File

@@ -138,6 +138,8 @@ To develop the Budibase platform you'll need [Docker](https://www.docker.com/) a
 `yarn setup` will check that all necessary components are installed and setup the repo for usage.

+If you have access to the `@budibase/pro` submodule then please follow the Pro section of this guide before running the above command.
+
 ##### Manual method

 The following commands can be executed to manually get Budibase up and running (assuming Docker/Docker Compose has been installed).

@@ -146,6 +148,8 @@ The following commands can be executed to manually get Budibase up and running (
 `yarn build` will build all budibase packages.

+If you have access to the `@budibase/pro` submodule then please follow the Pro section of this guide before running the above commands.
+
 #### 4. Running

 To run the budibase server and builder in dev mode (i.e. with live reloading):

View File

@@ -0,0 +1,126 @@
+FROM node:18-slim as build
+
+# install node-gyp dependencies
+RUN apt-get update && apt-get install -y --no-install-recommends g++ make python3 jq
+
+# copy and install dependencies
+WORKDIR /app
+COPY package.json .
+COPY yarn.lock .
+COPY lerna.json .
+COPY .yarnrc .
+
+COPY packages/server/package.json packages/server/package.json
+COPY packages/worker/package.json packages/worker/package.json
+# string-templates does not get bundled during the esbuild process, so we want to use the local version
+COPY packages/string-templates/package.json packages/string-templates/package.json
+
+COPY scripts/removeWorkspaceDependencies.sh scripts/removeWorkspaceDependencies.sh
+RUN chmod +x ./scripts/removeWorkspaceDependencies.sh
+RUN ./scripts/removeWorkspaceDependencies.sh
+
+# We will never want to sync pro, but the script is still required
+RUN echo '' > scripts/syncProPackage.js
+RUN jq 'del(.scripts.postinstall)' package.json > temp.json && mv temp.json package.json
+RUN --mount=type=cache,target=/root/.yarn YARN_CACHE_FOLDER=/root/.yarn yarn install --production
+
+# copy the actual code
+COPY packages/server/dist packages/server/dist
+COPY packages/server/pm2.config.js packages/server/pm2.config.js
+COPY packages/server/client packages/server/client
+COPY packages/server/builder packages/server/builder
+COPY packages/worker/dist packages/worker/dist
+COPY packages/worker/pm2.config.js packages/worker/pm2.config.js
+COPY packages/string-templates packages/string-templates
+
+FROM budibase/couchdb as runner
+ARG TARGETARCH
+ENV TARGETARCH $TARGETARCH
+#TARGETBUILD can be set to single (for single docker image) or aas (for azure app service)
+# e.g. docker build --build-arg TARGETBUILD=aas ....
+ARG TARGETBUILD=single
+ENV TARGETBUILD $TARGETBUILD
+
+# install base dependencies
+RUN apt-get update && \
+    apt-get install -y --no-install-recommends software-properties-common nginx uuid-runtime redis-server
+
+# Install postgres client for pg_dump utils
+RUN apt install software-properties-common apt-transport-https gpg -y \
+    && curl -fsSl https://www.postgresql.org/media/keys/ACCC4CF8.asc | gpg --dearmor | tee /usr/share/keyrings/postgresql.gpg > /dev/null \
+    && echo deb [arch=amd64,arm64,ppc64el signed-by=/usr/share/keyrings/postgresql.gpg] http://apt.postgresql.org/pub/repos/apt/ $(lsb_release -cs)-pgdg main | tee /etc/apt/sources.list.d/postgresql.list \
+    && apt update -y \
+    && apt install postgresql-client-15 -y \
+    && apt remove software-properties-common apt-transport-https gpg -y
+
+# install other dependencies, nodejs, oracle requirements, jdk8, redis, nginx
+WORKDIR /nodejs
+RUN curl -sL https://deb.nodesource.com/setup_18.x -o /tmp/nodesource_setup.sh && \
+    bash /tmp/nodesource_setup.sh && \
+    apt-get install -y --no-install-recommends libaio1 nodejs && \
+    npm install --global yarn pm2
+
+# setup nginx
+COPY hosting/single/nginx/nginx.conf /etc/nginx
+COPY hosting/single/nginx/nginx-default-site.conf /etc/nginx/sites-enabled/default
+RUN mkdir -p /var/log/nginx && \
+    touch /var/log/nginx/error.log && \
+    touch /var/run/nginx.pid && \
+    usermod -a -G tty www-data
+
+WORKDIR /
+RUN mkdir -p scripts/integrations/oracle
+COPY packages/server/scripts/integrations/oracle scripts/integrations/oracle
+RUN /bin/bash -e ./scripts/integrations/oracle/instantclient/linux/install.sh
+
+# setup minio
+WORKDIR /minio
+COPY scripts/install-minio.sh ./install.sh
+RUN chmod +x install.sh && ./install.sh
+
+# setup runner file
+WORKDIR /
+COPY hosting/single/runner.sh .
+RUN chmod +x ./runner.sh
+COPY hosting/single/healthcheck.sh .
+RUN chmod +x ./healthcheck.sh
+
+# Script below sets the path for storing data based on $DATA_DIR
+# For Azure App Service install SSH & point data locations to /home
+COPY hosting/single/ssh/sshd_config /etc/
+COPY hosting/single/ssh/ssh_setup.sh /tmp
+RUN /build-target-paths.sh
+
+# setup letsencrypt certificate
+RUN apt-get install -y certbot python3-certbot-nginx
+COPY hosting/letsencrypt /app/letsencrypt
+RUN chmod +x /app/letsencrypt/certificate-request.sh /app/letsencrypt/certificate-renew.sh
+
+COPY --from=build /app/node_modules /node_modules
+COPY --from=build /app/package.json /package.json
+COPY --from=build /app/packages/server /app
+COPY --from=build /app/packages/worker /worker
+COPY --from=build /app/packages/string-templates /string-templates
+
+RUN cd /string-templates && yarn link && cd ../app && yarn link @budibase/string-templates && cd ../worker && yarn link @budibase/string-templates
+
+EXPOSE 80
+EXPOSE 443
+# Expose port 2222 for SSH on Azure App Service build
+EXPOSE 2222
+VOLUME /data
+
+HEALTHCHECK --interval=15s --timeout=15s --start-period=45s CMD "/healthcheck.sh"
+
+# must set this just before running
+ENV NODE_ENV=production
+WORKDIR /
+
+CMD ["./runner.sh"]

View File

@@ -7,16 +7,16 @@ declare -a DOCKER_VARS=("APP_PORT" "APPS_URL" "ARCHITECTURE" "BUDIBASE_ENVIRONME
 [[ -z "${BUDIBASE_ENVIRONMENT}" ]] && export BUDIBASE_ENVIRONMENT=PRODUCTION
 [[ -z "${CLUSTER_PORT}" ]] && export CLUSTER_PORT=80
 [[ -z "${DEPLOYMENT_ENVIRONMENT}" ]] && export DEPLOYMENT_ENVIRONMENT=docker
-[[ -z "${MINIO_URL}" ]] && export MINIO_URL=http://localhost:9000
+[[ -z "${MINIO_URL}" ]] && export MINIO_URL=http://127.0.0.1:9000
 [[ -z "${NODE_ENV}" ]] && export NODE_ENV=production
 [[ -z "${POSTHOG_TOKEN}" ]] && export POSTHOG_TOKEN=phc_bIjZL7oh2GEUd2vqvTBH8WvrX0fWTFQMs6H5KQxiUxU
 [[ -z "${TENANT_FEATURE_FLAGS}" ]] && export TENANT_FEATURE_FLAGS="*:LICENSING,*:USER_GROUPS,*:ONBOARDING_TOUR"
 [[ -z "${ACCOUNT_PORTAL_URL}" ]] && export ACCOUNT_PORTAL_URL=https://account.budibase.app
-[[ -z "${REDIS_URL}" ]] && export REDIS_URL=localhost:6379
+[[ -z "${REDIS_URL}" ]] && export REDIS_URL=127.0.0.1:6379
 [[ -z "${SELF_HOSTED}" ]] && export SELF_HOSTED=1
 [[ -z "${WORKER_PORT}" ]] && export WORKER_PORT=4002
-[[ -z "${WORKER_URL}" ]] && export WORKER_URL=http://localhost:4002
-[[ -z "${APPS_URL}" ]] && export APPS_URL=http://localhost:4001
+[[ -z "${WORKER_URL}" ]] && export WORKER_URL=http://127.0.0.1:4002
+[[ -z "${APPS_URL}" ]] && export APPS_URL=http://127.0.0.1:4001
 [[ -z "${SERVER_TOP_LEVEL_PATH}" ]] && export SERVER_TOP_LEVEL_PATH=/app
 # export CUSTOM_DOMAIN=budi001.custom.com

@@ -51,7 +51,7 @@
   fi
 done
 if [[ -z "${COUCH_DB_URL}" ]]; then
-  export COUCH_DB_URL=http://$COUCHDB_USER:$COUCHDB_PASSWORD@localhost:5984
+  export COUCH_DB_URL=http://$COUCHDB_USER:$COUCHDB_PASSWORD@127.0.0.1:5984
 fi
 if [ ! -f "${DATA_DIR}/.env" ]; then
   touch ${DATA_DIR}/.env
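
Note: the defaults above now point at 127.0.0.1 instead of localhost, presumably so name resolution cannot drift to the IPv6 loopback (::1) while the bundled services listen on IPv4; that rationale is an assumption, not stated in the diff. A TypeScript sketch of the same "default only when unset" pattern the `[[ -z ... ]] && export` lines implement:

    // Hypothetical Node-side equivalent of the runner.sh defaulting pattern
    const defaults: Record<string, string> = {
      MINIO_URL: "http://127.0.0.1:9000",
      REDIS_URL: "127.0.0.1:6379",
      WORKER_URL: "http://127.0.0.1:4002",
      APPS_URL: "http://127.0.0.1:4001",
    }
    for (const [key, value] of Object.entries(defaults)) {
      // assign only when the variable is unset, leaving explicit config untouched
      process.env[key] ??= value
    }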

View File

@ -1,5 +1,5 @@
{ {
"version": "2.11.34", "version": "2.11.35",
"npmClient": "yarn", "npmClient": "yarn",
"packages": [ "packages": [
"packages/*" "packages/*"

View File

@ -8,5 +8,9 @@
} }
} }
}, },
"targetDefaults": {} "targetDefaults": {
"build": {
"inputs": ["{workspaceRoot}/scripts/build.js"]
}
}
} }

View File

@@ -3,14 +3,11 @@
   "private": true,
   "devDependencies": {
     "@esbuild-plugins/tsconfig-paths": "^0.1.2",
-    "@nx/js": "16.4.3",
-    "@rollup/plugin-json": "^4.0.2",
     "@typescript-eslint/parser": "6.7.2",
     "esbuild": "^0.18.17",
     "esbuild-node-externals": "^1.8.0",
     "eslint": "^8.44.0",
     "husky": "^8.0.3",
-    "js-yaml": "^4.1.0",
     "kill-port": "^1.6.1",
     "lerna": "7.1.1",
     "madge": "^6.0.0",

@@ -19,8 +16,6 @@
     "nx-cloud": "16.0.5",
     "prettier": "2.8.8",
     "prettier-plugin-svelte": "^2.3.0",
-    "rimraf": "^3.0.2",
-    "rollup-plugin-replace": "^2.2.0",
     "svelte": "3.49.0",
     "typescript": "5.2.2",
     "@babel/core": "^7.22.5",

@@ -51,7 +46,7 @@
     "dev:noserver": "yarn run kill-builder && lerna run --stream dev:stack:up && lerna run --stream dev:builder --ignore @budibase/backend-core --ignore @budibase/server --ignore @budibase/worker",
     "dev:server": "yarn run kill-server && lerna run --stream dev:builder --scope @budibase/worker --scope @budibase/server",
     "dev:built": "yarn run kill-all && cd packages/server && yarn dev:stack:up && cd ../../ && lerna run --stream dev:built",
-    "dev:docker": "yarn build:docker:pre && docker-compose -f hosting/docker-compose.build.yaml -f hosting/docker-compose.dev.yaml --env-file hosting/.env up --build --scale proxy-service=0",
+    "dev:docker": "yarn build && docker-compose -f hosting/docker-compose.build.yaml -f hosting/docker-compose.dev.yaml --env-file hosting/.env up --build --scale proxy-service=0",
     "test": "lerna run --stream test --stream",
     "lint:eslint": "eslint packages qa-core --max-warnings=0",
     "lint:prettier": "prettier --check \"packages/**/*.{js,ts,svelte}\" && prettier --write \"examples/**/*.{js,ts,svelte}\" && prettier --check \"qa-core/**/*.{js,ts,svelte}\"",

@@ -61,7 +56,6 @@
     "lint:fix": "yarn run lint:fix:prettier && yarn run lint:fix:eslint",
     "build:specs": "lerna run --stream specs",
     "build:docker": "lerna run --stream build:docker && yarn build:docker:proxy && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh $BUDIBASE_RELEASE_VERSION && cd -",
-    "build:docker:pre": "yarn build && lerna run --stream predocker",
     "build:docker:proxy": "docker build hosting/proxy -t proxy-service",
     "build:docker:selfhost": "lerna run --stream build:docker && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh latest && cd -",
     "build:docker:develop": "node scripts/pinVersions && lerna run --stream build:docker && yarn build:docker:proxy && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh develop && cd -",

@@ -69,8 +63,7 @@
     "build:docker:airgap:single": "SINGLE_IMAGE=1 node hosting/scripts/airgapped/airgappedDockerBuild",
     "build:digitalocean": "cd hosting/digitalocean && ./build.sh && cd -",
     "build:docker:single:multiarch": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/single/Dockerfile -t budibase:latest .",
-    "build:docker:single:image": "docker build -f hosting/single/Dockerfile -t budibase:latest .",
-    "build:docker:single": "yarn build && lerna run --concurrency 1 predocker && yarn build:docker:single:image",
+    "build:docker:single": "./scripts/build-single-image.sh",
     "build:docker:dependencies": "docker build -f hosting/dependencies/Dockerfile -t budibase/dependencies:latest ./hosting",
     "publish:docker:couch": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/couchdb/Dockerfile -t budibase/couchdb:latest -t budibase/couchdb:v3.2.1 --push ./hosting/couchdb",
     "publish:docker:dependencies": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/dependencies/Dockerfile -t budibase/dependencies:latest -t budibase/dependencies:v3.2.1 --push ./hosting",

View File

@@ -26,7 +26,7 @@
     "@budibase/shared-core": "0.0.0",
     "@budibase/types": "0.0.0",
     "@techpass/passport-openidconnect": "0.3.2",
-    "aws-cloudfront-sign": "2.2.0",
+    "aws-cloudfront-sign": "3.0.2",
     "aws-sdk": "2.1030.0",
     "bcrypt": "5.1.0",
     "bcryptjs": "2.4.3",

View File

@@ -1,5 +1,5 @@
 import env from "../environment"
-const cfsign = require("aws-cloudfront-sign")
+import * as cfsign from "aws-cloudfront-sign"

 let PRIVATE_KEY: string | undefined

@@ -21,7 +21,7 @@ function getPrivateKey() {
 const getCloudfrontSignParams = () => {
   return {
-    keypairId: env.CLOUDFRONT_PUBLIC_KEY_ID,
+    keypairId: env.CLOUDFRONT_PUBLIC_KEY_ID!,
     privateKeyString: getPrivateKey(),
     expireTime: new Date().getTime() + 1000 * 60 * 60, // 1 hour
   }

View File

@@ -6,6 +6,7 @@ import {
   AutomationStepIdArray,
   AutomationIOType,
   AutomationCustomIOType,
+  DatasourceFeature,
 } from "@budibase/types"
 import joi from "joi"

@@ -67,9 +68,27 @@ function validateDatasource(schema: any) {
     version: joi.string().optional(),
     schema: joi.object({
       docs: joi.string(),
+      plus: joi.boolean().optional(),
+      isSQL: joi.boolean().optional(),
+      auth: joi
+        .object({
+          type: joi.string().required(),
+        })
+        .optional(),
+      features: joi
+        .object(
+          Object.fromEntries(
+            Object.values(DatasourceFeature).map(key => [
+              key,
+              joi.boolean().optional(),
+            ])
+          )
+        )
+        .optional(),
+      relationships: joi.boolean().optional(),
+      description: joi.string().required(),
       friendlyName: joi.string().required(),
       type: joi.string().allow(...DATASOURCE_TYPES),
-      description: joi.string().required(),
       datasource: joi.object().pattern(joi.string(), fieldValidator).required(),
       query: joi
         .object()
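
Note: the new `features` validator is generated from the DatasourceFeature enum rather than written out by hand, so newly added features are accepted automatically. A self-contained TypeScript sketch (the enum members here are stand-ins for whatever @budibase/types actually defines):

    import joi from "joi"

    // Stand-in for the real DatasourceFeature enum
    enum DatasourceFeature {
      CONNECTION_CHECKING = "connection",
      FETCH_TABLE_NAMES = "fetch_table_names",
    }

    // Builds one optional boolean flag per enum value, e.g.
    // { connection: boolean?, fetch_table_names: boolean? }
    const features = joi
      .object(
        Object.fromEntries(
          Object.values(DatasourceFeature).map(key => [key, joi.boolean().optional()])
        )
      )
      .optional()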

View File

@@ -82,9 +82,9 @@
     "@spectrum-css/vars": "3.0.1",
     "dayjs": "^1.10.8",
     "easymde": "^2.16.1",
+    "svelte-dnd-action": "^0.9.8",
     "svelte-flatpickr": "3.2.3",
-    "svelte-portal": "^1.0.0",
-    "svelte-dnd-action": "^0.9.8"
+    "svelte-portal": "^1.0.0"
   },
   "resolutions": {
     "loader-utils": "1.4.1"

View File

@@ -1,8 +0,0 @@
-const ncp = require("ncp").ncp
-
-ncp("./dist", "../server/builder", function (err) {
-  if (err) {
-    return console.error(err)
-  }
-  console.log("Copied dist folder to ../server/builder")
-})

View File

@@ -85,8 +85,8 @@
     "@babel/core": "^7.12.14",
     "@babel/plugin-transform-runtime": "^7.13.10",
     "@babel/preset-env": "^7.13.12",
-    "@rollup/plugin-replace": "^2.4.2",
-    "@roxi/routify": "2.18.5",
+    "@rollup/plugin-replace": "^5.0.3",
+    "@roxi/routify": "2.18.12",
     "@sveltejs/vite-plugin-svelte": "1.0.1",
     "@testing-library/jest-dom": "5.17.0",
     "@testing-library/svelte": "^3.2.2",

@@ -95,16 +95,18 @@
     "jest": "29.6.2",
     "jsdom": "^21.1.1",
     "ncp": "^2.0.0",
-    "rollup": "^2.44.0",
     "svelte": "^3.48.0",
     "svelte-jester": "^1.3.2",
-    "vite": "^3.0.8",
-    "vite-plugin-static-copy": "^0.16.0",
+    "vite": "^4.4.11",
+    "vite-plugin-static-copy": "^0.17.0",
     "vitest": "^0.29.2"
   },
   "nx": {
     "targets": {
       "build": {
+        "outputs": [
+          "{workspaceRoot}/packages/server/builder"
+        ],
         "dependsOn": [
           {
             "projects": [
View File

@@ -36,7 +36,7 @@
   import { FieldType, FieldSubtype, SourceName } from "@budibase/types"
   import RelationshipSelector from "components/common/RelationshipSelector.svelte"

-  const AUTO_TYPE = "auto"
+  const AUTO_TYPE = FIELDS.AUTO.type
   const FORMULA_TYPE = FIELDS.FORMULA.type
   const LINK_TYPE = FIELDS.LINK.type
   const STRING_TYPE = FIELDS.STRING.type

@@ -60,8 +60,13 @@
     {}
   )

-  function makeFieldId(type, subtype) {
-    return `${type}${subtype || ""}`.toUpperCase()
+  function makeFieldId(type, subtype, autocolumn) {
+    // don't make field IDs for auto types
+    if (type === AUTO_TYPE || autocolumn) {
+      return type.toUpperCase()
+    } else {
+      return `${type}${subtype || ""}`.toUpperCase()
+    }
   }

   let originalName

@@ -183,7 +188,8 @@
     if (!savingColumn) {
       editableColumn.fieldId = makeFieldId(
         editableColumn.type,
-        editableColumn.subtype
+        editableColumn.subtype,
+        editableColumn.autocolumn
       )

       allowedTypes = getAllowedTypes().map(t => ({

@@ -419,7 +425,7 @@
       FIELDS.FORMULA,
       FIELDS.JSON,
       isUsers ? FIELDS.USERS : FIELDS.USER,
-      { name: "Auto Column", type: AUTO_TYPE },
+      FIELDS.AUTO,
     ]
   } else {
     let fields = [

@@ -538,7 +544,7 @@
   getOptionValue={field => field.fieldId}
   getOptionIcon={field => field.icon}
   isOptionEnabled={option => {
-    if (option.type == AUTO_TYPE) {
+    if (option.type === AUTO_TYPE) {
       return availableAutoColumnKeys?.length > 0
     }
     return true
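
Note: collapsing every auto column to a single AUTO field ID is what lets the type dropdown treat FIELDS.AUTO as one entry. A TypeScript sketch of the new behaviour, with illustrative inputs:

    const AUTO_TYPE = "auto" // FIELDS.AUTO.type in the real code

    function makeFieldId(type: string, subtype?: string, autocolumn?: boolean): string {
      // auto columns ignore their subtype so all variants share one ID
      if (type === AUTO_TYPE || autocolumn) {
        return type.toUpperCase()
      }
      return `${type}${subtype || ""}`.toUpperCase()
    }

    makeFieldId("string")                  // "STRING"
    makeFieldId("link", "many")            // "LINKMANY" (illustrative subtype)
    makeFieldId("number", undefined, true) // "NUMBER", autocolumn wins over subtype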

View File

@@ -13,6 +13,8 @@
   import { Helpers } from "@budibase/bbui"
   import { RelationshipErrorChecker } from "./relationshipErrors"
   import { onMount } from "svelte"
+  import RelationshipSelector from "components/common/RelationshipSelector.svelte"
+  import { PrettyRelationshipDefinitions } from "constants/backend"

   export let save
   export let datasource

@@ -22,16 +24,21 @@
   export let selectedFromTable
   export let close

-  const relationshipTypes = [
-    {
-      label: "One to Many",
-      value: RelationshipType.MANY_TO_ONE,
-    },
-    {
-      label: "Many to Many",
-      value: RelationshipType.MANY_TO_MANY,
-    },
-  ]
+  let relationshipMap = {
+    [RelationshipType.MANY_TO_MANY]: {
+      part1: PrettyRelationshipDefinitions.MANY,
+      part2: PrettyRelationshipDefinitions.MANY,
+    },
+    [RelationshipType.MANY_TO_ONE]: {
+      part1: PrettyRelationshipDefinitions.ONE,
+      part2: PrettyRelationshipDefinitions.MANY,
+    },
+  }
+  let relationshipOpts1 = Object.values(PrettyRelationshipDefinitions)
+  let relationshipOpts2 = Object.values(PrettyRelationshipDefinitions)
+  let relationshipPart1 = PrettyRelationshipDefinitions.MANY
+  let relationshipPart2 = PrettyRelationshipDefinitions.ONE

   let originalFromColumnName = toRelationship.name,
     originalToColumnName = fromRelationship.name

@@ -49,14 +56,32 @@
   )

   let errors = {}
   let fromPrimary, fromForeign, fromColumn, toColumn
-  let fromId, toId, throughId, throughToKey, throughFromKey
+  let throughId, throughToKey, throughFromKey
   let isManyToMany, isManyToOne, relationshipType
   let hasValidated = false

+  $: fromId = null
+  $: toId = null
+
   $: tableOptions = plusTables.map(table => ({
     label: table.name,
     value: table._id,
+    name: table.name,
+    _id: table._id,
   }))
+
+  $: {
+    // Determine the relationship type based on the selected values of both parts
+    relationshipType = Object.entries(relationshipMap).find(
+      ([_, parts]) =>
+        parts.part1 === relationshipPart1 && parts.part2 === relationshipPart2
+    )?.[0]
+
+    changed(() => {
+      hasValidated = false
+    })
+  }

   $: valid =
     getErrorCount(errors) === 0 && allRequiredAttributesSet(relationshipType)
   $: isManyToMany = relationshipType === RelationshipType.MANY_TO_MANY

@@ -338,33 +363,34 @@
   onConfirm={saveRelationship}
   disabled={!valid}
 >
-  <Select
-    label="Relationship type"
-    options={relationshipTypes}
-    bind:value={relationshipType}
-    bind:error={errors.relationshipType}
-    on:change={() =>
-      changed(() => {
-        hasValidated = false
-      })}
-  />
   <div class="headings">
     <Detail>Tables</Detail>
   </div>
-  {#if !selectedFromTable}
-    <Select
-      label="Select from table"
-      options={tableOptions}
-      bind:value={fromId}
-      bind:error={errors.fromTable}
-      on:change={e =>
-        changed(() => {
-          const table = plusTables.find(tbl => tbl._id === e.detail)
-          fromColumn = table?.name || ""
-          fromPrimary = table?.primary?.[0]
-        })}
-    />
-  {/if}
+  <RelationshipSelector
+    bind:relationshipPart1
+    bind:relationshipPart2
+    bind:relationshipTableIdPrimary={fromId}
+    bind:relationshipTableIdSecondary={toId}
+    {relationshipOpts1}
+    {relationshipOpts2}
+    {tableOptions}
+    {errors}
+    primaryDisabled={selectedFromTable}
+    primaryTableChanged={e =>
+      changed(() => {
+        const table = plusTables.find(tbl => tbl._id === e.detail)
+        fromColumn = table?.name || ""
+        fromPrimary = table?.primary?.[0]
+      })}
+    secondaryTableChanged={e =>
+      changed(() => {
+        const table = plusTables.find(tbl => tbl._id === e.detail)
+        toColumn = table.name || ""
+        fromForeign = null
+      })}
+  />
   {#if isManyToOne && fromId}
     <Select
       label={`Primary Key (${getTable(fromId).name})`}

@@ -374,18 +400,6 @@
       on:change={changed}
     />
   {/if}
-  <Select
-    label={"Select to table"}
-    options={tableOptions}
-    bind:value={toId}
-    bind:error={errors.toTable}
-    on:change={e =>
-      changed(() => {
-        const table = plusTables.find(tbl => tbl._id === e.detail)
-        toColumn = table.name || ""
-        fromForeign = null
-      })}
-  />
   {#if isManyToMany}
     <Select
       label={"Through"}
View File

@@ -57,7 +57,7 @@
 {#if $store.error}
   <InlineAlert
     type="error"
-    header={$store.error.title}
+    header="Error fetching {tableType}"
     message={$store.error.description}
   />
 {/if}

View File

@@ -1,6 +1,6 @@
 import { derived, writable, get } from "svelte/store"
 import { keepOpen, notifications } from "@budibase/bbui"
-import { datasources, ImportTableError, tables } from "stores/backend"
+import { datasources, tables } from "stores/backend"

 export const createTableSelectionStore = (integration, datasource) => {
   const tableNamesStore = writable([])

@@ -30,12 +30,7 @@ export const createTableSelectionStore = (integration, datasource) => {
       notifications.success(`Tables fetched successfully.`)
       await onComplete()
     } catch (err) {
-      if (err instanceof ImportTableError) {
-        errorStore.set(err)
-      } else {
-        notifications.error("Error fetching tables.")
-      }
+      errorStore.set(err)
       return keepOpen
     }
   }

View File

@@ -6,11 +6,14 @@
   export let relationshipTableIdPrimary
   export let relationshipTableIdSecondary
   export let editableColumn
-  export let linkEditDisabled
+  export let linkEditDisabled = false
   export let tableOptions
   export let errors
   export let relationshipOpts1
   export let relationshipOpts2
+  export let primaryTableChanged
+  export let secondaryTableChanged
+  export let primaryDisabled = true
 </script>

 <div class="relationship-container">

@@ -19,16 +22,19 @@
       disabled={linkEditDisabled}
       bind:value={relationshipPart1}
       options={relationshipOpts1}
+      bind:error={errors.relationshipType}
     />
   </div>
   <div class="relationship-label">in</div>
   <div class="relationship-part">
     <Select
-      disabled
+      disabled={primaryDisabled}
       options={tableOptions}
       getOptionLabel={table => table.name}
       getOptionValue={table => table._id}
       bind:value={relationshipTableIdPrimary}
+      on:change={primaryTableChanged}
+      bind:error={errors.fromTable}
     />
   </div>
 </div>

@@ -46,20 +52,24 @@
     <Select
       disabled={linkEditDisabled}
       bind:value={relationshipTableIdSecondary}
+      bind:error={errors.toTable}
      options={tableOptions.filter(
        table => table._id !== relationshipTableIdPrimary
      )}
      getOptionLabel={table => table.name}
      getOptionValue={table => table._id}
+      on:change={secondaryTableChanged}
     />
   </div>
 </div>
-<Input
-  disabled={linkEditDisabled}
-  label={`Column name in other table`}
-  bind:value={editableColumn.fieldName}
-  error={errors.relatedName}
-/>
+{#if editableColumn}
+  <Input
+    disabled={linkEditDisabled}
+    label={`Column name in other table`}
+    bind:value={editableColumn.fieldName}
+    error={errors.relatedName}
+  />
+{/if}

 <style>
   .relationship-container {
View File

@@ -54,6 +54,7 @@
   label="App export"
   on:change={e => {
     file = e.detail?.[0]
+    encrypted = file?.name?.endsWith(".enc.tar.gz")
   }}
 />
 <Toggle text="Encrypted" bind:value={encrypted} />

View File

@@ -1,5 +1,21 @@
 import { FieldType, FieldSubtype } from "@budibase/types"

+export const AUTO_COLUMN_SUB_TYPES = {
+  AUTO_ID: "autoID",
+  CREATED_BY: "createdBy",
+  CREATED_AT: "createdAt",
+  UPDATED_BY: "updatedBy",
+  UPDATED_AT: "updatedAt",
+}
+
+export const AUTO_COLUMN_DISPLAY_NAMES = {
+  AUTO_ID: "Auto ID",
+  CREATED_BY: "Created By",
+  CREATED_AT: "Created At",
+  UPDATED_BY: "Updated By",
+  UPDATED_AT: "Updated At",
+}
+
 export const FIELDS = {
   STRING: {
     name: "Text",

@@ -107,6 +123,12 @@ export const FIELDS = {
       presence: false,
     },
   },
+  AUTO: {
+    name: "Auto Column",
+    type: FieldType.AUTO,
+    icon: "MagicWand",
+    constraints: {},
+  },
   FORMULA: {
     name: "Formula",
     type: FieldType.FORMULA,

@@ -139,22 +161,6 @@ export const FIELDS = {
   },
 }

-export const AUTO_COLUMN_SUB_TYPES = {
-  AUTO_ID: "autoID",
-  CREATED_BY: "createdBy",
-  CREATED_AT: "createdAt",
-  UPDATED_BY: "updatedBy",
-  UPDATED_AT: "updatedAt",
-}
-
-export const AUTO_COLUMN_DISPLAY_NAMES = {
-  AUTO_ID: "Auto ID",
-  CREATED_BY: "Created By",
-  CREATED_AT: "Created At",
-  UPDATED_BY: "Updated By",
-  UPDATED_AT: "Updated At",
-}
-
 export const FILE_TYPES = {
   IMAGE: ["png", "tiff", "gif", "raw", "jpg", "jpeg"],
   CODE: ["js", "rs", "py", "java", "rb", "hs", "yml"],

View File

@@ -62,7 +62,14 @@
   </div>
 {/if}
 <div class="truncate">
-  <Body>{getSubtitle(datasource)}</Body>
+  <Body>
+    {@const subtitle = getSubtitle(datasource)}
+    {#if subtitle}
+      {subtitle}
+    {:else}
+      {Object.values(datasource.config).join(" / ")}
+    {/if}
+  </Body>
 </div>
 </div>
 <div class="right">

View File

@@ -13,7 +13,7 @@
   import ExportAppModal from "components/start/ExportAppModal.svelte"
   import ImportAppModal from "components/start/ImportAppModal.svelte"

-  $: filteredApps = $apps.filter(app => app.devId == $store.appId)
+  $: filteredApps = $apps.filter(app => app.devId === $store.appId)
   $: app = filteredApps.length ? filteredApps[0] : {}
   $: appDeployed = app?.status === AppStatus.DEPLOYED
View File

@@ -9,15 +9,19 @@ import { API } from "api"
 import { DatasourceFeature } from "@budibase/types"
 import { TableNames } from "constants"

-export class ImportTableError extends Error {
-  constructor(message) {
-    super(message)
-    const [title, description] = message.split(" - ")
-
-    this.name = "TableSelectionError"
-    // Capitalize the first character of both the title and description
-    this.title = title[0].toUpperCase() + title.substr(1)
-    this.description = description[0].toUpperCase() + description.substr(1)
+class TableImportError extends Error {
+  constructor(errors) {
+    super()
+    this.name = "TableImportError"
+    this.errors = errors
+  }
+
+  get description() {
+    let message = ""
+    for (const key in this.errors) {
+      message += `${key}: ${this.errors[key]}\n`
+    }
+    return message
   }
 }

@@ -25,7 +29,6 @@ export function createDatasourcesStore() {
   const store = writable({
     list: [],
     selectedDatasourceId: null,
-    schemaError: null,
   })

   const derivedStore = derived([store, tables], ([$store, $tables]) => {

@@ -75,18 +78,13 @@
     store.update(state => ({
       ...state,
       selectedDatasourceId: id,
-      // Remove any possible schema error
-      schemaError: null,
     }))
   }

   const updateDatasource = response => {
-    const { datasource, error } = response
-    if (error) {
-      store.update(state => ({
-        ...state,
-        schemaError: error,
-      }))
-    }
+    const { datasource, errors } = response
+    if (errors && Object.keys(errors).length > 0) {
+      throw new TableImportError(errors)
+    }
     replaceDatasource(datasource._id, datasource)
     select(datasource._id)

@@ -94,20 +92,11 @@
   }

   const updateSchema = async (datasource, tablesFilter) => {
-    try {
-      const response = await API.buildDatasourceSchema({
-        datasourceId: datasource?._id,
-        tablesFilter,
-      })
-      updateDatasource(response)
-    } catch (e) {
-      // buildDatasourceSchema call returns user presentable errors with two parts divided with a " - ".
-      if (e.message.split(" - ").length === 2) {
-        throw new ImportTableError(e.message)
-      } else {
-        throw e
-      }
-    }
+    const response = await API.buildDatasourceSchema({
+      datasourceId: datasource?._id,
+      tablesFilter,
+    })
+    updateDatasource(response)
   }

   const sourceCount = source => {

@@ -136,6 +125,7 @@
       config,
       name: `${integration.friendlyName}${nameModifier}`,
       plus: integration.plus && integration.name !== IntegrationTypes.REST,
+      isSQL: integration.isSQL,
     }

     if (await checkDatasourceValidity(integration, datasource)) {

@@ -171,12 +161,6 @@
     replaceDatasource(datasource._id, null)
   }

-  const removeSchemaError = () => {
-    store.update(state => {
-      return { ...state, schemaError: null }
-    })
-  }
-
   const replaceDatasource = (datasourceId, datasource) => {
     if (!datasourceId) {
       return

@@ -229,7 +213,6 @@
     create,
     update,
     delete: deleteDatasource,
-    removeSchemaError,
     replaceDatasource,
     getTableNames,
   }

View File

@@ -4,7 +4,7 @@ export { views } from "./views"
 export { viewsV2 } from "./viewsV2"
 export { permissions } from "./permissions"
 export { roles } from "./roles"
-export { datasources, ImportTableError } from "./datasources"
+export { datasources } from "./datasources"
 export { integrations } from "./integrations"
 export { sortedIntegrations } from "./sortedIntegrations"
 export { queries } from "./queries"

View File

@@ -5788,6 +5788,21 @@
       }
     ]
   },
+  {
+    "type": "event",
+    "label": "On row click",
+    "key": "onRowClick",
+    "context": [
+      {
+        "label": "Clicked row",
+        "key": "row"
+      }
+    ],
+    "dependsOn": {
+      "setting": "allowEditRows",
+      "value": false
+    }
+  },
   {
     "type": "boolean",
     "label": "Add rows",

View File

@@ -14,12 +14,14 @@
   export let initialSortOrder = null
   export let fixedRowHeight = null
   export let columns = null
+  export let onRowClick = null

   const component = getContext("component")
   const { styleable, API, builderStore, notificationStore } = getContext("sdk")

   $: columnWhitelist = columns?.map(col => col.name)
   $: schemaOverrides = getSchemaOverrides(columns)
+  $: handleRowClick = allowEditRows ? undefined : onRowClick

   const getSchemaOverrides = columns => {
     let overrides = {}

@@ -56,6 +58,7 @@
     showControls={false}
     notifySuccess={notificationStore.actions.success}
     notifyError={notificationStore.actions.error}
+    on:rowclick={e => handleRowClick?.({ row: e.detail })}
   />
 </div>
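
Note: handleRowClick is intentionally undefined while allowEditRows is on, matching the manifest's dependsOn { setting: "allowEditRows", value: false }, so clicks keep driving cell editing rather than firing the event. A one-function TypeScript sketch of the gate:

    type RowClickHandler = (ctx: { row: Record<string, unknown> }) => void

    // Row-click actions only apply when inline editing is disabled
    function resolveRowClick(
      allowEditRows: boolean,
      onRowClick: RowClickHandler | null
    ): RowClickHandler | undefined {
      return allowEditRows ? undefined : (onRowClick ?? undefined)
    }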

View File

@@ -17,13 +17,24 @@
   const { config, dispatch, selectedRows } = getContext("grid")
   const svelteDispatch = createEventDispatcher()

-  const select = () => {
+  const select = e => {
+    e.stopPropagation()
     svelteDispatch("select")
     const id = row?._id
     if (id) {
       selectedRows.actions.toggleRow(id)
     }
   }
+
+  const bulkDelete = e => {
+    e.stopPropagation()
+    dispatch("request-bulk-delete")
+  }
+
+  const expand = e => {
+    e.stopPropagation()
+    svelteDispatch("expand")
+  }
 </script>

 <GridCell

@@ -56,7 +67,7 @@
     {/if}
   {/if}
   {#if rowSelected && $config.canDeleteRows}
-    <div class="delete" on:click={() => dispatch("request-bulk-delete")}>
+    <div class="delete" on:click={bulkDelete}>
       <Icon
         name="Delete"
         size="S"

@@ -65,12 +76,7 @@
     </div>
   {:else}
     <div class="expand" class:visible={$config.canExpandRows && expandable}>
-      <Icon
-        size="S"
-        name="Maximize"
-        hoverable
-        on:click={() => svelteDispatch("expand")}
-      />
+      <Icon size="S" name="Maximize" hoverable on:click={expand} />
     </div>
   {/if}
 </div>

View File

@@ -35,7 +35,7 @@
 </script>

 <div bind:this={body} class="grid-body">
-  <GridScrollWrapper scrollHorizontally scrollVertically wheelInteractive>
+  <GridScrollWrapper scrollHorizontally scrollVertically attachHandlers>
     {#each $renderedRows as row, idx}
       <GridRow
         {row}

View File

@@ -17,6 +17,7 @@
     columnHorizontalInversionIndex,
     contentLines,
     isDragging,
+    dispatch,
   } = getContext("grid")

   $: rowSelected = !!$selectedRows[row._id]

@@ -30,6 +31,7 @@
   on:focus
   on:mouseenter={$isDragging ? null : () => ($hoveredRowId = row._id)}
   on:mouseleave={$isDragging ? null : () => ($hoveredRowId = null)}
+  on:click={() => dispatch("rowclick", row)}
 >
   {#each $renderedColumns as column, columnIdx (column.name)}
     {@const cellId = `${row._id}-${column.name}`}

View File

@@ -17,7 +17,11 @@
   export let scrollVertically = false
   export let scrollHorizontally = false
-  export let wheelInteractive = false
+  export let attachHandlers = false
+
+  // Used for tracking touch events
+  let initialTouchX
+  let initialTouchY

   $: style = generateStyle($scroll, $rowHeight, $hiddenColumnsWidth)

@@ -27,17 +31,47 @@
     return `transform: translate3d(${offsetX}px, ${offsetY}px, 0);`
   }

-  // Handles a wheel even and updates the scroll offsets
+  // Handles a mouse wheel event and updates scroll state
   const handleWheel = e => {
     e.preventDefault()
-    debouncedHandleWheel(e.deltaX, e.deltaY, e.clientY)
+    updateScroll(e.deltaX, e.deltaY, e.clientY)

     // If a context menu was visible, hide it
     if ($menu.visible) {
       menu.actions.close()
     }
   }
-  const debouncedHandleWheel = domDebounce((deltaX, deltaY, clientY) => {
+
+  // Handles touch start events
+  const handleTouchStart = e => {
+    if (!e.touches?.[0]) return
+    initialTouchX = e.touches[0].clientX
+    initialTouchY = e.touches[0].clientY
+  }
+
+  // Handles touch move events and updates scroll state
+  const handleTouchMove = e => {
+    if (!e.touches?.[0]) return
+    e.preventDefault()
+
+    // Compute delta from previous event, and update scroll
+    const deltaX = initialTouchX - e.touches[0].clientX
+    const deltaY = initialTouchY - e.touches[0].clientY
+    updateScroll(deltaX, deltaY)
+
+    // Store position to reference in next event
+    initialTouchX = e.touches[0].clientX
+    initialTouchY = e.touches[0].clientY
+
+    // If a context menu was visible, hide it
+    if ($menu.visible) {
+      menu.actions.close()
+    }
+  }
+
+  // Updates the scroll offset by a certain delta, and ensure scrolling
+  // stays within sensible bounds. Debounced for performance.
+  const updateScroll = domDebounce((deltaX, deltaY, clientY) => {
     const { top, left } = $scroll

     // Calculate new scroll top

@@ -55,15 +89,19 @@
     })

     // Hover row under cursor
-    const y = clientY - $bounds.top + (newScrollTop % $rowHeight)
-    const hoveredRow = $renderedRows[Math.floor(y / $rowHeight)]
-    hoveredRowId.set(hoveredRow?._id)
+    if (clientY != null) {
+      const y = clientY - $bounds.top + (newScrollTop % $rowHeight)
+      const hoveredRow = $renderedRows[Math.floor(y / $rowHeight)]
+      hoveredRowId.set(hoveredRow?._id)
+    }
   })
 </script>

 <div
   class="outer"
-  on:wheel={wheelInteractive ? handleWheel : null}
+  on:wheel={attachHandlers ? handleWheel : null}
+  on:touchstart={attachHandlers ? handleTouchStart : null}
+  on:touchmove={attachHandlers ? handleTouchMove : null}
   on:click|self={() => ($focusedCellId = null)}
 >
   <div {style} class="inner">
View File

@@ -205,7 +205,7 @@
   {/if}
 </div>

 <div class="normal-columns" transition:fade|local={{ duration: 130 }}>
-  <GridScrollWrapper scrollHorizontally wheelInteractive>
+  <GridScrollWrapper scrollHorizontally attachHandlers>
     <div class="row">
       {#each $renderedColumns as column, columnIdx}
         {@const cellId = `new-${column.name}`}

View File

@@ -64,7 +64,7 @@
 </div>

 <div class="content" on:mouseleave={() => ($hoveredRowId = null)}>
-  <GridScrollWrapper scrollVertically wheelInteractive>
+  <GridScrollWrapper scrollVertically attachHandlers>
     {#each $renderedRows as row, idx}
       {@const rowSelected = !!$selectedRows[row._id]}
       {@const rowHovered = $hoveredRowId === row._id}

@@ -74,6 +74,7 @@
         class="row"
         on:mouseenter={$isDragging ? null : () => ($hoveredRowId = row._id)}
         on:mouseleave={$isDragging ? null : () => ($hoveredRowId = null)}
+        on:click={() => dispatch("rowclick", row)}
       >
         <GutterCell {row} {rowFocused} {rowHovered} {rowSelected} />
         {#if $stickyColumn}

View File

@@ -53,18 +53,27 @@
    }
  }

+ const getLocation = e => {
+   return {
+     y: e.touches?.[0]?.clientY ?? e.clientY,
+     x: e.touches?.[0]?.clientX ?? e.clientX,
+   }
+ }
+
  // V scrollbar drag handlers
  const startVDragging = e => {
    e.preventDefault()
-   initialMouse = e.clientY
+   initialMouse = getLocation(e).y
    initialScroll = $scrollTop
    document.addEventListener("mousemove", moveVDragging)
+   document.addEventListener("touchmove", moveVDragging)
    document.addEventListener("mouseup", stopVDragging)
+   document.addEventListener("touchend", stopVDragging)
    isDraggingV = true
    closeMenu()
  }
  const moveVDragging = domDebounce(e => {
-   const delta = e.clientY - initialMouse
+   const delta = getLocation(e).y - initialMouse
    const weight = delta / availHeight
    const newScrollTop = initialScroll + weight * $maxScrollTop
    scroll.update(state => ({
@@ -74,22 +83,26 @@
  })
  const stopVDragging = () => {
    document.removeEventListener("mousemove", moveVDragging)
+   document.removeEventListener("touchmove", moveVDragging)
    document.removeEventListener("mouseup", stopVDragging)
+   document.removeEventListener("touchend", stopVDragging)
    isDraggingV = false
  }

  // H scrollbar drag handlers
  const startHDragging = e => {
    e.preventDefault()
-   initialMouse = e.clientX
+   initialMouse = getLocation(e).x
    initialScroll = $scrollLeft
    document.addEventListener("mousemove", moveHDragging)
+   document.addEventListener("touchmove", moveHDragging)
    document.addEventListener("mouseup", stopHDragging)
+   document.addEventListener("touchend", stopHDragging)
    isDraggingH = true
    closeMenu()
  }
  const moveHDragging = domDebounce(e => {
-   const delta = e.clientX - initialMouse
+   const delta = getLocation(e).x - initialMouse
    const weight = delta / availWidth
    const newScrollLeft = initialScroll + weight * $maxScrollLeft
    scroll.update(state => ({
@@ -99,7 +112,9 @@
  })
  const stopHDragging = () => {
    document.removeEventListener("mousemove", moveHDragging)
+   document.removeEventListener("touchmove", moveHDragging)
    document.removeEventListener("mouseup", stopHDragging)
+   document.removeEventListener("touchend", stopHDragging)
    isDraggingH = false
  }
</script>

@@ -109,6 +124,7 @@
    class="v-scrollbar"
    style="--size:{ScrollBarSize}px; top:{barTop}px; height:{barHeight}px;"
    on:mousedown={startVDragging}
+   on:touchstart={startVDragging}
    class:dragging={isDraggingV}
  />
{/if}
@@ -117,6 +133,7 @@
    class="h-scrollbar"
    style="--size:{ScrollBarSize}px; left:{barLeft}px; width:{barWidth}px;"
    on:mousedown={startHDragging}
+   on:touchstart={startHDragging}
    class:dragging={isDraggingH}
  />
{/if}
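Each drag handler now registers a mouse/touch listener pair and removes both on completion. A sketch of how the repeated add/remove pairs could be factored into one helper returning a cleanup function (an illustrative refactor, not what this commit does):

// Illustrative helper, not part of this commit.
const listenForDragEnd = (move, stop) => {
  document.addEventListener("mousemove", move)
  document.addEventListener("touchmove", move)
  document.addEventListener("mouseup", stop)
  document.addEventListener("touchend", stop)
  return () => {
    document.removeEventListener("mousemove", move)
    document.removeEventListener("touchmove", move)
    document.removeEventListener("mouseup", stop)
    document.removeEventListener("touchend", stop)
  }
}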
View File
@@ -1,4 +1,5 @@
import { writable, get } from "svelte/store"
+import { Helpers } from "@budibase/bbui"

export const createStores = () => {
  const copiedCell = writable(null)
@@ -12,7 +13,16 @@ export const createActions = context => {
  const { copiedCell, focusedCellAPI } = context

  const copy = () => {
-   copiedCell.set(get(focusedCellAPI)?.getValue())
+   const value = get(focusedCellAPI)?.getValue()
+   copiedCell.set(value)
+
+   // Also copy a stringified version to the clipboard
+   let stringified = ""
+   if (value != null && value !== "") {
+     // Only conditionally stringify to avoid redundant quotes around text
+     stringified = typeof value === "object" ? JSON.stringify(value) : value
+   }
+   Helpers.copyToClipboard(stringified)
  }

  const paste = () => {
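Helpers.copyToClipboard comes from @budibase/bbui and its body is not part of this diff; a plausible sketch, assuming it wraps the async Clipboard API with a legacy fallback:

// Hedged sketch of a copyToClipboard helper; the real bbui implementation
// is not shown in this commit.
export const copyToClipboard = async value => {
  if (navigator.clipboard && window.isSecureContext) {
    await navigator.clipboard.writeText(value)
  } else {
    // Fallback for non-secure contexts where navigator.clipboard is unavailable
    const textArea = document.createElement("textarea")
    textArea.value = value
    document.body.appendChild(textArea)
    textArea.select()
    document.execCommand("copy")
    document.body.removeChild(textArea)
  }
}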
View File
@@ -11,15 +11,14 @@
  "scripts": {
    "prebuild": "rimraf dist/",
    "build": "node ./scripts/build.js",
+   "postbuild": "copyfiles -f ../client/dist/budibase-client.js ../client/manifest.json client && copyfiles -f ../../yarn.lock ./dist/",
    "check:types": "tsc -p tsconfig.json --noEmit --paths null",
-   "postbuild": "copyfiles -f ../client/dist/budibase-client.js ../client/manifest.json client",
    "build:dev": "yarn prebuild && tsc --build --watch --preserveWatchOutput",
    "debug": "yarn build && node --expose-gc --inspect=9222 dist/index.js",
    "test": "bash scripts/test.sh",
    "test:memory": "jest --maxWorkers=2 --logHeapUsage --forceExit",
    "test:watch": "jest --watch",
-   "predocker": "copyfiles -f ../client/dist/budibase-client.js ../client/manifest.json client && yarn build && cp ../../yarn.lock ./dist/",
-   "build:docker": "yarn predocker && docker build . -t app-service --label version=$BUDIBASE_RELEASE_VERSION",
+   "build:docker": "yarn build && docker build . -t app-service --label version=$BUDIBASE_RELEASE_VERSION",
    "run:docker": "node dist/index.js",
    "run:docker:cluster": "pm2-runtime start pm2.config.js",
    "dev:stack:up": "node scripts/dev/manage.js up",
@@ -54,7 +53,7 @@
    "@bull-board/api": "3.7.0",
    "@bull-board/koa": "3.9.4",
    "@elastic/elasticsearch": "7.10.0",
-   "@google-cloud/firestore": "5.0.2",
+   "@google-cloud/firestore": "6.8.0",
    "@koa/router": "8.0.8",
    "@sentry/node": "6.17.7",
    "@socket.io/redis-adapter": "^8.2.1",
@@ -70,7 +69,6 @@
    "curlconverter": "3.21.0",
    "dd-trace": "3.13.2",
    "dotenv": "8.2.0",
-   "fix-path": "3.0.0",
    "form-data": "4.0.0",
    "global-agent": "3.0.0",
    "google-auth-library": "7.12.0",
@@ -96,12 +94,11 @@
    "object-sizeof": "2.6.1",
    "open": "8.4.0",
    "openai": "^3.2.1",
-   "openapi-types": "9.3.1",
    "pg": "8.10.0",
-   "posthog-node": "1.3.0",
    "pouchdb": "7.3.0",
-   "pouchdb-all-dbs": "1.0.2",
+   "pouchdb-all-dbs": "1.1.1",
    "pouchdb-find": "7.2.2",
-   "pouchdb-replication-stream": "1.2.9",
    "redis": "4",
    "server-destroy": "1.0.1",
    "snowflake-promise": "^4.5.0",
@@ -113,8 +110,7 @@
    "validate.js": "0.13.1",
    "vm2": "^3.9.19",
    "worker-farm": "1.7.0",
-   "xml2js": "0.5.0",
-   "yargs": "13.2.4"
+   "xml2js": "0.5.0"
  },
  "devDependencies": {
    "@babel/core": "7.17.4",
@@ -144,7 +140,6 @@
    "jest-runner": "29.6.2",
    "jest-serial-runner": "1.2.1",
    "nodemon": "2.0.15",
-   "openapi-types": "9.3.1",
    "openapi-typescript": "5.2.0",
    "path-to-regexp": "6.2.0",
    "rimraf": "3.0.2",
@@ -154,7 +149,8 @@
    "ts-node": "10.8.1",
    "tsconfig-paths": "4.0.0",
    "typescript": "5.2.2",
-   "update-dotenv": "1.1.1"
+   "update-dotenv": "1.1.1",
+   "yargs": "13.2.4"
  },
  "optionalDependencies": {
    "oracledb": "5.3.0"
@@ -171,6 +167,22 @@
          "target": "build"
        }
      ]
+   },
+   "build": {
+     "outputs": [
+       "{projectRoot}/builder",
+       "{projectRoot}/client",
+       "{projectRoot}/dist"
+     ],
+     "dependsOn": [
+       {
+         "projects": [
+           "@budibase/client",
+           "@budibase/builder"
+         ],
+         "target": "build"
+       }
+     ]
    }
  }
}
View File
@@ -5,7 +5,6 @@ import {
  getTableParams,
} from "../../db/utils"
import { destroy as tableDestroy } from "./table/internal"
-import { BuildSchemaErrors, InvalidColumns } from "../../constants"
import { getIntegration } from "../../integrations"
import { invalidateDynamicVariables } from "../../threads/utils"
import { context, db as dbCore, events } from "@budibase/backend-core"
@@ -14,10 +13,13 @@ import {
  CreateDatasourceResponse,
  Datasource,
  DatasourcePlus,
+ ExternalTable,
  FetchDatasourceInfoRequest,
  FetchDatasourceInfoResponse,
  IntegrationBase,
+ Schema,
  SourceName,
+ Table,
  UpdateDatasourceResponse,
  UserCtx,
  VerifyDatasourceRequest,
@@ -27,23 +29,6 @@ import sdk from "../../sdk"
import { builderSocket } from "../../websockets"
import { setupCreationAuth as googleSetupCreationAuth } from "../../integrations/googlesheets"

-function getErrorTables(errors: any, errorType: string) {
-  return Object.entries(errors)
-    .filter(entry => entry[1] === errorType)
-    .map(([name]) => name)
-}
-
-function updateError(error: any, newError: any, tables: string[]) {
-  if (!error) {
-    error = ""
-  }
-  if (error.length > 0) {
-    error += "\n"
-  }
-  error += `${newError} ${tables.join(", ")}`
-  return error
-}
-
async function getConnector(
  datasource: Datasource
): Promise<IntegrationBase | DatasourcePlus> {
@@ -71,48 +56,36 @@ async function getAndMergeDatasource(datasource: Datasource) {
  return await sdk.datasources.enrich(enrichedDatasource)
}

-async function buildSchemaHelper(datasource: Datasource) {
+async function buildSchemaHelper(datasource: Datasource): Promise<Schema> {
  const connector = (await getConnector(datasource)) as DatasourcePlus
- await connector.buildSchema(datasource._id!, datasource.entities!)
-
- const errors = connector.schemaErrors
- let error = null
- if (errors && Object.keys(errors).length > 0) {
-   const noKey = getErrorTables(errors, BuildSchemaErrors.NO_KEY)
-   const invalidCol = getErrorTables(errors, BuildSchemaErrors.INVALID_COLUMN)
-   if (noKey.length) {
-     error = updateError(
-       error,
-       "No primary key constraint found for the following:",
-       noKey
-     )
-   }
-   if (invalidCol.length) {
-     const invalidCols = Object.values(InvalidColumns).join(", ")
-     error = updateError(
-       error,
-       `Cannot use columns ${invalidCols} found in following:`,
-       invalidCol
-     )
-   }
- }
- return { tables: connector.tables, error }
+ return await connector.buildSchema(
+   datasource._id!,
+   datasource.entities! as Record<string, ExternalTable>
+ )
}

-async function buildFilteredSchema(datasource: Datasource, filter?: string[]) {
- let { tables, error } = await buildSchemaHelper(datasource)
- let finalTables = tables
- if (filter) {
-   finalTables = {}
-   for (let key in tables) {
-     if (
-       filter.some((filter: any) => filter.toLowerCase() === key.toLowerCase())
-     ) {
-       finalTables[key] = tables[key]
-     }
-   }
- }
- return { tables: finalTables, error }
+async function buildFilteredSchema(
+  datasource: Datasource,
+  filter?: string[]
+): Promise<Schema> {
+  let schema = await buildSchemaHelper(datasource)
+  if (!filter) {
+    return schema
+  }
+
+  let filteredSchema: Schema = { tables: {}, errors: {} }
+  for (let key in schema.tables) {
+    if (filter.some(filter => filter.toLowerCase() === key.toLowerCase())) {
+      filteredSchema.tables[key] = schema.tables[key]
+    }
+  }
+
+  for (let key in schema.errors) {
+    if (filter.some(filter => filter.toLowerCase() === key.toLowerCase())) {
+      filteredSchema.errors[key] = schema.errors[key]
+    }
+  }
+  return filteredSchema
}

export async function fetch(ctx: UserCtx) {
@@ -156,7 +129,7 @@ export async function buildSchemaFromDb(ctx: UserCtx) {
  const tablesFilter = ctx.request.body.tablesFilter
  const datasource = await sdk.datasources.get(ctx.params.datasourceId)

- const { tables, error } = await buildFilteredSchema(datasource, tablesFilter)
+ const { tables, errors } = await buildFilteredSchema(datasource, tablesFilter)
  datasource.entities = tables
  setDefaultDisplayColumns(datasource)
@@ -164,13 +137,11 @@ export async function buildSchemaFromDb(ctx: UserCtx) {
    sdk.tables.populateExternalTableSchemas(datasource)
  )
  datasource._rev = dbResp.rev
- const cleanedDatasource = await sdk.datasources.removeSecretSingle(datasource)

- const res: any = { datasource: cleanedDatasource }
- if (error) {
-   res.error = error
- }
- ctx.body = res
+ ctx.body = {
+   datasource: await sdk.datasources.removeSecretSingle(datasource),
+   errors,
+ }
}

/**
@@ -298,15 +269,12 @@ export async function save(
    type: plus ? DocumentType.DATASOURCE_PLUS : DocumentType.DATASOURCE,
  }

- let schemaError = null
+ let errors: Record<string, string> = {}
  if (fetchSchema) {
-   const { tables, error } = await buildFilteredSchema(
-     datasource,
-     tablesFilter
-   )
-   schemaError = error
-   datasource.entities = tables
+   const schema = await buildFilteredSchema(datasource, tablesFilter)
+   datasource.entities = schema.tables
    setDefaultDisplayColumns(datasource)
+   errors = schema.errors
  }

  if (preSaveAction[datasource.source]) {
@@ -327,13 +295,10 @@ export async function save(
    }
  }

- const response: CreateDatasourceResponse = {
+ ctx.body = {
    datasource: await sdk.datasources.removeSecretSingle(datasource),
+   errors,
  }
- if (schemaError) {
-   response.error = schemaError
- }
- ctx.body = response
  builderSocket?.emitDatasourceUpdate(ctx, datasource)
}
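With this refactor, schema problems are per-table data rather than one concatenated string. An illustrative call (table names hypothetical):

// Illustrative usage of the new Schema-shaped result.
const { tables, errors } = await buildFilteredSchema(datasource, ["users"])
// tables -> only the "users" table (names are matched case-insensitively)
// errors -> only errors recorded against "users", e.g.
//           { users: "Table must have a primary key." }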
View File
@@ -23,7 +23,10 @@ describe("/applications/:appId/import", () => {
      .set(config.defaultHeaders())
      .expect("Content-Type", /json/)
      .expect(200)
-   expect(res.body.message).toBe("app updated")
+   const appPackage = await config.api.application.get(appId!)
+   expect(appPackage.navigation?.links?.length).toBe(2)
+   expect(appPackage.navigation?.links?.[0].url).toBe("/blank")
+   expect(appPackage.navigation?.links?.[1].url).toBe("/derp")
    const screens = await config.api.screen.list()
    expect(screens.length).toBe(2)
    expect(screens[0].routing.route).toBe("/derp")
View File
@@ -37,7 +37,7 @@ describe("/datasources", () => {
        .expect(200)

      expect(res.body.datasource.name).toEqual("Test")
-     expect(res.body.errors).toBeUndefined()
+     expect(res.body.errors).toEqual({})
      expect(events.datasource.created).toBeCalledTimes(1)
    })
  })
View File
@@ -1,4 +1,4 @@
-import Sentry from "@sentry/node"
+import * as Sentry from "@sentry/node"

if (process.env.DD_APM_ENABLED) {
  require("./ddApm")
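@sentry/node ships as CommonJS without a default export, so the old default import compiles but binds undefined at runtime; a namespace import binds the whole module. Hypothetical usage for illustration (the init call is not part of this commit):

import * as Sentry from "@sentry/node"

// Illustrative only: standard Sentry initialisation and capture.
Sentry.init({ dsn: process.env.SENTRY_DSN })
Sentry.captureException(new Error("boom"))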
View File
@@ -159,11 +159,6 @@ export enum InvalidColumns {
  TABLE_ID = "tableId",
}

-export enum BuildSchemaErrors {
-  NO_KEY = "no_key",
-  INVALID_COLUMN = "invalid_column",
-}
-
export enum AutomationErrors {
  INCORRECT_TYPE = "INCORRECT_TYPE",
  MAX_ITERATIONS = "MAX_ITERATIONS_REACHED",
View File
@@ -1,12 +1,11 @@
import { bootstrap } from "global-agent"
-const fixPath = require("fix-path")
import { checkDevelopmentEnvironment } from "./utilities/fileSystem"

function runServer() {
  // this will shutdown the system if development environment not ready
  // will print an error explaining what to do
  checkDevelopmentEnvironment()
- fixPath()
  // this will setup http and https proxies from env variables
  process.env.GLOBAL_AGENT_FORCE_GLOBAL_AGENT = "false"
  bootstrap()
View File
@@ -18,6 +18,7 @@ import _ from "lodash"
import { generator } from "@budibase/backend-core/tests"
import { utils } from "@budibase/backend-core"
import { databaseTestProviders } from "../integrations/tests/utils"
+import { Client } from "pg"

const config = setup.getConfig()!
@@ -1055,4 +1056,46 @@ describe("postgres integrations", () => {
    expect(response.body.tableNames.indexOf(primaryName)).not.toBe(-1)
  })
})
describe("POST /api/datasources/:datasourceId/schema", () => {
let client: Client
beforeEach(async () => {
client = new Client(
(await databaseTestProviders.postgres.getDsConfig()).config!
)
await client.connect()
})
afterEach(async () => {
await client.query(`DROP TABLE IF EXISTS "table"`)
await client.end()
})
it("recognises when a table has no primary key", async () => {
await client.query(`CREATE TABLE "table" (id SERIAL)`)
const response = await makeRequest(
"post",
`/api/datasources/${postgresDatasource._id}/schema`
)
expect(response.body.errors).toEqual({
table: "Table must have a primary key.",
})
})
it("recognises when a table is using a reserved column name", async () => {
await client.query(`CREATE TABLE "table" (_id SERIAL PRIMARY KEY) `)
const response = await makeRequest(
"post",
`/api/datasources/${postgresDatasource._id}/schema`
)
expect(response.body.errors).toEqual({
table: "Table contains invalid columns.",
})
})
})
})
View File
@@ -14,9 +14,14 @@ import {
  SortJson,
  ExternalTable,
  TableRequest,
+ Schema,
} from "@budibase/types"
import { OAuth2Client } from "google-auth-library"
-import { buildExternalTableId, finaliseExternalTables } from "./utils"
+import {
+  buildExternalTableId,
+  checkExternalTables,
+  finaliseExternalTables,
+} from "./utils"
import { GoogleSpreadsheet, GoogleSpreadsheetRow } from "google-spreadsheet"
import fetch from "node-fetch"
import { cache, configs, context, HTTPError } from "@budibase/backend-core"
@@ -138,8 +143,6 @@ const SCHEMA: Integration = {
class GoogleSheetsIntegration implements DatasourcePlus {
  private readonly config: GoogleSheetsConfig
  private client: GoogleSpreadsheet
- public tables: Record<string, ExternalTable> = {}
- public schemaErrors: Record<string, string> = {}

  constructor(config: GoogleSheetsConfig) {
    this.config = config
@@ -281,19 +284,37 @@ class GoogleSheetsIntegration implements DatasourcePlus {
  async buildSchema(
    datasourceId: string,
    entities: Record<string, ExternalTable>
- ) {
+ ): Promise<Schema> {
    // not fully configured yet
    if (!this.config.auth) {
-     return
+     return { tables: {}, errors: {} }
    }
    await this.connect()
    const sheets = this.client.sheetsByIndex
    const tables: Record<string, ExternalTable> = {}
+   let errors: Record<string, string> = {}
    await utils.parallelForeach(
      sheets,
      async sheet => {
        // must fetch rows to determine schema
-       await sheet.getRows()
+       try {
+         await sheet.getRows()
+       } catch (err) {
+         // We expect this to always be an Error so if it's not, rethrow it to
+         // make sure we don't fail quietly.
+         if (!(err instanceof Error)) {
+           throw err
+         }
+         if (err.message.startsWith("No values in the header row")) {
+           errors[sheet.title] = err.message
+         } else {
+           // If we get an error we don't expect, rethrow to avoid failing
+           // quietly.
+           throw err
+         }
+         return
+       }

        const id = buildExternalTableId(datasourceId, sheet.title)
        tables[sheet.title] = this.getTableSchema(
@@ -305,9 +326,9 @@ class GoogleSheetsIntegration implements DatasourcePlus {
      },
      10
    )
-   const final = finaliseExternalTables(tables, entities)
-   this.tables = final.tables
-   this.schemaErrors = final.errors
+   let externalTables = finaliseExternalTables(tables, entities)
+   errors = { ...errors, ...checkExternalTables(externalTables) }
+   return { tables: externalTables, errors }
  }

  async query(json: QueryJson) {
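The sheets are processed through utils.parallelForeach with a concurrency cap of 10. The helper's body is not in this diff; a minimal sketch under the assumed semantics (run the task over all items with at most `limit` in flight):

// Assumed semantics of parallelForeach; not the backend-core implementation.
async function parallelForeach(items, task, limit) {
  const queue = [...items]
  const workers = Array.from({ length: Math.min(limit, queue.length) }, async () => {
    let item
    while ((item = queue.shift()) !== undefined) {
      await task(item)
    }
  })
  await Promise.all(workers)
}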
View File
@@ -11,6 +11,7 @@ import {
  DatasourceFeature,
  ConnectionInfo,
  SourceName,
+ Schema,
} from "@budibase/types"
import {
  getSqlQuery,
@@ -18,6 +19,7 @@ import {
  convertSqlType,
  finaliseExternalTables,
  SqlClient,
+ checkExternalTables,
} from "./utils"
import Sql from "./base/sql"
import { MSSQLTablesResponse, MSSQLColumn } from "./base/types"
@@ -190,8 +192,6 @@ class SqlServerIntegration extends Sql implements DatasourcePlus {
  private readonly config: MSSQLConfig
  private index: number = 0
  private client?: sqlServer.ConnectionPool
- public tables: Record<string, ExternalTable> = {}
- public schemaErrors: Record<string, string> = {}

  MASTER_TABLES = [
    "spt_fallback_db",
@@ -381,7 +381,7 @@ class SqlServerIntegration extends Sql implements DatasourcePlus {
  async buildSchema(
    datasourceId: string,
    entities: Record<string, ExternalTable>
- ) {
+ ): Promise<Schema> {
    await this.connect()
    let tableInfo: MSSQLTablesResponse[] = await this.runSQL(this.TABLES_SQL)
    if (tableInfo == null || !Array.isArray(tableInfo)) {
@@ -445,9 +445,12 @@ class SqlServerIntegration extends Sql implements DatasourcePlus {
        schema,
      }
    }
-   const final = finaliseExternalTables(tables, entities)
-   this.tables = final.tables
-   this.schemaErrors = final.errors
+   let externalTables = finaliseExternalTables(tables, entities)
+   let errors = checkExternalTables(externalTables)
+   return {
+     tables: externalTables,
+     errors,
+   }
  }

  async queryTableNames() {
View File
@@ -10,6 +10,7 @@ import {
  DatasourceFeature,
  ConnectionInfo,
  SourceName,
+ Schema,
} from "@budibase/types"
import {
  getSqlQuery,
@@ -17,6 +18,7 @@ import {
  buildExternalTableId,
  convertSqlType,
  finaliseExternalTables,
+ checkExternalTables,
} from "./utils"
import dayjs from "dayjs"
import { NUMBER_REGEX } from "../utilities"
@@ -140,8 +142,6 @@ export function bindingTypeCoerce(bindings: any[]) {
class MySQLIntegration extends Sql implements DatasourcePlus {
  private config: MySQLConfig
  private client?: mysql.Connection
- public tables: Record<string, ExternalTable> = {}
- public schemaErrors: Record<string, string> = {}

  constructor(config: MySQLConfig) {
    super(SqlClient.MY_SQL)
@@ -279,7 +279,7 @@ class MySQLIntegration extends Sql implements DatasourcePlus {
  async buildSchema(
    datasourceId: string,
    entities: Record<string, ExternalTable>
- ) {
+ ): Promise<Schema> {
    const tables: { [key: string]: ExternalTable } = {}
    await this.connect()
@@ -328,9 +328,10 @@ class MySQLIntegration extends Sql implements DatasourcePlus {
    } finally {
      await this.disconnect()
    }
-   const final = finaliseExternalTables(tables, entities)
-   this.tables = final.tables
-   this.schemaErrors = final.errors
+
+   let externalTables = finaliseExternalTables(tables, entities)
+   let errors = checkExternalTables(tables)
+   return { tables: externalTables, errors }
  }

  async queryTableNames() {
View File
@@ -9,9 +9,11 @@ import {
  DatasourcePlus,
  DatasourceFeature,
  ConnectionInfo,
+ Schema,
} from "@budibase/types"
import {
  buildExternalTableId,
+ checkExternalTables,
  convertSqlType,
  finaliseExternalTables,
  getSqlQuery,
@@ -108,9 +110,6 @@ class OracleIntegration extends Sql implements DatasourcePlus {
  private readonly config: OracleConfig
  private index: number = 1

- public tables: Record<string, ExternalTable> = {}
- public schemaErrors: Record<string, string> = {}
-
  private readonly COLUMNS_SQL = `
    SELECT
      tabs.table_name,
@@ -265,7 +264,7 @@ class OracleIntegration extends Sql implements DatasourcePlus {
  async buildSchema(
    datasourceId: string,
    entities: Record<string, ExternalTable>
- ) {
+ ): Promise<Schema> {
    const columnsResponse = await this.internalQuery<OracleColumnsResponse>({
      sql: this.COLUMNS_SQL,
    })
@@ -326,9 +325,9 @@ class OracleIntegration extends Sql implements DatasourcePlus {
      })
    })

-   const final = finaliseExternalTables(tables, entities)
-   this.tables = final.tables
-   this.schemaErrors = final.errors
+   let externalTables = finaliseExternalTables(tables, entities)
+   let errors = checkExternalTables(externalTables)
+   return { tables: externalTables, errors }
  }

  async getTableNames() {
View File
@@ -10,6 +10,7 @@ import {
  DatasourceFeature,
  ConnectionInfo,
  SourceName,
+ Schema,
} from "@budibase/types"
import {
  getSqlQuery,
@@ -17,6 +18,7 @@ import {
  convertSqlType,
  finaliseExternalTables,
  SqlClient,
+ checkExternalTables,
} from "./utils"
import Sql from "./base/sql"
import { PostgresColumn } from "./base/types"
@@ -145,8 +147,6 @@ class PostgresIntegration extends Sql implements DatasourcePlus {
  private readonly config: PostgresConfig
  private index: number = 1
  private open: boolean
- public tables: Record<string, ExternalTable> = {}
- public schemaErrors: Record<string, string> = {}

  COLUMNS_SQL!: string
@@ -274,7 +274,7 @@ class PostgresIntegration extends Sql implements DatasourcePlus {
  async buildSchema(
    datasourceId: string,
    entities: Record<string, ExternalTable>
- ) {
+ ): Promise<Schema> {
    let tableKeys: { [key: string]: string[] } = {}
    await this.openConnection()
    try {
@@ -342,9 +342,9 @@ class PostgresIntegration extends Sql implements DatasourcePlus {
      }
    }

-   const final = finaliseExternalTables(tables, entities)
-   this.tables = final.tables
-   this.schemaErrors = final.errors
+   let finalizedTables = finaliseExternalTables(tables, entities)
+   let errors = checkExternalTables(finalizedTables)
+   return { tables: finalizedTables, errors }
  } catch (err) {
    // @ts-ignore
    throw new Error(err)
View File
@@ -4,13 +4,10 @@ import {
  SearchFilters,
  Datasource,
  FieldType,
+ ExternalTable,
} from "@budibase/types"
import { DocumentType, SEPARATOR } from "../db/utils"
-import {
-  BuildSchemaErrors,
-  InvalidColumns,
-  NoEmptyFilterStrings,
-} from "../constants"
+import { InvalidColumns, NoEmptyFilterStrings } from "../constants"
import { helpers } from "@budibase/shared-core"

const DOUBLE_SEPARATOR = `${SEPARATOR}${SEPARATOR}`
@@ -266,9 +263,9 @@ export function shouldCopySpecialColumn(
function copyExistingPropsOver(
  tableName: string,
  table: Table,
- entities: { [key: string]: any },
- tableIds: [string]
-) {
+ entities: Record<string, Table>,
+ tableIds: string[]
+): Table {
  if (entities && entities[tableName]) {
    if (entities[tableName]?.primaryDisplay) {
      table.primaryDisplay = entities[tableName].primaryDisplay
@@ -295,42 +292,41 @@ function copyExistingPropsOver(
/**
 * Look through the final table definitions to see if anything needs to be
- * copied over from the old and if any errors have occurred mark them so
- * that the user can be made aware.
+ * copied over from the old.
 * @param tables The list of tables that have been retrieved from the external database.
 * @param entities The old list of tables, if there was any to look for definitions in.
 */
export function finaliseExternalTables(
- tables: { [key: string]: any },
- entities: { [key: string]: any }
-) {
- const invalidColumns = Object.values(InvalidColumns)
- let finalTables: { [key: string]: any } = {}
- const errors: { [key: string]: string } = {}
- // @ts-ignore
- const tableIds: [string] = Object.values(tables).map(table => table._id)
+ tables: Record<string, ExternalTable>,
+ entities: Record<string, ExternalTable>
+): Record<string, ExternalTable> {
+ let finalTables: Record<string, Table> = {}
+ const tableIds = Object.values(tables).map(table => table._id!)
  for (let [name, table] of Object.entries(tables)) {
-   const schemaFields = Object.keys(table.schema)
-   // make sure every table has a key
-   if (table.primary == null || table.primary.length === 0) {
-     errors[name] = BuildSchemaErrors.NO_KEY
-     continue
-   } else if (
-     schemaFields.find(field =>
-       invalidColumns.includes(field as InvalidColumns)
-     )
-   ) {
-     errors[name] = BuildSchemaErrors.INVALID_COLUMN
-     continue
-   }
-   // make sure all previous props have been added back
    finalTables[name] = copyExistingPropsOver(name, table, entities, tableIds)
  }
- // sort the tables by name
- finalTables = Object.entries(finalTables)
+ // sort the tables by name, this is for the UI to display them in alphabetical order
+ return Object.entries(finalTables)
    .sort(([a], [b]) => a.localeCompare(b))
    .reduce((r, [k, v]) => ({ ...r, [k]: v }), {})
- return { tables: finalTables, errors }
}

+export function checkExternalTables(
+  tables: Record<string, ExternalTable>
+): Record<string, string> {
+  const invalidColumns = Object.values(InvalidColumns) as string[]
+  const errors: Record<string, string> = {}
+  for (let [name, table] of Object.entries(tables)) {
+    if (!table.primary || table.primary.length === 0) {
+      errors[name] = "Table must have a primary key."
+    }
+    const schemaFields = Object.keys(table.schema)
+    if (schemaFields.find(f => invalidColumns.includes(f))) {
+      errors[name] = "Table contains invalid columns."
+    }
+  }
+  return errors
+}
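An illustrative call with hypothetical table definitions, showing the keyed error map that the datasource controller now returns verbatim:

// Illustrative usage; table shapes are abbreviated with casts.
const errors = checkExternalTables({
  users: { _id: "ds_users", primary: ["id"], schema: { id: {} } } as any,
  logs: { _id: "ds_logs", primary: [], schema: {} } as any,
})
// -> { logs: "Table must have a primary key." }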
/**
View File
@@ -4,6 +4,8 @@ import {
  Document,
  Database,
  RowValue,
+ DocumentType,
+ App,
} from "@budibase/types"
import backups from "../backups"
@@ -12,9 +14,39 @@ export type FileAttributes = {
  path: string
}
async function getNewAppMetadata(
tempDb: Database,
appDb: Database
): Promise<App> {
// static doc denoting app information
const docId = DocumentType.APP_METADATA
try {
const [tempMetadata, appMetadata] = await Promise.all([
tempDb.get<App>(docId),
appDb.get<App>(docId),
])
return {
...appMetadata,
automationErrors: undefined,
theme: tempMetadata.theme,
customTheme: tempMetadata.customTheme,
features: tempMetadata.features,
icon: tempMetadata.icon,
navigation: tempMetadata.navigation,
type: tempMetadata.type,
version: tempMetadata.version,
}
} catch (err: any) {
throw new Error(
`Unable to retrieve app metadata for import - ${err.message}`
)
}
}
function mergeUpdateAndDeleteDocuments(
  updateDocs: Document[],
- deleteDocs: Document[]
+ deleteDocs: Document[],
+ metadata: App
) {
  // compress the documents to create and to delete (if same ID, then just update the rev)
  const finalToDelete = []
@@ -26,7 +58,7 @@ function mergeUpdateAndDeleteDocuments(
      finalToDelete.push(deleteDoc)
    }
  }
- return [...updateDocs, ...finalToDelete]
+ return [...updateDocs, ...finalToDelete, metadata]
}

async function removeImportableDocuments(db: Database) {
@@ -90,12 +122,15 @@ export async function updateWithExport(
    await backups.importApp(devId, tempDb, template, {
      importObjStoreContents: false,
    })
+   const newMetadata = await getNewAppMetadata(tempDb, appDb)
    // get the documents to copy
    const toUpdate = await getImportableDocuments(tempDb)
    // clear out the old documents
    const toDelete = await removeImportableDocuments(appDb)
    // now bulk update documents - add new ones, delete old ones and update common ones
-   await appDb.bulkDocs(mergeUpdateAndDeleteDocuments(toUpdate, toDelete))
+   await appDb.bulkDocs(
+     mergeUpdateAndDeleteDocuments(toUpdate, toDelete, newMetadata)
+   )
  } finally {
    await tempDb.destroy()
  }
View File
@ -0,0 +1,18 @@
import { App } from "@budibase/types"
import TestConfiguration from "../TestConfiguration"
import { TestAPI } from "./base"
export class ApplicationAPI extends TestAPI {
constructor(config: TestConfiguration) {
super(config)
}
get = async (appId: string): Promise<App> => {
const result = await this.request
.get(`/api/applications/${appId}/appPackage`)
.set(this.config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(200)
return result.body.application as App
}
}
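A hypothetical test using the new helper (the assertions are illustrative):

it("returns the app package", async () => {
  const app = await config.api.application.get(appId)
  expect(app.navigation?.links?.length).toBe(2)
})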
View File
@@ -6,6 +6,7 @@ import { ViewV2API } from "./viewV2"
import { DatasourceAPI } from "./datasource"
import { LegacyViewAPI } from "./legacyView"
import { ScreenAPI } from "./screen"
+import { ApplicationAPI } from "./application"

export default class API {
  table: TableAPI
@@ -15,6 +16,7 @@ export default class API {
  permission: PermissionAPI
  datasource: DatasourceAPI
  screen: ScreenAPI
+ application: ApplicationAPI

  constructor(config: TestConfiguration) {
    this.table = new TableAPI(config)
@@ -24,5 +26,6 @@ export default class API {
    this.permission = new PermissionAPI(config)
    this.datasource = new DatasourceAPI(config)
    this.screen = new ScreenAPI(config)
+   this.application = new ApplicationAPI(config)
  }
}
View File
@@ -14,5 +14,5 @@ export function isSQL(datasource: Datasource): boolean {
    SourceName.MYSQL,
    SourceName.ORACLE,
  ]
- return SQL.indexOf(datasource.source) !== -1
+ return SQL.indexOf(datasource.source) !== -1 || datasource.isSQL === true
}
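An illustrative consequence of the new flag (datasource shape abbreviated, source name hypothetical):

// A source outside the built-in SQL list can now opt in via the document flag.
const datasource = { source: SourceName.REST, isSQL: true } as Datasource
isSQL(datasource) // -> true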
View File
@@ -2,7 +2,7 @@ import { Datasource } from "../../../documents"

export interface CreateDatasourceResponse {
  datasource: Datasource
- error?: any
+ errors: Record<string, string>
}

export interface UpdateDatasourceResponse {
View File
@@ -9,6 +9,7 @@ export interface Datasource extends Document {
  // the config is defined by the schema
  config?: Record<string, any>
  plus?: boolean
+ isSQL?: boolean
  entities?: {
    [key: string]: Table
  }
View File
@@ -1,4 +1,4 @@
-import { Table } from "../documents"
+import { ExternalTable, Table } from "../documents"

export const PASSWORD_REPLACEMENT = "--secret-value--"
@@ -140,6 +140,7 @@ export interface DatasourceConfig {
export interface Integration {
  docs: string
  plus?: boolean
+ isSQL?: boolean
  auth?: { type: string }
  features?: Partial<Record<DatasourceFeature, boolean>>
  relationships?: boolean
@@ -174,14 +175,19 @@ export interface IntegrationBase {
  }): void
}

-export interface DatasourcePlus extends IntegrationBase {
- tables: Record<string, Table>
- schemaErrors: Record<string, string>
+export interface Schema {
+  tables: Record<string, ExternalTable>
+  errors: Record<string, string>
+}

+export interface DatasourcePlus extends IntegrationBase {
  // if the datasource supports the use of bindings directly (to protect against SQL injection)
  // this returns the format of the identifier
  getBindingIdentifier(): string
  getStringConcat(parts: string[]): string
- buildSchema(datasourceId: string, entities: Record<string, Table>): any
+ buildSchema(
+   datasourceId: string,
+   entities: Record<string, ExternalTable>
+ ): Promise<Schema>
  getTableNames(): Promise<string[]>
}
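A hedged sketch of the shape an integration's buildSchema now fulfils (names beyond the interface are illustrative):

// Sketch of the new contract: always resolve to a Schema instead of
// mutating instance fields like the old tables/schemaErrors properties.
async function buildSchema(
  datasourceId: string,
  entities: Record<string, ExternalTable>
): Promise<Schema> {
  const tables: Record<string, ExternalTable> = {}
  // ...introspect the external source and populate `tables` here...
  return { tables, errors: {} }
}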
View File
@@ -14,13 +14,13 @@
  "scripts": {
    "prebuild": "rimraf dist/",
    "build": "node ../../scripts/build.js",
+   "postbuild": "copyfiles -f ../../yarn.lock ./dist/",
    "check:types": "tsc -p tsconfig.json --noEmit --paths null",
    "build:dev": "yarn prebuild && tsc --build --watch --preserveWatchOutput",
    "run:docker": "node dist/index.js",
    "debug": "yarn build && node --expose-gc --inspect=9223 dist/index.js",
    "run:docker:cluster": "pm2-runtime start pm2.config.js",
-   "predocker": "yarn build && cp ../../yarn.lock ./dist/",
-   "build:docker": "yarn predocker && docker build . -t worker-service --label version=$BUDIBASE_RELEASE_VERSION",
+   "build:docker": "yarn build && docker build . -t worker-service --label version=$BUDIBASE_RELEASE_VERSION",
    "dev:stack:init": "node ./scripts/dev/manage.js init",
    "dev:builder": "npm run dev:stack:init && nodemon",
    "dev:built": "yarn run dev:stack:init && yarn run run:docker",
@@ -51,9 +51,7 @@
    "bcryptjs": "2.4.3",
    "dd-trace": "3.13.2",
    "dotenv": "8.6.0",
-   "elastic-apm-node": "3.38.0",
    "global-agent": "3.0.0",
-   "got": "11.8.3",
    "ical-generator": "4.1.0",
    "joi": "17.6.0",
    "koa": "2.13.4",
View File
@@ -11,7 +11,7 @@ import { TestConfiguration } from "../../../../tests"
import { events } from "@budibase/backend-core"

// this test can 409 - retries reduce issues with this
-jest.retryTimes(2)
+jest.retryTimes(2, { logErrorsBeforeRetry: true })
jest.setTimeout(30000)

mocks.licenses.useScimIntegration()
scripts/build-single-image.sh Executable file
View File
@ -0,0 +1,3 @@
#!/bin/bash
yarn build --scope @budibase/server --scope @budibase/worker
docker build -f hosting/single/Dockerfile.v2 -t budibase:latest .
View File
@@ -15,11 +15,7 @@ const { nodeExternalsPlugin } = require("esbuild-node-externals")

var argv = require("minimist")(process.argv.slice(2))

-function runBuild(
- entry,
- outfile,
- opts = { skipMeta: false, bundle: true, silent: false }
-) {
+function runBuild(entry, outfile) {
  const isDev = process.env.NODE_ENV !== "production"
  const tsconfig = argv["p"] || `tsconfig.build.json`
  const tsconfigPathPluginContent = JSON.parse(
@@ -40,16 +36,12 @@ function runBuild(
    ]
  }

- const metafile = !opts.skipMeta
- const { bundle } = opts
  const sharedConfig = {
    entryPoints: [entry],
-   bundle,
+   bundle: true,
    minify: !isDev,
    sourcemap: isDev,
    tsconfig,
-   format: opts?.forcedFormat,
    plugins: [
      TsconfigPathsPlugin({ tsconfig: tsconfigPathPluginContent }),
      nodeExternalsPlugin(),
@@ -58,10 +50,8 @@ function runBuild(
    loader: {
      ".svelte": "copy",
    },
-   metafile,
-   external: bundle
-     ? ["deasync", "mock-aws-s3", "nock", "pino", "koa-pino-logger", "bull"]
-     : undefined,
+   metafile: true,
+   external: ["deasync", "mock-aws-s3", "nock", "bull"],
  }

  build({
@@ -74,19 +64,16 @@ function runBuild(
      fs.copyFileSync(file, `${process.cwd()}/dist/${path.basename(file)}`)
    }

-   !opts.silent &&
-     console.log(
-       "\x1b[32m%s\x1b[0m",
-       `Build successfully in ${(Date.now() - start) / 1000} seconds`
-     )
+   console.log(
+     "\x1b[32m%s\x1b[0m",
+     `Build successfully in ${(Date.now() - start) / 1000} seconds`
+   )
  })

- if (metafile) {
-   fs.writeFileSync(
-     `dist/${path.basename(outfile)}.meta.json`,
-     JSON.stringify(result.metafile)
-   )
- }
+ fs.writeFileSync(
+   `dist/${path.basename(outfile)}.meta.json`,
+   JSON.stringify(result.metafile)
+ )
})
}
View File
@ -0,0 +1,52 @@
#!/bin/bash
packages_to_remove=(
@budibase/backend-core
@budibase/bbui
@budibase/builder
@budibase/cli
@budibase/client
@budibase/frontend-core
@budibase/pro
@budibase/sdk
@budibase/server
@budibase/shared-core
# We cannot remove string-templates yet because it cannot be bundled by esbuild as a dependency
@budibase/string-templates
@budibase/types
@budibase/worker
)
root_package_json=$(cat "package.json")
process_package() {
local pkg="$1"
local package_json=$(cat "$pkg/package.json")
local has_changes=false
for package_name in "${packages_to_remove[@]}"; do
if echo "$package_json" | jq -e --arg package_name "$package_name" '.dependencies | has($package_name)' > /dev/null; then
package_json=$(echo "$package_json" | jq "del(.dependencies[\"$package_name\"])")
has_changes=true
fi
done
if [ "$has_changes" = true ]; then
echo "$package_json" > "$1/package.json"
fi
}
for pkg in $(echo "$root_package_json" | jq -r '.workspaces.packages[]' ); do
if [[ "$pkg" == *"*"* ]]; then
# Use find to iterate through immediate subdirectories
find "$pkg" -maxdepth 1 -type d -print | while read -r workspace_package; do
process_package "$workspace_package"
done
else
process_package "$pkg"
fi
done
echo "$root_package_json" | jq "del(.resolutions)" > "package.json"
yarn.lock
File diff suppressed because it is too large