Merge branch 'master' into feature/buttongroup-component

Michael Drury 2023-10-17 14:08:26 +01:00 committed by GitHub
commit e1ba96c5d0
71 changed files with 2515 additions and 2180 deletions

View File

@ -1,9 +1,14 @@
packages/server/node_modules
packages/builder
packages/frontend-core
packages/backend-core
packages/worker/node_modules
packages/cli
packages/client
packages/bbui
packages/string-templates
*
!/packages/
!/scripts/
/packages/*/node_modules
packages/server/scripts/
!packages/server/scripts/integrations/oracle
!nx.json
!/hosting/single/
!/hosting/letsencrypt/
!package.json
!yarn.lock
!lerna.json
!.yarnrc

View File

@ -10,7 +10,6 @@ on:
push:
branches:
- master
- develop
pull_request:
workflow_dispatch:
@ -262,11 +261,7 @@ jobs:
branch="${{ github.base_ref || github.ref_name }}"
echo "Running on branch '$branch' (base_ref=${{ github.base_ref }}, ref_name=${{ github.head_ref }})"
if [[ $branch == "master" ]]; then
base_commit=$(git rev-parse origin/master)
elif [[ $branch == "develop" ]]; then
base_commit=$(git rev-parse origin/develop)
fi
if [[ ! -z $base_commit ]]; then
echo "target_branch=$branch"

View File

@ -4,7 +4,13 @@ on:
pull_request:
types: [closed]
branches:
- develop
- master
workflow_dispatch:
inputs:
BRANCH:
type: string
description: Which feature branch to destroy?
required: true
jobs:
release:
@ -13,8 +19,8 @@ jobs:
- uses: actions/checkout@v3
- uses: passeidireto/trigger-external-workflow-action@main
env:
PAYLOAD_BRANCH: ${{ github.head_ref }}
PAYLOAD_PR_NUMBER: ${{ github.ref }}
PAYLOAD_BRANCH: ${{ github.event_name == 'workflow_dispatch' && github.event.inputs.BRANCH || github.head_ref }}
PAYLOAD_PR_NUMBER: ${{ github.event.pull_request.number }}
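# Note: the `cond && a || b` pattern above is the GitHub Actions expression
# idiom for a ternary: on a manual workflow_dispatch run PAYLOAD_BRANCH takes
# the BRANCH input, otherwise it falls back to the pull request's head ref.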
with:
repository: budibase/budibase-deploys
event: featurebranch-qa-close

View File

@ -3,7 +3,6 @@ name: deploy-featurebranch
on:
pull_request:
branches:
- develop
- master
jobs:

View File

@ -1,41 +0,0 @@
name: "deploy-preprod"
on:
workflow_dispatch:
workflow_call:
jobs:
deploy-to-legacy-preprod-env:
runs-on: ubuntu-latest
steps:
- name: Fail if not a tag
run: |
if [[ $GITHUB_REF != refs/tags/* ]]; then
echo "Workflow Dispatch can only be run on tags"
exit 1
fi
- uses: actions/checkout@v2
with:
fetch-depth: 0
- name: Fail if tag is not in master
run: |
if ! git merge-base --is-ancestor ${{ github.sha }} origin/master; then
echo "Tag is not in master. This pipeline can only execute tags that are present on the master branch"
exit 1
fi
- name: Get the latest budibase release version
id: version
run: |
release_version=$(cat lerna.json | jq -r '.version')
echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV
- uses: passeidireto/trigger-external-workflow-action@main
env:
PAYLOAD_VERSION: ${{ env.RELEASE_VERSION }}
with:
repository: budibase/budibase-deploys
event: budicloud-preprod-deploy
github_pat: ${{ secrets.GH_ACCESS_TOKEN }}
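
The tag guard above (and the similar one in the prerelease workflow below) hinges on git merge-base --is-ancestor. A minimal local sketch of the same check, with v1.0.0 as a hypothetical tag name:

git fetch origin master --tags
if git merge-base --is-ancestor "$(git rev-parse v1.0.0^{commit})" origin/master; then
  echo "tag is reachable from master"
else
  echo "tag is NOT on master" >&2
fi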

View File

@ -1,124 +0,0 @@
name: Budibase Prerelease
concurrency:
group: release-prerelease
cancel-in-progress: false
on:
push:
tags:
- "*-alpha.*"
workflow_dispatch:
env:
# Posthog token used by ui at build time
# disable unless needed for testing
# POSTHOG_TOKEN: phc_uDYOfnFt6wAbBAXkC6STjcrTpAFiWIhqgFcsC1UVO5F
INTERCOM_TOKEN: ${{ secrets.INTERCOM_TOKEN }}
PERSONAL_ACCESS_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
FEATURE_PREVIEW_URL: https://budirelease.live
jobs:
release-images:
runs-on: ubuntu-latest
steps:
- name: Fail if not a tag
run: |
if [[ $GITHUB_REF != refs/tags/* ]]; then
echo "Workflow Dispatch can only be run on tags"
exit 1
fi
- uses: actions/checkout@v2
with:
submodules: true
token: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
fetch-depth: 0
- name: Fail if tag is not develop
run: |
if ! git merge-base --is-ancestor ${{ github.sha }} origin/develop; then
echo "Tag is not in develop"
exit 1
fi
- uses: actions/setup-node@v1
with:
node-version: 18.x
- run: yarn install --frozen-lockfile
- name: Update versions
run: ./scripts/updateVersions.sh
- run: yarn build
- run: yarn build:sdk
- name: Publish budibase packages to NPM
env:
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
run: |
# setup the username and email.
git config --global user.name "Budibase Staging Release Bot"
git config --global user.email "<>"
git submodule foreach git commit -a -m 'Release process'
git commit -a -m 'Release process'
echo //registry.npmjs.org/:_authToken=${NPM_TOKEN} >> .npmrc
yarn release:develop
- name: Build/release Docker images
run: |
docker login -u $DOCKER_USER -p $DOCKER_PASSWORD
yarn build:docker:develop
env:
DOCKER_USER: ${{ secrets.DOCKER_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKER_API_KEY }}
release-helm-chart:
needs: [release-images]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Setup Helm
uses: azure/setup-helm@v1
id: helm-install
# due to helm repo index issue: https://github.com/helm/helm/issues/7363
# we need to create new package in a different dir, merge the index and move the package back
- name: Build and release helm chart
run: |
git config user.name "Budibase Helm Bot"
git config user.email "<>"
git reset --hard
git fetch
mkdir sync
echo "Packaging chart to sync dir"
helm package charts/budibase --version 0.0.0-develop --app-version develop --destination sync
echo "Packaging successful"
git checkout gh-pages
echo "Indexing helm repo"
helm repo index --merge docs/index.yaml sync
mv -f sync/* docs
rm -rf sync
echo "Pushing new helm release"
git add -A
git commit -m "Helm Release: develop"
git push
trigger-deploy-to-qa-env:
needs: [release-helm-chart]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Get the current budibase release version
id: version
run: |
release_version=$(cat lerna.json | jq -r '.version')
echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV
- uses: passeidireto/trigger-external-workflow-action@main
env:
PAYLOAD_VERSION: ${{ env.RELEASE_VERSION }}
with:
repository: budibase/budibase-deploys
event: budicloud-qa-deploy
github_pat: ${{ secrets.GH_ACCESS_TOKEN }}

View File

@ -110,19 +110,13 @@ jobs:
git commit -m "Helm Release: ${{ env.RELEASE_VERSION }}"
git push
deploy-to-legacy-preprod-env:
needs: [release-images]
uses: ./.github/workflows/deploy-preprod.yml
secrets: inherit
# Trigger deploy to new EKS preprod environment
trigger-deploy-to-preprod-env:
trigger-deploy-to-qa-env:
needs: [release-helm-chart]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Get the latest budibase release version
- name: Get the current budibase release version
id: version
run: |
release_version=$(cat lerna.json | jq -r '.version')
@ -133,5 +127,5 @@ jobs:
PAYLOAD_VERSION: ${{ env.RELEASE_VERSION }}
with:
repository: budibase/budibase-deploys
event: budicloud-preprod-deploy
event: budicloud-qa-deploy
github_pat: ${{ secrets.GH_ACCESS_TOKEN }}
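
The version lookup these jobs share can be reproduced locally; jq -r '.version' lerna.json is equivalent to the cat-pipe form used above:

jq -r '.version' lerna.json
# prints 2.11.35, matching the lerna.json bump later in this commit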

View File

@ -0,0 +1,69 @@
name: Test
on:
workflow_dispatch:
env:
CI: true
PERSONAL_ACCESS_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
REGISTRY_URL: registry.hub.docker.com
NX_CLOUD_ACCESS_TOKEN: ${{ secrets.NX_CLOUD_ACCESS_TOKEN }}
jobs:
build:
name: "build"
runs-on: ubuntu-latest
strategy:
matrix:
node-version: [18.x]
steps:
- name: "Checkout"
uses: actions/checkout@v4
with:
submodules: true
token: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
- name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@v3
with:
node-version: ${{ matrix.node-version }}
cache: "yarn"
- name: Setup QEMU
uses: docker/setup-qemu-action@v3
- name: Setup Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v3
- name: Run Yarn
run: yarn
- name: Run Yarn Build
run: yarn build --scope @budibase/server --scope @budibase/worker
- name: Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_API_KEY }}
- name: Get the latest release version
id: version
run: |
release_version=$(cat lerna.json | jq -r '.version')
echo $release_version
echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV
- name: Tag and release Budibase service docker image
uses: docker/build-push-action@v5
with:
context: .
push: true
pull: true
platforms: linux/amd64,linux/arm64
tags: budibase/budibase-test:test
file: ./hosting/single/Dockerfile.v2
cache-from: type=registry,ref=budibase/budibase-test:test
cache-to: type=inline
- name: Tag and release Budibase Azure App Service docker image
uses: docker/build-push-action@v2
with:
context: .
push: true
platforms: linux/amd64
build-args: TARGETBUILD=aas
tags: budibase/budibase-test:aas
file: ./hosting/single/Dockerfile.v2
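
For reference, a roughly equivalent local invocation of the multi-arch build step above, with flags assumed from the action inputs rather than taken from the repo:

docker buildx build \
  --platform linux/amd64,linux/arm64 \
  -f hosting/single/Dockerfile.v2 \
  -t budibase/budibase-test:test \
  --cache-from type=registry,ref=budibase/budibase-test:test \
  --cache-to type=inline \
  --pull --push .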

View File

@ -1,42 +0,0 @@
name: Tag prerelease
concurrency:
group: tag-prerelease
cancel-in-progress: false
on:
push:
branches:
- develop
paths:
- ".aws/**"
- ".github/**"
- "charts/**"
- "packages/**"
- "scripts/**"
- "package.json"
- "yarn.lock"
workflow_dispatch:
jobs:
tag-prerelease:
runs-on: ubuntu-latest
steps:
- name: Fail if branch is not develop
if: github.ref != 'refs/heads/develop'
run: |
echo "Ref is not develop, you must run this job from develop."
exit 1
- uses: actions/checkout@v2
with:
submodules: true
token: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
- run: cd scripts && yarn
- name: Tag prerelease
run: |
cd scripts
# setup the username and email.
git config --global user.name "Budibase Staging Release Bot"
git config --global user.email "<>"
./versionCommit.sh prerelease

View File

@ -4,17 +4,6 @@ concurrency:
cancel-in-progress: false
on:
push:
branches:
- master
paths:
- ".aws/**"
- ".github/**"
- "charts/**"
- "packages/**"
- "scripts/**"
- "package.json"
- "yarn.lock"
workflow_dispatch:
inputs:
versioning:

View File

@ -1 +1 @@
network-timeout 100000
network-timeout 1000000
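
Yarn's network-timeout is measured in milliseconds, so this raises the per-request timeout from roughly 100 seconds to roughly 17 minutes. The same setting can be passed ad hoc on the CLI:

yarn install --network-timeout 1000000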

View File

@ -138,6 +138,8 @@ To develop the Budibase platform you'll need [Docker](https://www.docker.com/) a
`yarn setup` will check that all necessary components are installed and set up the repo for usage.
If you have access to the `@budibase/pro` submodule then please follow the Pro section of this guide before running the above command.
##### Manual method
The following commands can be executed to manually get Budibase up and running (assuming Docker/Docker Compose has been installed).
@ -146,6 +148,8 @@ The following commands can be executed to manually get Budibase up and running (
`yarn build` will build all budibase packages.
If you have access to the `@budibase/pro` submodule then please follow the Pro section of this guide before running the above commands.
#### 4. Running
To run the budibase server and builder in dev mode (i.e. with live reloading):

View File

@ -0,0 +1,126 @@
FROM node:18-slim as build
# install node-gyp dependencies
RUN apt-get update && apt-get install -y --no-install-recommends g++ make python3 jq
# copy and install dependencies
WORKDIR /app
COPY package.json .
COPY yarn.lock .
COPY lerna.json .
COPY .yarnrc .
COPY packages/server/package.json packages/server/package.json
COPY packages/worker/package.json packages/worker/package.json
# string-templates does not get bundled during the esbuild process, so we want to use the local version
COPY packages/string-templates/package.json packages/string-templates/package.json
COPY scripts/removeWorkspaceDependencies.sh scripts/removeWorkspaceDependencies.sh
RUN chmod +x ./scripts/removeWorkspaceDependencies.sh
RUN ./scripts/removeWorkspaceDependencies.sh
# We will never want to sync pro, but the script is still required
RUN echo '' > scripts/syncProPackage.js
RUN jq 'del(.scripts.postinstall)' package.json > temp.json && mv temp.json package.json
RUN --mount=type=cache,target=/root/.yarn YARN_CACHE_FOLDER=/root/.yarn yarn install --production
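# Illustration only, not a build step: jq 'del(.scripts.postinstall)' rewrites
#   {"scripts": {"postinstall": "node scripts/syncProPackage.js", "build": "..."}}
# into
#   {"scripts": {"build": "..."}}
# so the production install above skips the pro sync, while the BuildKit cache
# mount persists yarn's cache in /root/.yarn across builds.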
# copy the actual code
COPY packages/server/dist packages/server/dist
COPY packages/server/pm2.config.js packages/server/pm2.config.js
COPY packages/server/client packages/server/client
COPY packages/server/builder packages/server/builder
COPY packages/worker/dist packages/worker/dist
COPY packages/worker/pm2.config.js packages/worker/pm2.config.js
COPY packages/string-templates packages/string-templates
FROM budibase/couchdb as runner
ARG TARGETARCH
ENV TARGETARCH $TARGETARCH
#TARGETBUILD can be set to single (for single docker image) or aas (for azure app service)
# e.g. docker build --build-arg TARGETBUILD=aas ....
ARG TARGETBUILD=single
ENV TARGETBUILD $TARGETBUILD
# install base dependencies
RUN apt-get update && \
apt-get install -y --no-install-recommends software-properties-common nginx uuid-runtime redis-server
# Install postgres client for pg_dump utils
RUN apt install software-properties-common apt-transport-https gpg -y \
&& curl -fsSl https://www.postgresql.org/media/keys/ACCC4CF8.asc | gpg --dearmor | tee /usr/share/keyrings/postgresql.gpg > /dev/null \
&& echo deb [arch=amd64,arm64,ppc64el signed-by=/usr/share/keyrings/postgresql.gpg] http://apt.postgresql.org/pub/repos/apt/ $(lsb_release -cs)-pgdg main | tee /etc/apt/sources.list.d/postgresql.list \
&& apt update -y \
&& apt install postgresql-client-15 -y \
&& apt remove software-properties-common apt-transport-https gpg -y
# install other dependencies, nodejs, oracle requirements, jdk8, redis, nginx
WORKDIR /nodejs
RUN curl -sL https://deb.nodesource.com/setup_18.x -o /tmp/nodesource_setup.sh && \
bash /tmp/nodesource_setup.sh && \
apt-get install -y --no-install-recommends libaio1 nodejs && \
npm install --global yarn pm2
# setup nginx
COPY hosting/single/nginx/nginx.conf /etc/nginx
COPY hosting/single/nginx/nginx-default-site.conf /etc/nginx/sites-enabled/default
RUN mkdir -p /var/log/nginx && \
touch /var/log/nginx/error.log && \
touch /var/run/nginx.pid && \
usermod -a -G tty www-data
WORKDIR /
RUN mkdir -p scripts/integrations/oracle
COPY packages/server/scripts/integrations/oracle scripts/integrations/oracle
RUN /bin/bash -e ./scripts/integrations/oracle/instantclient/linux/install.sh
# setup minio
WORKDIR /minio
COPY scripts/install-minio.sh ./install.sh
RUN chmod +x install.sh && ./install.sh
# setup runner file
WORKDIR /
COPY hosting/single/runner.sh .
RUN chmod +x ./runner.sh
COPY hosting/single/healthcheck.sh .
RUN chmod +x ./healthcheck.sh
# Script below sets the path for storing data based on $DATA_DIR
# For Azure App Service install SSH & point data locations to /home
COPY hosting/single/ssh/sshd_config /etc/
COPY hosting/single/ssh/ssh_setup.sh /tmp
RUN /build-target-paths.sh
# setup letsencrypt certificate
RUN apt-get install -y certbot python3-certbot-nginx
COPY hosting/letsencrypt /app/letsencrypt
RUN chmod +x /app/letsencrypt/certificate-request.sh /app/letsencrypt/certificate-renew.sh
COPY --from=build /app/node_modules /node_modules
COPY --from=build /app/package.json /package.json
COPY --from=build /app/packages/server /app
COPY --from=build /app/packages/worker /worker
COPY --from=build /app/packages/string-templates /string-templates
RUN cd /string-templates && yarn link && cd ../app && yarn link @budibase/string-templates && cd ../worker && yarn link @budibase/string-templates
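# The link chain above wires the local, un-bundled copy of
# @budibase/string-templates into both the server (/app) and worker (/worker)
# installs, so each resolves it from /string-templates instead of the registry.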
EXPOSE 80
EXPOSE 443
# Expose port 2222 for SSH on Azure App Service build
EXPOSE 2222
VOLUME /data
HEALTHCHECK --interval=15s --timeout=15s --start-period=45s CMD "/healthcheck.sh"
# must set this just before running
ENV NODE_ENV=production
WORKDIR /
CMD ["./runner.sh"]

View File

@ -7,16 +7,16 @@ declare -a DOCKER_VARS=("APP_PORT" "APPS_URL" "ARCHITECTURE" "BUDIBASE_ENVIRONME
[[ -z "${BUDIBASE_ENVIRONMENT}" ]] && export BUDIBASE_ENVIRONMENT=PRODUCTION
[[ -z "${CLUSTER_PORT}" ]] && export CLUSTER_PORT=80
[[ -z "${DEPLOYMENT_ENVIRONMENT}" ]] && export DEPLOYMENT_ENVIRONMENT=docker
[[ -z "${MINIO_URL}" ]] && export MINIO_URL=http://localhost:9000
[[ -z "${MINIO_URL}" ]] && export MINIO_URL=http://127.0.0.1:9000
[[ -z "${NODE_ENV}" ]] && export NODE_ENV=production
[[ -z "${POSTHOG_TOKEN}" ]] && export POSTHOG_TOKEN=phc_bIjZL7oh2GEUd2vqvTBH8WvrX0fWTFQMs6H5KQxiUxU
[[ -z "${TENANT_FEATURE_FLAGS}" ]] && export TENANT_FEATURE_FLAGS="*:LICENSING,*:USER_GROUPS,*:ONBOARDING_TOUR"
[[ -z "${ACCOUNT_PORTAL_URL}" ]] && export ACCOUNT_PORTAL_URL=https://account.budibase.app
[[ -z "${REDIS_URL}" ]] && export REDIS_URL=localhost:6379
[[ -z "${REDIS_URL}" ]] && export REDIS_URL=127.0.0.1:6379
[[ -z "${SELF_HOSTED}" ]] && export SELF_HOSTED=1
[[ -z "${WORKER_PORT}" ]] && export WORKER_PORT=4002
[[ -z "${WORKER_URL}" ]] && export WORKER_URL=http://localhost:4002
[[ -z "${APPS_URL}" ]] && export APPS_URL=http://localhost:4001
[[ -z "${WORKER_URL}" ]] && export WORKER_URL=http://127.0.0.1:4002
[[ -z "${APPS_URL}" ]] && export APPS_URL=http://127.0.0.1:4001
[[ -z "${SERVER_TOP_LEVEL_PATH}" ]] && export SERVER_TOP_LEVEL_PATH=/app
# export CUSTOM_DOMAIN=budi001.custom.com
@ -51,7 +51,7 @@ do
fi
done
if [[ -z "${COUCH_DB_URL}" ]]; then
export COUCH_DB_URL=http://$COUCHDB_USER:$COUCHDB_PASSWORD@localhost:5984
export COUCH_DB_URL=http://$COUCHDB_USER:$COUCHDB_PASSWORD@127.0.0.1:5984
fi
if [ ! -f "${DATA_DIR}/.env" ]; then
touch ${DATA_DIR}/.env
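
The defaulting idiom used throughout this script, isolated as a sketch (EXAMPLE_URL is a made-up variable):

[[ -z "${EXAMPLE_URL}" ]] && export EXAMPLE_URL=http://127.0.0.1:9000  # only set when unset or empty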

View File

@ -1,5 +1,5 @@
{
"version": "2.11.34",
"version": "2.11.35",
"npmClient": "yarn",
"packages": [
"packages/*"

View File

@ -8,5 +8,9 @@
}
}
},
"targetDefaults": {}
"targetDefaults": {
"build": {
"inputs": ["{workspaceRoot}/scripts/build.js"]
}
}
}

View File

@ -3,14 +3,11 @@
"private": true,
"devDependencies": {
"@esbuild-plugins/tsconfig-paths": "^0.1.2",
"@nx/js": "16.4.3",
"@rollup/plugin-json": "^4.0.2",
"@typescript-eslint/parser": "6.7.2",
"esbuild": "^0.18.17",
"esbuild-node-externals": "^1.8.0",
"eslint": "^8.44.0",
"husky": "^8.0.3",
"js-yaml": "^4.1.0",
"kill-port": "^1.6.1",
"lerna": "7.1.1",
"madge": "^6.0.0",
@ -19,8 +16,6 @@
"nx-cloud": "16.0.5",
"prettier": "2.8.8",
"prettier-plugin-svelte": "^2.3.0",
"rimraf": "^3.0.2",
"rollup-plugin-replace": "^2.2.0",
"svelte": "3.49.0",
"typescript": "5.2.2",
"@babel/core": "^7.22.5",
@ -51,7 +46,7 @@
"dev:noserver": "yarn run kill-builder && lerna run --stream dev:stack:up && lerna run --stream dev:builder --ignore @budibase/backend-core --ignore @budibase/server --ignore @budibase/worker",
"dev:server": "yarn run kill-server && lerna run --stream dev:builder --scope @budibase/worker --scope @budibase/server",
"dev:built": "yarn run kill-all && cd packages/server && yarn dev:stack:up && cd ../../ && lerna run --stream dev:built",
"dev:docker": "yarn build:docker:pre && docker-compose -f hosting/docker-compose.build.yaml -f hosting/docker-compose.dev.yaml --env-file hosting/.env up --build --scale proxy-service=0",
"dev:docker": "yarn build && docker-compose -f hosting/docker-compose.build.yaml -f hosting/docker-compose.dev.yaml --env-file hosting/.env up --build --scale proxy-service=0",
"test": "lerna run --stream test --stream",
"lint:eslint": "eslint packages qa-core --max-warnings=0",
"lint:prettier": "prettier --check \"packages/**/*.{js,ts,svelte}\" && prettier --write \"examples/**/*.{js,ts,svelte}\" && prettier --check \"qa-core/**/*.{js,ts,svelte}\"",
@ -61,7 +56,6 @@
"lint:fix": "yarn run lint:fix:prettier && yarn run lint:fix:eslint",
"build:specs": "lerna run --stream specs",
"build:docker": "lerna run --stream build:docker && yarn build:docker:proxy && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh $BUDIBASE_RELEASE_VERSION && cd -",
"build:docker:pre": "yarn build && lerna run --stream predocker",
"build:docker:proxy": "docker build hosting/proxy -t proxy-service",
"build:docker:selfhost": "lerna run --stream build:docker && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh latest && cd -",
"build:docker:develop": "node scripts/pinVersions && lerna run --stream build:docker && yarn build:docker:proxy && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh develop && cd -",
@ -69,8 +63,7 @@
"build:docker:airgap:single": "SINGLE_IMAGE=1 node hosting/scripts/airgapped/airgappedDockerBuild",
"build:digitalocean": "cd hosting/digitalocean && ./build.sh && cd -",
"build:docker:single:multiarch": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/single/Dockerfile -t budibase:latest .",
"build:docker:single:image": "docker build -f hosting/single/Dockerfile -t budibase:latest .",
"build:docker:single": "yarn build && lerna run --concurrency 1 predocker && yarn build:docker:single:image",
"build:docker:single": "./scripts/build-single-image.sh",
"build:docker:dependencies": "docker build -f hosting/dependencies/Dockerfile -t budibase/dependencies:latest ./hosting",
"publish:docker:couch": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/couchdb/Dockerfile -t budibase/couchdb:latest -t budibase/couchdb:v3.2.1 --push ./hosting/couchdb",
"publish:docker:dependencies": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/dependencies/Dockerfile -t budibase/dependencies:latest -t budibase/dependencies:v3.2.1 --push ./hosting",

View File

@ -26,7 +26,7 @@
"@budibase/shared-core": "0.0.0",
"@budibase/types": "0.0.0",
"@techpass/passport-openidconnect": "0.3.2",
"aws-cloudfront-sign": "2.2.0",
"aws-cloudfront-sign": "3.0.2",
"aws-sdk": "2.1030.0",
"bcrypt": "5.1.0",
"bcryptjs": "2.4.3",

View File

@ -1,5 +1,5 @@
import env from "../environment"
const cfsign = require("aws-cloudfront-sign")
import * as cfsign from "aws-cloudfront-sign"
let PRIVATE_KEY: string | undefined
@ -21,7 +21,7 @@ function getPrivateKey() {
const getCloudfrontSignParams = () => {
return {
keypairId: env.CLOUDFRONT_PUBLIC_KEY_ID,
keypairId: env.CLOUDFRONT_PUBLIC_KEY_ID!,
privateKeyString: getPrivateKey(),
expireTime: new Date().getTime() + 1000 * 60 * 60, // 1 hour
}

View File

@ -6,6 +6,7 @@ import {
AutomationStepIdArray,
AutomationIOType,
AutomationCustomIOType,
DatasourceFeature,
} from "@budibase/types"
import joi from "joi"
@ -67,9 +68,27 @@ function validateDatasource(schema: any) {
version: joi.string().optional(),
schema: joi.object({
docs: joi.string(),
plus: joi.boolean().optional(),
isSQL: joi.boolean().optional(),
auth: joi
.object({
type: joi.string().required(),
})
.optional(),
features: joi
.object(
Object.fromEntries(
Object.values(DatasourceFeature).map(key => [
key,
joi.boolean().optional(),
])
)
)
.optional(),
relationships: joi.boolean().optional(),
description: joi.string().required(),
friendlyName: joi.string().required(),
type: joi.string().allow(...DATASOURCE_TYPES),
description: joi.string().required(),
datasource: joi.object().pattern(joi.string(), fieldValidator).required(),
query: joi
.object()

View File

@ -82,9 +82,9 @@
"@spectrum-css/vars": "3.0.1",
"dayjs": "^1.10.8",
"easymde": "^2.16.1",
"svelte-dnd-action": "^0.9.8",
"svelte-flatpickr": "3.2.3",
"svelte-portal": "^1.0.0",
"svelte-dnd-action": "^0.9.8"
"svelte-portal": "^1.0.0"
},
"resolutions": {
"loader-utils": "1.4.1"

View File

@ -1,8 +0,0 @@
const ncp = require("ncp").ncp
ncp("./dist", "../server/builder", function (err) {
if (err) {
return console.error(err)
}
console.log("Copied dist folder to ../server/builder")
})

View File

@ -85,8 +85,8 @@
"@babel/core": "^7.12.14",
"@babel/plugin-transform-runtime": "^7.13.10",
"@babel/preset-env": "^7.13.12",
"@rollup/plugin-replace": "^2.4.2",
"@roxi/routify": "2.18.5",
"@rollup/plugin-replace": "^5.0.3",
"@roxi/routify": "2.18.12",
"@sveltejs/vite-plugin-svelte": "1.0.1",
"@testing-library/jest-dom": "5.17.0",
"@testing-library/svelte": "^3.2.2",
@ -95,16 +95,18 @@
"jest": "29.6.2",
"jsdom": "^21.1.1",
"ncp": "^2.0.0",
"rollup": "^2.44.0",
"svelte": "^3.48.0",
"svelte-jester": "^1.3.2",
"vite": "^3.0.8",
"vite-plugin-static-copy": "^0.16.0",
"vite": "^4.4.11",
"vite-plugin-static-copy": "^0.17.0",
"vitest": "^0.29.2"
},
"nx": {
"targets": {
"build": {
"outputs": [
"{workspaceRoot}/packages/server/builder"
],
"dependsOn": [
{
"projects": [

View File

@ -36,7 +36,7 @@
import { FieldType, FieldSubtype, SourceName } from "@budibase/types"
import RelationshipSelector from "components/common/RelationshipSelector.svelte"
const AUTO_TYPE = "auto"
const AUTO_TYPE = FIELDS.AUTO.type
const FORMULA_TYPE = FIELDS.FORMULA.type
const LINK_TYPE = FIELDS.LINK.type
const STRING_TYPE = FIELDS.STRING.type
@ -60,9 +60,14 @@
{}
)
function makeFieldId(type, subtype) {
function makeFieldId(type, subtype, autocolumn) {
// don't make field IDs for auto types
if (type === AUTO_TYPE || autocolumn) {
return type.toUpperCase()
} else {
return `${type}${subtype || ""}`.toUpperCase()
}
}
let originalName
let linkEditDisabled
@ -183,7 +188,8 @@
if (!savingColumn) {
editableColumn.fieldId = makeFieldId(
editableColumn.type,
editableColumn.subtype
editableColumn.subtype,
editableColumn.autocolumn
)
allowedTypes = getAllowedTypes().map(t => ({
@ -419,7 +425,7 @@
FIELDS.FORMULA,
FIELDS.JSON,
isUsers ? FIELDS.USERS : FIELDS.USER,
{ name: "Auto Column", type: AUTO_TYPE },
FIELDS.AUTO,
]
} else {
let fields = [
@ -538,7 +544,7 @@
getOptionValue={field => field.fieldId}
getOptionIcon={field => field.icon}
isOptionEnabled={option => {
if (option.type == AUTO_TYPE) {
if (option.type === AUTO_TYPE) {
return availableAutoColumnKeys?.length > 0
}
return true

View File

@ -13,6 +13,8 @@
import { Helpers } from "@budibase/bbui"
import { RelationshipErrorChecker } from "./relationshipErrors"
import { onMount } from "svelte"
import RelationshipSelector from "components/common/RelationshipSelector.svelte"
import { PrettyRelationshipDefinitions } from "constants/backend"
export let save
export let datasource
@ -22,16 +24,21 @@
export let selectedFromTable
export let close
const relationshipTypes = [
{
label: "One to Many",
value: RelationshipType.MANY_TO_ONE,
let relationshipMap = {
[RelationshipType.MANY_TO_MANY]: {
part1: PrettyRelationshipDefinitions.MANY,
part2: PrettyRelationshipDefinitions.MANY,
},
{
label: "Many to Many",
value: RelationshipType.MANY_TO_MANY,
[RelationshipType.MANY_TO_ONE]: {
part1: PrettyRelationshipDefinitions.ONE,
part2: PrettyRelationshipDefinitions.MANY,
},
]
}
let relationshipOpts1 = Object.values(PrettyRelationshipDefinitions)
let relationshipOpts2 = Object.values(PrettyRelationshipDefinitions)
let relationshipPart1 = PrettyRelationshipDefinitions.MANY
let relationshipPart2 = PrettyRelationshipDefinitions.ONE
let originalFromColumnName = toRelationship.name,
originalToColumnName = fromRelationship.name
@ -49,14 +56,32 @@
)
let errors = {}
let fromPrimary, fromForeign, fromColumn, toColumn
let fromId, toId, throughId, throughToKey, throughFromKey
let throughId, throughToKey, throughFromKey
let isManyToMany, isManyToOne, relationshipType
let hasValidated = false
$: fromId = null
$: toId = null
$: tableOptions = plusTables.map(table => ({
label: table.name,
value: table._id,
name: table.name,
_id: table._id,
}))
$: {
// Determine the relationship type based on the selected values of both parts
relationshipType = Object.entries(relationshipMap).find(
([_, parts]) =>
parts.part1 === relationshipPart1 && parts.part2 === relationshipPart2
)?.[0]
changed(() => {
hasValidated = false
})
}
$: valid =
getErrorCount(errors) === 0 && allRequiredAttributesSet(relationshipType)
$: isManyToMany = relationshipType === RelationshipType.MANY_TO_MANY
@ -338,33 +363,34 @@
onConfirm={saveRelationship}
disabled={!valid}
>
<Select
label="Relationship type"
options={relationshipTypes}
bind:value={relationshipType}
bind:error={errors.relationshipType}
on:change={() =>
changed(() => {
hasValidated = false
})}
/>
<div class="headings">
<Detail>Tables</Detail>
</div>
{#if !selectedFromTable}
<Select
label="Select from table"
options={tableOptions}
bind:value={fromId}
bind:error={errors.fromTable}
on:change={e =>
<RelationshipSelector
bind:relationshipPart1
bind:relationshipPart2
bind:relationshipTableIdPrimary={fromId}
bind:relationshipTableIdSecondary={toId}
{relationshipOpts1}
{relationshipOpts2}
{tableOptions}
{errors}
primaryDisabled={selectedFromTable}
primaryTableChanged={e =>
changed(() => {
const table = plusTables.find(tbl => tbl._id === e.detail)
fromColumn = table?.name || ""
fromPrimary = table?.primary?.[0]
})}
secondaryTableChanged={e =>
changed(() => {
const table = plusTables.find(tbl => tbl._id === e.detail)
toColumn = table.name || ""
fromForeign = null
})}
/>
{/if}
{#if isManyToOne && fromId}
<Select
label={`Primary Key (${getTable(fromId).name})`}
@ -374,18 +400,6 @@
on:change={changed}
/>
{/if}
<Select
label={"Select to table"}
options={tableOptions}
bind:value={toId}
bind:error={errors.toTable}
on:change={e =>
changed(() => {
const table = plusTables.find(tbl => tbl._id === e.detail)
toColumn = table.name || ""
fromForeign = null
})}
/>
{#if isManyToMany}
<Select
label={"Through"}

View File

@ -57,7 +57,7 @@
{#if $store.error}
<InlineAlert
type="error"
header={$store.error.title}
header="Error fetching {tableType}"
message={$store.error.description}
/>
{/if}

View File

@ -1,6 +1,6 @@
import { derived, writable, get } from "svelte/store"
import { keepOpen, notifications } from "@budibase/bbui"
import { datasources, ImportTableError, tables } from "stores/backend"
import { datasources, tables } from "stores/backend"
export const createTableSelectionStore = (integration, datasource) => {
const tableNamesStore = writable([])
@ -30,12 +30,7 @@ export const createTableSelectionStore = (integration, datasource) => {
notifications.success(`Tables fetched successfully.`)
await onComplete()
} catch (err) {
if (err instanceof ImportTableError) {
errorStore.set(err)
} else {
notifications.error("Error fetching tables.")
}
return keepOpen
}
}

View File

@ -6,11 +6,14 @@
export let relationshipTableIdPrimary
export let relationshipTableIdSecondary
export let editableColumn
export let linkEditDisabled
export let linkEditDisabled = false
export let tableOptions
export let errors
export let relationshipOpts1
export let relationshipOpts2
export let primaryTableChanged
export let secondaryTableChanged
export let primaryDisabled = true
</script>
<div class="relationship-container">
@ -19,16 +22,19 @@
disabled={linkEditDisabled}
bind:value={relationshipPart1}
options={relationshipOpts1}
bind:error={errors.relationshipType}
/>
</div>
<div class="relationship-label">in</div>
<div class="relationship-part">
<Select
disabled
disabled={primaryDisabled}
options={tableOptions}
getOptionLabel={table => table.name}
getOptionValue={table => table._id}
bind:value={relationshipTableIdPrimary}
on:change={primaryTableChanged}
bind:error={errors.fromTable}
/>
</div>
</div>
@ -46,20 +52,24 @@
<Select
disabled={linkEditDisabled}
bind:value={relationshipTableIdSecondary}
bind:error={errors.toTable}
options={tableOptions.filter(
table => table._id !== relationshipTableIdPrimary
)}
getOptionLabel={table => table.name}
getOptionValue={table => table._id}
on:change={secondaryTableChanged}
/>
</div>
</div>
<Input
{#if editableColumn}
<Input
disabled={linkEditDisabled}
label={`Column name in other table`}
bind:value={editableColumn.fieldName}
error={errors.relatedName}
/>
/>
{/if}
<style>
.relationship-container {

View File

@ -54,6 +54,7 @@
label="App export"
on:change={e => {
file = e.detail?.[0]
encrypted = file?.name?.endsWith(".enc.tar.gz")
}}
/>
<Toggle text="Encrypted" bind:value={encrypted} />

View File

@ -1,5 +1,21 @@
import { FieldType, FieldSubtype } from "@budibase/types"
export const AUTO_COLUMN_SUB_TYPES = {
AUTO_ID: "autoID",
CREATED_BY: "createdBy",
CREATED_AT: "createdAt",
UPDATED_BY: "updatedBy",
UPDATED_AT: "updatedAt",
}
export const AUTO_COLUMN_DISPLAY_NAMES = {
AUTO_ID: "Auto ID",
CREATED_BY: "Created By",
CREATED_AT: "Created At",
UPDATED_BY: "Updated By",
UPDATED_AT: "Updated At",
}
export const FIELDS = {
STRING: {
name: "Text",
@ -107,6 +123,12 @@ export const FIELDS = {
presence: false,
},
},
AUTO: {
name: "Auto Column",
type: FieldType.AUTO,
icon: "MagicWand",
constraints: {},
},
FORMULA: {
name: "Formula",
type: FieldType.FORMULA,
@ -139,22 +161,6 @@ export const FIELDS = {
},
}
export const AUTO_COLUMN_SUB_TYPES = {
AUTO_ID: "autoID",
CREATED_BY: "createdBy",
CREATED_AT: "createdAt",
UPDATED_BY: "updatedBy",
UPDATED_AT: "updatedAt",
}
export const AUTO_COLUMN_DISPLAY_NAMES = {
AUTO_ID: "Auto ID",
CREATED_BY: "Created By",
CREATED_AT: "Created At",
UPDATED_BY: "Updated By",
UPDATED_AT: "Updated At",
}
export const FILE_TYPES = {
IMAGE: ["png", "tiff", "gif", "raw", "jpg", "jpeg"],
CODE: ["js", "rs", "py", "java", "rb", "hs", "yml"],

View File

@ -62,7 +62,14 @@
</div>
{/if}
<div class="truncate">
<Body>{getSubtitle(datasource)}</Body>
<Body>
{@const subtitle = getSubtitle(datasource)}
{#if subtitle}
{subtitle}
{:else}
{Object.values(datasource.config).join(" / ")}
{/if}
</Body>
</div>
</div>
<div class="right">

View File

@ -13,7 +13,7 @@
import ExportAppModal from "components/start/ExportAppModal.svelte"
import ImportAppModal from "components/start/ImportAppModal.svelte"
$: filteredApps = $apps.filter(app => app.devId == $store.appId)
$: filteredApps = $apps.filter(app => app.devId === $store.appId)
$: app = filteredApps.length ? filteredApps[0] : {}
$: appDeployed = app?.status === AppStatus.DEPLOYED

View File

@ -9,15 +9,19 @@ import { API } from "api"
import { DatasourceFeature } from "@budibase/types"
import { TableNames } from "constants"
export class ImportTableError extends Error {
constructor(message) {
super(message)
const [title, description] = message.split(" - ")
class TableImportError extends Error {
constructor(errors) {
super()
this.name = "TableImportError"
this.errors = errors
}
this.name = "TableSelectionError"
// Capitalize the first character of both the title and description
this.title = title[0].toUpperCase() + title.substr(1)
this.description = description[0].toUpperCase() + description.substr(1)
get description() {
let message = ""
for (const key in this.errors) {
message += `${key}: ${this.errors[key]}\n`
}
return message
}
}
@ -25,7 +29,6 @@ export function createDatasourcesStore() {
const store = writable({
list: [],
selectedDatasourceId: null,
schemaError: null,
})
const derivedStore = derived([store, tables], ([$store, $tables]) => {
@ -75,18 +78,13 @@ export function createDatasourcesStore() {
store.update(state => ({
...state,
selectedDatasourceId: id,
// Remove any possible schema error
schemaError: null,
}))
}
const updateDatasource = response => {
const { datasource, error } = response
if (error) {
store.update(state => ({
...state,
schemaError: error,
}))
const { datasource, errors } = response
if (errors && Object.keys(errors).length > 0) {
throw new TableImportError(errors)
}
replaceDatasource(datasource._id, datasource)
select(datasource._id)
@ -94,20 +92,11 @@ export function createDatasourcesStore() {
}
const updateSchema = async (datasource, tablesFilter) => {
try {
const response = await API.buildDatasourceSchema({
datasourceId: datasource?._id,
tablesFilter,
})
updateDatasource(response)
} catch (e) {
// buildDatasourceSchema call returns user presentable errors with two parts divided with a " - ".
if (e.message.split(" - ").length === 2) {
throw new ImportTableError(e.message)
} else {
throw e
}
}
}
const sourceCount = source => {
@ -136,6 +125,7 @@ export function createDatasourcesStore() {
config,
name: `${integration.friendlyName}${nameModifier}`,
plus: integration.plus && integration.name !== IntegrationTypes.REST,
isSQL: integration.isSQL,
}
if (await checkDatasourceValidity(integration, datasource)) {
@ -171,12 +161,6 @@ export function createDatasourcesStore() {
replaceDatasource(datasource._id, null)
}
const removeSchemaError = () => {
store.update(state => {
return { ...state, schemaError: null }
})
}
const replaceDatasource = (datasourceId, datasource) => {
if (!datasourceId) {
return
@ -229,7 +213,6 @@ export function createDatasourcesStore() {
create,
update,
delete: deleteDatasource,
removeSchemaError,
replaceDatasource,
getTableNames,
}

View File

@ -4,7 +4,7 @@ export { views } from "./views"
export { viewsV2 } from "./viewsV2"
export { permissions } from "./permissions"
export { roles } from "./roles"
export { datasources, ImportTableError } from "./datasources"
export { datasources } from "./datasources"
export { integrations } from "./integrations"
export { sortedIntegrations } from "./sortedIntegrations"
export { queries } from "./queries"

View File

@ -5788,6 +5788,21 @@
}
]
},
{
"type": "event",
"label": "On row click",
"key": "onRowClick",
"context": [
{
"label": "Clicked row",
"key": "row"
}
],
"dependsOn": {
"setting": "allowEditRows",
"value": false
}
},
{
"type": "boolean",
"label": "Add rows",

View File

@ -14,12 +14,14 @@
export let initialSortOrder = null
export let fixedRowHeight = null
export let columns = null
export let onRowClick = null
const component = getContext("component")
const { styleable, API, builderStore, notificationStore } = getContext("sdk")
$: columnWhitelist = columns?.map(col => col.name)
$: schemaOverrides = getSchemaOverrides(columns)
$: handleRowClick = allowEditRows ? undefined : onRowClick
const getSchemaOverrides = columns => {
let overrides = {}
@ -56,6 +58,7 @@
showControls={false}
notifySuccess={notificationStore.actions.success}
notifyError={notificationStore.actions.error}
on:rowclick={e => handleRowClick?.({ row: e.detail })}
/>
</div>

View File

@ -17,13 +17,24 @@
const { config, dispatch, selectedRows } = getContext("grid")
const svelteDispatch = createEventDispatcher()
const select = () => {
const select = e => {
e.stopPropagation()
svelteDispatch("select")
const id = row?._id
if (id) {
selectedRows.actions.toggleRow(id)
}
}
const bulkDelete = e => {
e.stopPropagation()
dispatch("request-bulk-delete")
}
const expand = e => {
e.stopPropagation()
svelteDispatch("expand")
}
</script>
<GridCell
@ -56,7 +67,7 @@
{/if}
{/if}
{#if rowSelected && $config.canDeleteRows}
<div class="delete" on:click={() => dispatch("request-bulk-delete")}>
<div class="delete" on:click={bulkDelete}>
<Icon
name="Delete"
size="S"
@ -65,12 +76,7 @@
</div>
{:else}
<div class="expand" class:visible={$config.canExpandRows && expandable}>
<Icon
size="S"
name="Maximize"
hoverable
on:click={() => svelteDispatch("expand")}
/>
<Icon size="S" name="Maximize" hoverable on:click={expand} />
</div>
{/if}
</div>

View File

@ -35,7 +35,7 @@
</script>
<div bind:this={body} class="grid-body">
<GridScrollWrapper scrollHorizontally scrollVertically wheelInteractive>
<GridScrollWrapper scrollHorizontally scrollVertically attachHandlers>
{#each $renderedRows as row, idx}
<GridRow
{row}

View File

@ -17,6 +17,7 @@
columnHorizontalInversionIndex,
contentLines,
isDragging,
dispatch,
} = getContext("grid")
$: rowSelected = !!$selectedRows[row._id]
@ -30,6 +31,7 @@
on:focus
on:mouseenter={$isDragging ? null : () => ($hoveredRowId = row._id)}
on:mouseleave={$isDragging ? null : () => ($hoveredRowId = null)}
on:click={() => dispatch("rowclick", row)}
>
{#each $renderedColumns as column, columnIdx (column.name)}
{@const cellId = `${row._id}-${column.name}`}

View File

@ -17,7 +17,11 @@
export let scrollVertically = false
export let scrollHorizontally = false
export let wheelInteractive = false
export let attachHandlers = false
// Used for tracking touch events
let initialTouchX
let initialTouchY
$: style = generateStyle($scroll, $rowHeight, $hiddenColumnsWidth)
@ -27,17 +31,47 @@
return `transform: translate3d(${offsetX}px, ${offsetY}px, 0);`
}
// Handles a wheel even and updates the scroll offsets
// Handles a mouse wheel event and updates scroll state
const handleWheel = e => {
e.preventDefault()
debouncedHandleWheel(e.deltaX, e.deltaY, e.clientY)
updateScroll(e.deltaX, e.deltaY, e.clientY)
// If a context menu was visible, hide it
if ($menu.visible) {
menu.actions.close()
}
}
const debouncedHandleWheel = domDebounce((deltaX, deltaY, clientY) => {
// Handles touch start events
const handleTouchStart = e => {
if (!e.touches?.[0]) return
initialTouchX = e.touches[0].clientX
initialTouchY = e.touches[0].clientY
}
// Handles touch move events and updates scroll state
const handleTouchMove = e => {
if (!e.touches?.[0]) return
e.preventDefault()
// Compute delta from previous event, and update scroll
const deltaX = initialTouchX - e.touches[0].clientX
const deltaY = initialTouchY - e.touches[0].clientY
updateScroll(deltaX, deltaY)
// Store position to reference in next event
initialTouchX = e.touches[0].clientX
initialTouchY = e.touches[0].clientY
// If a context menu was visible, hide it
if ($menu.visible) {
menu.actions.close()
}
}
// Updates the scroll offset by a certain delta, and ensure scrolling
// stays within sensible bounds. Debounced for performance.
const updateScroll = domDebounce((deltaX, deltaY, clientY) => {
const { top, left } = $scroll
// Calculate new scroll top
@ -55,15 +89,19 @@
})
// Hover row under cursor
if (clientY != null) {
const y = clientY - $bounds.top + (newScrollTop % $rowHeight)
const hoveredRow = $renderedRows[Math.floor(y / $rowHeight)]
hoveredRowId.set(hoveredRow?._id)
}
})
</script>
<div
class="outer"
on:wheel={wheelInteractive ? handleWheel : null}
on:wheel={attachHandlers ? handleWheel : null}
on:touchstart={attachHandlers ? handleTouchStart : null}
on:touchmove={attachHandlers ? handleTouchMove : null}
on:click|self={() => ($focusedCellId = null)}
>
<div {style} class="inner">

View File

@ -205,7 +205,7 @@
{/if}
</div>
<div class="normal-columns" transition:fade|local={{ duration: 130 }}>
<GridScrollWrapper scrollHorizontally wheelInteractive>
<GridScrollWrapper scrollHorizontally attachHandlers>
<div class="row">
{#each $renderedColumns as column, columnIdx}
{@const cellId = `new-${column.name}`}

View File

@ -64,7 +64,7 @@
</div>
<div class="content" on:mouseleave={() => ($hoveredRowId = null)}>
<GridScrollWrapper scrollVertically wheelInteractive>
<GridScrollWrapper scrollVertically attachHandlers>
{#each $renderedRows as row, idx}
{@const rowSelected = !!$selectedRows[row._id]}
{@const rowHovered = $hoveredRowId === row._id}
@ -74,6 +74,7 @@
class="row"
on:mouseenter={$isDragging ? null : () => ($hoveredRowId = row._id)}
on:mouseleave={$isDragging ? null : () => ($hoveredRowId = null)}
on:click={() => dispatch("rowclick", row)}
>
<GutterCell {row} {rowFocused} {rowHovered} {rowSelected} />
{#if $stickyColumn}

View File

@ -53,18 +53,27 @@
}
}
const getLocation = e => {
return {
y: e.touches?.[0]?.clientY ?? e.clientY,
x: e.touches?.[0]?.clientX ?? e.clientX,
}
}
// V scrollbar drag handlers
const startVDragging = e => {
e.preventDefault()
initialMouse = e.clientY
initialMouse = getLocation(e).y
initialScroll = $scrollTop
document.addEventListener("mousemove", moveVDragging)
document.addEventListener("touchmove", moveVDragging)
document.addEventListener("mouseup", stopVDragging)
document.addEventListener("touchend", stopVDragging)
isDraggingV = true
closeMenu()
}
const moveVDragging = domDebounce(e => {
const delta = e.clientY - initialMouse
const delta = getLocation(e).y - initialMouse
const weight = delta / availHeight
const newScrollTop = initialScroll + weight * $maxScrollTop
scroll.update(state => ({
@ -74,22 +83,26 @@
})
const stopVDragging = () => {
document.removeEventListener("mousemove", moveVDragging)
document.removeEventListener("touchmove", moveVDragging)
document.removeEventListener("mouseup", stopVDragging)
document.removeEventListener("touchend", stopVDragging)
isDraggingV = false
}
// H scrollbar drag handlers
const startHDragging = e => {
e.preventDefault()
initialMouse = e.clientX
initialMouse = getLocation(e).x
initialScroll = $scrollLeft
document.addEventListener("mousemove", moveHDragging)
document.addEventListener("touchmove", moveHDragging)
document.addEventListener("mouseup", stopHDragging)
document.addEventListener("touchend", stopHDragging)
isDraggingH = true
closeMenu()
}
const moveHDragging = domDebounce(e => {
const delta = e.clientX - initialMouse
const delta = getLocation(e).x - initialMouse
const weight = delta / availWidth
const newScrollLeft = initialScroll + weight * $maxScrollLeft
scroll.update(state => ({
@ -99,7 +112,9 @@
})
const stopHDragging = () => {
document.removeEventListener("mousemove", moveHDragging)
document.removeEventListener("touchmove", moveHDragging)
document.removeEventListener("mouseup", stopHDragging)
document.removeEventListener("touchend", stopHDragging)
isDraggingH = false
}
</script>
@ -109,6 +124,7 @@
class="v-scrollbar"
style="--size:{ScrollBarSize}px; top:{barTop}px; height:{barHeight}px;"
on:mousedown={startVDragging}
on:touchstart={startVDragging}
class:dragging={isDraggingV}
/>
{/if}
@ -117,6 +133,7 @@
class="h-scrollbar"
style="--size:{ScrollBarSize}px; left:{barLeft}px; width:{barWidth}px;"
on:mousedown={startHDragging}
on:touchstart={startHDragging}
class:dragging={isDraggingH}
/>
{/if}

View File

@ -1,4 +1,5 @@
import { writable, get } from "svelte/store"
import { Helpers } from "@budibase/bbui"
export const createStores = () => {
const copiedCell = writable(null)
@ -12,7 +13,16 @@ export const createActions = context => {
const { copiedCell, focusedCellAPI } = context
const copy = () => {
copiedCell.set(get(focusedCellAPI)?.getValue())
const value = get(focusedCellAPI)?.getValue()
copiedCell.set(value)
// Also copy a stringified version to the clipboard
let stringified = ""
if (value != null && value !== "") {
// Only conditionally stringify to avoid redundant quotes around text
stringified = typeof value === "object" ? JSON.stringify(value) : value
}
Helpers.copyToClipboard(stringified)
}
const paste = () => {

View File

@ -11,15 +11,14 @@
"scripts": {
"prebuild": "rimraf dist/",
"build": "node ./scripts/build.js",
"postbuild": "copyfiles -f ../client/dist/budibase-client.js ../client/manifest.json client && copyfiles -f ../../yarn.lock ./dist/",
"check:types": "tsc -p tsconfig.json --noEmit --paths null",
"postbuild": "copyfiles -f ../client/dist/budibase-client.js ../client/manifest.json client",
"build:dev": "yarn prebuild && tsc --build --watch --preserveWatchOutput",
"debug": "yarn build && node --expose-gc --inspect=9222 dist/index.js",
"test": "bash scripts/test.sh",
"test:memory": "jest --maxWorkers=2 --logHeapUsage --forceExit",
"test:watch": "jest --watch",
"predocker": "copyfiles -f ../client/dist/budibase-client.js ../client/manifest.json client && yarn build && cp ../../yarn.lock ./dist/",
"build:docker": "yarn predocker && docker build . -t app-service --label version=$BUDIBASE_RELEASE_VERSION",
"build:docker": "yarn build && docker build . -t app-service --label version=$BUDIBASE_RELEASE_VERSION",
"run:docker": "node dist/index.js",
"run:docker:cluster": "pm2-runtime start pm2.config.js",
"dev:stack:up": "node scripts/dev/manage.js up",
@ -54,7 +53,7 @@
"@bull-board/api": "3.7.0",
"@bull-board/koa": "3.9.4",
"@elastic/elasticsearch": "7.10.0",
"@google-cloud/firestore": "5.0.2",
"@google-cloud/firestore": "6.8.0",
"@koa/router": "8.0.8",
"@sentry/node": "6.17.7",
"@socket.io/redis-adapter": "^8.2.1",
@ -70,7 +69,6 @@
"curlconverter": "3.21.0",
"dd-trace": "3.13.2",
"dotenv": "8.2.0",
"fix-path": "3.0.0",
"form-data": "4.0.0",
"global-agent": "3.0.0",
"google-auth-library": "7.12.0",
@ -96,12 +94,11 @@
"object-sizeof": "2.6.1",
"open": "8.4.0",
"openai": "^3.2.1",
"openapi-types": "9.3.1",
"pg": "8.10.0",
"posthog-node": "1.3.0",
"pouchdb": "7.3.0",
"pouchdb-all-dbs": "1.0.2",
"pouchdb-all-dbs": "1.1.1",
"pouchdb-find": "7.2.2",
"pouchdb-replication-stream": "1.2.9",
"redis": "4",
"server-destroy": "1.0.1",
"snowflake-promise": "^4.5.0",
@ -113,8 +110,7 @@
"validate.js": "0.13.1",
"vm2": "^3.9.19",
"worker-farm": "1.7.0",
"xml2js": "0.5.0",
"yargs": "13.2.4"
"xml2js": "0.5.0"
},
"devDependencies": {
"@babel/core": "7.17.4",
@ -144,7 +140,6 @@
"jest-runner": "29.6.2",
"jest-serial-runner": "1.2.1",
"nodemon": "2.0.15",
"openapi-types": "9.3.1",
"openapi-typescript": "5.2.0",
"path-to-regexp": "6.2.0",
"rimraf": "3.0.2",
@ -154,7 +149,8 @@
"ts-node": "10.8.1",
"tsconfig-paths": "4.0.0",
"typescript": "5.2.2",
"update-dotenv": "1.1.1"
"update-dotenv": "1.1.1",
"yargs": "13.2.4"
},
"optionalDependencies": {
"oracledb": "5.3.0"
@ -171,6 +167,22 @@
"target": "build"
}
]
},
"build": {
"outputs": [
"{projectRoot}/builder",
"{projectRoot}/client",
"{projectRoot}/dist"
],
"dependsOn": [
{
"projects": [
"@budibase/client",
"@budibase/builder"
],
"target": "build"
}
]
}
}
}

View File

@ -5,7 +5,6 @@ import {
getTableParams,
} from "../../db/utils"
import { destroy as tableDestroy } from "./table/internal"
import { BuildSchemaErrors, InvalidColumns } from "../../constants"
import { getIntegration } from "../../integrations"
import { invalidateDynamicVariables } from "../../threads/utils"
import { context, db as dbCore, events } from "@budibase/backend-core"
@ -14,10 +13,13 @@ import {
CreateDatasourceResponse,
Datasource,
DatasourcePlus,
ExternalTable,
FetchDatasourceInfoRequest,
FetchDatasourceInfoResponse,
IntegrationBase,
Schema,
SourceName,
Table,
UpdateDatasourceResponse,
UserCtx,
VerifyDatasourceRequest,
@ -27,23 +29,6 @@ import sdk from "../../sdk"
import { builderSocket } from "../../websockets"
import { setupCreationAuth as googleSetupCreationAuth } from "../../integrations/googlesheets"
function getErrorTables(errors: any, errorType: string) {
return Object.entries(errors)
.filter(entry => entry[1] === errorType)
.map(([name]) => name)
}
function updateError(error: any, newError: any, tables: string[]) {
if (!error) {
error = ""
}
if (error.length > 0) {
error += "\n"
}
error += `${newError} ${tables.join(", ")}`
return error
}
async function getConnector(
datasource: Datasource
): Promise<IntegrationBase | DatasourcePlus> {
@ -71,48 +56,36 @@ async function getAndMergeDatasource(datasource: Datasource) {
return await sdk.datasources.enrich(enrichedDatasource)
}
async function buildSchemaHelper(datasource: Datasource) {
async function buildSchemaHelper(datasource: Datasource): Promise<Schema> {
const connector = (await getConnector(datasource)) as DatasourcePlus
await connector.buildSchema(datasource._id!, datasource.entities!)
const errors = connector.schemaErrors
let error = null
if (errors && Object.keys(errors).length > 0) {
const noKey = getErrorTables(errors, BuildSchemaErrors.NO_KEY)
const invalidCol = getErrorTables(errors, BuildSchemaErrors.INVALID_COLUMN)
if (noKey.length) {
error = updateError(
error,
"No primary key constraint found for the following:",
noKey
return await connector.buildSchema(
datasource._id!,
datasource.entities! as Record<string, ExternalTable>
)
}
if (invalidCol.length) {
const invalidCols = Object.values(InvalidColumns).join(", ")
error = updateError(
error,
`Cannot use columns ${invalidCols} found in following:`,
invalidCol
)
}
}
return { tables: connector.tables, error }
}
async function buildFilteredSchema(datasource: Datasource, filter?: string[]) {
let { tables, error } = await buildSchemaHelper(datasource)
let finalTables = tables
if (filter) {
finalTables = {}
for (let key in tables) {
if (
filter.some((filter: any) => filter.toLowerCase() === key.toLowerCase())
) {
finalTables[key] = tables[key]
async function buildFilteredSchema(
datasource: Datasource,
filter?: string[]
): Promise<Schema> {
let schema = await buildSchemaHelper(datasource)
if (!filter) {
return schema
}
let filteredSchema: Schema = { tables: {}, errors: {} }
for (let key in schema.tables) {
if (filter.some(filter => filter.toLowerCase() === key.toLowerCase())) {
filteredSchema.tables[key] = schema.tables[key]
}
}
for (let key in schema.errors) {
if (filter.some(filter => filter.toLowerCase() === key.toLowerCase())) {
filteredSchema.errors[key] = schema.errors[key]
}
return { tables: finalTables, error }
}
return filteredSchema
}
export async function fetch(ctx: UserCtx) {
@ -156,7 +129,7 @@ export async function buildSchemaFromDb(ctx: UserCtx) {
const tablesFilter = ctx.request.body.tablesFilter
const datasource = await sdk.datasources.get(ctx.params.datasourceId)
const { tables, error } = await buildFilteredSchema(datasource, tablesFilter)
const { tables, errors } = await buildFilteredSchema(datasource, tablesFilter)
datasource.entities = tables
setDefaultDisplayColumns(datasource)
@ -164,13 +137,11 @@ export async function buildSchemaFromDb(ctx: UserCtx) {
sdk.tables.populateExternalTableSchemas(datasource)
)
datasource._rev = dbResp.rev
const cleanedDatasource = await sdk.datasources.removeSecretSingle(datasource)
const res: any = { datasource: cleanedDatasource }
if (error) {
res.error = error
ctx.body = {
datasource: await sdk.datasources.removeSecretSingle(datasource),
errors,
}
ctx.body = res
}
/**
@ -298,15 +269,12 @@ export async function save(
type: plus ? DocumentType.DATASOURCE_PLUS : DocumentType.DATASOURCE,
}
let schemaError = null
let errors: Record<string, string> = {}
if (fetchSchema) {
const { tables, error } = await buildFilteredSchema(
datasource,
tablesFilter
)
schemaError = error
datasource.entities = tables
const schema = await buildFilteredSchema(datasource, tablesFilter)
datasource.entities = schema.tables
setDefaultDisplayColumns(datasource)
errors = schema.errors
}
if (preSaveAction[datasource.source]) {
@ -327,13 +295,10 @@ export async function save(
}
}
const response: CreateDatasourceResponse = {
ctx.body = {
datasource: await sdk.datasources.removeSecretSingle(datasource),
errors,
}
if (schemaError) {
response.error = schemaError
}
ctx.body = response
builderSocket?.emitDatasourceUpdate(ctx, datasource)
}

View File

@ -23,7 +23,10 @@ describe("/applications/:appId/import", () => {
.set(config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(200)
expect(res.body.message).toBe("app updated")
const appPackage = await config.api.application.get(appId!)
expect(appPackage.navigation?.links?.length).toBe(2)
expect(appPackage.navigation?.links?.[0].url).toBe("/blank")
expect(appPackage.navigation?.links?.[1].url).toBe("/derp")
const screens = await config.api.screen.list()
expect(screens.length).toBe(2)
expect(screens[0].routing.route).toBe("/derp")

View File

@ -37,7 +37,7 @@ describe("/datasources", () => {
.expect(200)
expect(res.body.datasource.name).toEqual("Test")
expect(res.body.errors).toBeUndefined()
expect(res.body.errors).toEqual({})
expect(events.datasource.created).toBeCalledTimes(1)
})
})

View File

@ -1,4 +1,4 @@
import Sentry from "@sentry/node"
import * as Sentry from "@sentry/node"
if (process.env.DD_APM_ENABLED) {
require("./ddApm")

View File

@ -159,11 +159,6 @@ export enum InvalidColumns {
TABLE_ID = "tableId",
}
export enum BuildSchemaErrors {
NO_KEY = "no_key",
INVALID_COLUMN = "invalid_column",
}
export enum AutomationErrors {
INCORRECT_TYPE = "INCORRECT_TYPE",
MAX_ITERATIONS = "MAX_ITERATIONS_REACHED",

View File

@ -1,12 +1,11 @@
import { bootstrap } from "global-agent"
const fixPath = require("fix-path")
import { checkDevelopmentEnvironment } from "./utilities/fileSystem"
function runServer() {
// this will shutdown the system if development environment not ready
// will print an error explaining what to do
checkDevelopmentEnvironment()
fixPath()
// this will set up http and https proxies from env variables
process.env.GLOBAL_AGENT_FORCE_GLOBAL_AGENT = "false"
bootstrap()

View File

@ -18,6 +18,7 @@ import _ from "lodash"
import { generator } from "@budibase/backend-core/tests"
import { utils } from "@budibase/backend-core"
import { databaseTestProviders } from "../integrations/tests/utils"
import { Client } from "pg"
const config = setup.getConfig()!
@ -1055,4 +1056,46 @@ describe("postgres integrations", () => {
expect(response.body.tableNames.indexOf(primaryName)).not.toBe(-1)
})
})
describe("POST /api/datasources/:datasourceId/schema", () => {
let client: Client
beforeEach(async () => {
client = new Client(
(await databaseTestProviders.postgres.getDsConfig()).config!
)
await client.connect()
})
afterEach(async () => {
await client.query(`DROP TABLE IF EXISTS "table"`)
await client.end()
})
it("recognises when a table has no primary key", async () => {
await client.query(`CREATE TABLE "table" (id SERIAL)`)
const response = await makeRequest(
"post",
`/api/datasources/${postgresDatasource._id}/schema`
)
expect(response.body.errors).toEqual({
table: "Table must have a primary key.",
})
})
it("recognises when a table is using a reserved column name", async () => {
await client.query(`CREATE TABLE "table" (_id SERIAL PRIMARY KEY) `)
const response = await makeRequest(
"post",
`/api/datasources/${postgresDatasource._id}/schema`
)
expect(response.body.errors).toEqual({
table: "Table contains invalid columns.",
})
})
})
})
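
The failure modes these tests assert can be reproduced against any reachable Postgres instance (sketch; the connection string is made up):

psql "postgres://user:pass@127.0.0.1:5432/db" -c 'CREATE TABLE "table" (id SERIAL)'
# fetching the datasource schema now responds with:
#   { "errors": { "table": "Table must have a primary key." } }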

View File

@ -14,9 +14,14 @@ import {
SortJson,
ExternalTable,
TableRequest,
Schema,
} from "@budibase/types"
import { OAuth2Client } from "google-auth-library"
import { buildExternalTableId, finaliseExternalTables } from "./utils"
import {
buildExternalTableId,
checkExternalTables,
finaliseExternalTables,
} from "./utils"
import { GoogleSpreadsheet, GoogleSpreadsheetRow } from "google-spreadsheet"
import fetch from "node-fetch"
import { cache, configs, context, HTTPError } from "@budibase/backend-core"
@ -138,8 +143,6 @@ const SCHEMA: Integration = {
class GoogleSheetsIntegration implements DatasourcePlus {
private readonly config: GoogleSheetsConfig
private client: GoogleSpreadsheet
public tables: Record<string, ExternalTable> = {}
public schemaErrors: Record<string, string> = {}
constructor(config: GoogleSheetsConfig) {
this.config = config
@ -281,19 +284,37 @@ class GoogleSheetsIntegration implements DatasourcePlus {
async buildSchema(
datasourceId: string,
entities: Record<string, ExternalTable>
) {
): Promise<Schema> {
// not fully configured yet
if (!this.config.auth) {
return
return { tables: {}, errors: {} }
}
await this.connect()
const sheets = this.client.sheetsByIndex
const tables: Record<string, ExternalTable> = {}
let errors: Record<string, string> = {}
await utils.parallelForeach(
sheets,
async sheet => {
// must fetch rows to determine schema
try {
await sheet.getRows()
} catch (err) {
// We expect this to always be an Error so if it's not, rethrow it to
// make sure we don't fail quietly.
if (!(err instanceof Error)) {
throw err
}
if (err.message.startsWith("No values in the header row")) {
errors[sheet.title] = err.message
} else {
// If we get an error we don't expect, rethrow to avoid failing
// quietly.
throw err
}
return
}
const id = buildExternalTableId(datasourceId, sheet.title)
tables[sheet.title] = this.getTableSchema(
@ -305,9 +326,9 @@ class GoogleSheetsIntegration implements DatasourcePlus {
},
10
)
const final = finaliseExternalTables(tables, entities)
this.tables = final.tables
this.schemaErrors = final.errors
let externalTables = finaliseExternalTables(tables, entities)
errors = { ...errors, ...checkExternalTables(externalTables) }
return { tables: externalTables, errors }
}
async query(json: QueryJson) {
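
The header-row handling above follows a collect-or-rethrow pattern: record the one failure we know how to report per sheet, rethrow anything else. A standalone sketch (sheet shape simplified, function name illustrative):

async function collectSheetErrors(
  sheets: { title: string; getRows(): Promise<unknown> }[]
): Promise<Record<string, string>> {
  const errors: Record<string, string> = {}
  for (const sheet of sheets) {
    try {
      await sheet.getRows()
    } catch (err) {
      if (
        err instanceof Error &&
        err.message.startsWith("No values in the header row")
      ) {
        // expected failure: record it against the sheet and carry on
        errors[sheet.title] = err.message
      } else {
        // unexpected failure: rethrow so we don't fail quietly
        throw err
      }
    }
  }
  return errors
}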

View File

@ -11,6 +11,7 @@ import {
DatasourceFeature,
ConnectionInfo,
SourceName,
Schema,
} from "@budibase/types"
import {
getSqlQuery,
@ -18,6 +19,7 @@ import {
convertSqlType,
finaliseExternalTables,
SqlClient,
checkExternalTables,
} from "./utils"
import Sql from "./base/sql"
import { MSSQLTablesResponse, MSSQLColumn } from "./base/types"
@ -190,8 +192,6 @@ class SqlServerIntegration extends Sql implements DatasourcePlus {
private readonly config: MSSQLConfig
private index: number = 0
private client?: sqlServer.ConnectionPool
public tables: Record<string, ExternalTable> = {}
public schemaErrors: Record<string, string> = {}
MASTER_TABLES = [
"spt_fallback_db",
@ -381,7 +381,7 @@ class SqlServerIntegration extends Sql implements DatasourcePlus {
async buildSchema(
datasourceId: string,
entities: Record<string, ExternalTable>
) {
): Promise<Schema> {
await this.connect()
let tableInfo: MSSQLTablesResponse[] = await this.runSQL(this.TABLES_SQL)
if (tableInfo == null || !Array.isArray(tableInfo)) {
@ -445,9 +445,12 @@ class SqlServerIntegration extends Sql implements DatasourcePlus {
schema,
}
}
const final = finaliseExternalTables(tables, entities)
this.tables = final.tables
this.schemaErrors = final.errors
let externalTables = finaliseExternalTables(tables, entities)
let errors = checkExternalTables(externalTables)
return {
tables: externalTables,
errors,
}
}
async queryTableNames() {

View File

@ -10,6 +10,7 @@ import {
DatasourceFeature,
ConnectionInfo,
SourceName,
Schema,
} from "@budibase/types"
import {
getSqlQuery,
@ -17,6 +18,7 @@ import {
buildExternalTableId,
convertSqlType,
finaliseExternalTables,
checkExternalTables,
} from "./utils"
import dayjs from "dayjs"
import { NUMBER_REGEX } from "../utilities"
@ -140,8 +142,6 @@ export function bindingTypeCoerce(bindings: any[]) {
class MySQLIntegration extends Sql implements DatasourcePlus {
private config: MySQLConfig
private client?: mysql.Connection
public tables: Record<string, ExternalTable> = {}
public schemaErrors: Record<string, string> = {}
constructor(config: MySQLConfig) {
super(SqlClient.MY_SQL)
@ -279,7 +279,7 @@ class MySQLIntegration extends Sql implements DatasourcePlus {
async buildSchema(
datasourceId: string,
entities: Record<string, ExternalTable>
) {
): Promise<Schema> {
const tables: { [key: string]: ExternalTable } = {}
await this.connect()
@ -328,9 +328,10 @@ class MySQLIntegration extends Sql implements DatasourcePlus {
} finally {
await this.disconnect()
}
const final = finaliseExternalTables(tables, entities)
this.tables = final.tables
this.schemaErrors = final.errors
let externalTables = finaliseExternalTables(tables, entities)
let errors = checkExternalTables(externalTables)
return { tables: externalTables, errors }
}
async queryTableNames() {

View File

@ -9,9 +9,11 @@ import {
DatasourcePlus,
DatasourceFeature,
ConnectionInfo,
Schema,
} from "@budibase/types"
import {
buildExternalTableId,
checkExternalTables,
convertSqlType,
finaliseExternalTables,
getSqlQuery,
@ -108,9 +110,6 @@ class OracleIntegration extends Sql implements DatasourcePlus {
private readonly config: OracleConfig
private index: number = 1
public tables: Record<string, ExternalTable> = {}
public schemaErrors: Record<string, string> = {}
private readonly COLUMNS_SQL = `
SELECT
tabs.table_name,
@ -265,7 +264,7 @@ class OracleIntegration extends Sql implements DatasourcePlus {
async buildSchema(
datasourceId: string,
entities: Record<string, ExternalTable>
) {
): Promise<Schema> {
const columnsResponse = await this.internalQuery<OracleColumnsResponse>({
sql: this.COLUMNS_SQL,
})
@ -326,9 +325,9 @@ class OracleIntegration extends Sql implements DatasourcePlus {
})
})
const final = finaliseExternalTables(tables, entities)
this.tables = final.tables
this.schemaErrors = final.errors
let externalTables = finaliseExternalTables(tables, entities)
let errors = checkExternalTables(externalTables)
return { tables: externalTables, errors }
}
async getTableNames() {

View File

@ -10,6 +10,7 @@ import {
DatasourceFeature,
ConnectionInfo,
SourceName,
Schema,
} from "@budibase/types"
import {
getSqlQuery,
@ -17,6 +18,7 @@ import {
convertSqlType,
finaliseExternalTables,
SqlClient,
checkExternalTables,
} from "./utils"
import Sql from "./base/sql"
import { PostgresColumn } from "./base/types"
@ -145,8 +147,6 @@ class PostgresIntegration extends Sql implements DatasourcePlus {
private readonly config: PostgresConfig
private index: number = 1
private open: boolean
public tables: Record<string, ExternalTable> = {}
public schemaErrors: Record<string, string> = {}
COLUMNS_SQL!: string
@ -274,7 +274,7 @@ class PostgresIntegration extends Sql implements DatasourcePlus {
async buildSchema(
datasourceId: string,
entities: Record<string, ExternalTable>
) {
): Promise<Schema> {
let tableKeys: { [key: string]: string[] } = {}
await this.openConnection()
try {
@ -342,9 +342,9 @@ class PostgresIntegration extends Sql implements DatasourcePlus {
}
}
const final = finaliseExternalTables(tables, entities)
this.tables = final.tables
this.schemaErrors = final.errors
let finalizedTables = finaliseExternalTables(tables, entities)
let errors = checkExternalTables(finalizedTables)
return { tables: finalizedTables, errors }
} catch (err) {
// @ts-ignore
throw new Error(err)

View File

@ -4,13 +4,10 @@ import {
SearchFilters,
Datasource,
FieldType,
ExternalTable,
} from "@budibase/types"
import { DocumentType, SEPARATOR } from "../db/utils"
import {
BuildSchemaErrors,
InvalidColumns,
NoEmptyFilterStrings,
} from "../constants"
import { InvalidColumns, NoEmptyFilterStrings } from "../constants"
import { helpers } from "@budibase/shared-core"
const DOUBLE_SEPARATOR = `${SEPARATOR}${SEPARATOR}`
@ -266,9 +263,9 @@ export function shouldCopySpecialColumn(
function copyExistingPropsOver(
tableName: string,
table: Table,
entities: { [key: string]: any },
tableIds: [string]
) {
entities: Record<string, Table>,
tableIds: string[]
): Table {
if (entities && entities[tableName]) {
if (entities[tableName]?.primaryDisplay) {
table.primaryDisplay = entities[tableName].primaryDisplay
@ -295,42 +292,41 @@ function copyExistingPropsOver(
/**
* Look through the final table definitions to see if anything needs to be
* copied over from the old and if any errors have occurred mark them so
* that the user can be made aware.
* copied over from the old.
* @param tables The list of tables that have been retrieved from the external database.
* @param entities The old list of tables, if there was any to look for definitions in.
*/
export function finaliseExternalTables(
tables: { [key: string]: any },
entities: { [key: string]: any }
) {
const invalidColumns = Object.values(InvalidColumns)
let finalTables: { [key: string]: any } = {}
const errors: { [key: string]: string } = {}
// @ts-ignore
const tableIds: [string] = Object.values(tables).map(table => table._id)
tables: Record<string, ExternalTable>,
entities: Record<string, ExternalTable>
): Record<string, ExternalTable> {
let finalTables: Record<string, Table> = {}
const tableIds = Object.values(tables).map(table => table._id!)
for (let [name, table] of Object.entries(tables)) {
const schemaFields = Object.keys(table.schema)
// make sure every table has a key
if (table.primary == null || table.primary.length === 0) {
errors[name] = BuildSchemaErrors.NO_KEY
continue
} else if (
schemaFields.find(field =>
invalidColumns.includes(field as InvalidColumns)
)
) {
errors[name] = BuildSchemaErrors.INVALID_COLUMN
continue
}
// make sure all previous props have been added back
finalTables[name] = copyExistingPropsOver(name, table, entities, tableIds)
}
// sort the tables by name
finalTables = Object.entries(finalTables)
// sort the tables by name so the UI displays them in alphabetical order
return Object.entries(finalTables)
.sort(([a], [b]) => a.localeCompare(b))
.reduce((r, [k, v]) => ({ ...r, [k]: v }), {})
return { tables: finalTables, errors }
}
export function checkExternalTables(
tables: Record<string, ExternalTable>
): Record<string, string> {
const invalidColumns = Object.values(InvalidColumns) as string[]
const errors: Record<string, string> = {}
for (let [name, table] of Object.entries(tables)) {
if (!table.primary || table.primary.length === 0) {
errors[name] = "Table must have a primary key."
}
const schemaFields = Object.keys(table.schema)
if (schemaFields.find(f => invalidColumns.includes(f))) {
errors[name] = "Table contains invalid columns."
}
}
return errors
}
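
Every integration in this diff now composes these two helpers the same way; a minimal sketch of that shared shape (assuming both helpers are imported from this module):

import { ExternalTable, Schema } from "@budibase/types"
import { checkExternalTables, finaliseExternalTables } from "./utils"

function toSchema(
  tables: Record<string, ExternalTable>,
  entities: Record<string, ExternalTable>
): Schema {
  // copy old props over and sort, then validate the finalised set
  const finalTables = finaliseExternalTables(tables, entities)
  return { tables: finalTables, errors: checkExternalTables(finalTables) }
}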
/**

View File

@ -4,6 +4,8 @@ import {
Document,
Database,
RowValue,
DocumentType,
App,
} from "@budibase/types"
import backups from "../backups"
@ -12,9 +14,39 @@ export type FileAttributes = {
path: string
}
async function getNewAppMetadata(
tempDb: Database,
appDb: Database
): Promise<App> {
// static doc denoting app information
const docId = DocumentType.APP_METADATA
try {
const [tempMetadata, appMetadata] = await Promise.all([
tempDb.get<App>(docId),
appDb.get<App>(docId),
])
return {
...appMetadata,
automationErrors: undefined,
theme: tempMetadata.theme,
customTheme: tempMetadata.customTheme,
features: tempMetadata.features,
icon: tempMetadata.icon,
navigation: tempMetadata.navigation,
type: tempMetadata.type,
version: tempMetadata.version,
}
} catch (err: any) {
throw new Error(
`Unable to retrieve app metadata for import - ${err.message}`
)
}
}
function mergeUpdateAndDeleteDocuments(
updateDocs: Document[],
deleteDocs: Document[]
deleteDocs: Document[],
metadata: App
) {
// compress the documents to create and to delete (if same ID, then just update the rev)
const finalToDelete = []
@ -26,7 +58,7 @@ function mergeUpdateAndDeleteDocuments(
finalToDelete.push(deleteDoc)
}
}
return [...updateDocs, ...finalToDelete]
return [...updateDocs, ...finalToDelete, metadata]
}
async function removeImportableDocuments(db: Database) {
@ -90,12 +122,15 @@ export async function updateWithExport(
await backups.importApp(devId, tempDb, template, {
importObjStoreContents: false,
})
const newMetadata = await getNewAppMetadata(tempDb, appDb)
// get the documents to copy
const toUpdate = await getImportableDocuments(tempDb)
// clear out the old documents
const toDelete = await removeImportableDocuments(appDb)
// now bulk update documents - add new ones, delete old ones and update common ones
await appDb.bulkDocs(mergeUpdateAndDeleteDocuments(toUpdate, toDelete))
await appDb.bulkDocs(
mergeUpdateAndDeleteDocuments(toUpdate, toDelete, newMetadata)
)
} finally {
await tempDb.destroy()
}
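
To make the bulk write concrete, a small worked example with no ID overlap between the two lists (document values are illustrative):

const toUpdate = [{ _id: "screen_a", _rev: "2-x" }]
const toDelete = [{ _id: "screen_b", _rev: "1-y", _deleted: true }]
// mergeUpdateAndDeleteDocuments(toUpdate, toDelete, metadata) yields
// [{ _id: "screen_a", ... }, { _id: "screen_b", _deleted: true, ... }, metadata]
// so a single bulkDocs call applies updates, deletions and the metadata doc together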

View File

@ -0,0 +1,18 @@
import { App } from "@budibase/types"
import TestConfiguration from "../TestConfiguration"
import { TestAPI } from "./base"
export class ApplicationAPI extends TestAPI {
constructor(config: TestConfiguration) {
super(config)
}
get = async (appId: string): Promise<App> => {
const result = await this.request
.get(`/api/applications/${appId}/appPackage`)
.set(this.config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(200)
return result.body.application as App
}
}
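
Example usage in a test, mirroring the navigation assertions earlier in this diff (the appId accessor on TestConfiguration is assumed):

const appPackage = await config.api.application.get(config.getAppId())
expect(appPackage.navigation?.links?.[0].url).toBe("/blank")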

View File

@ -6,6 +6,7 @@ import { ViewV2API } from "./viewV2"
import { DatasourceAPI } from "./datasource"
import { LegacyViewAPI } from "./legacyView"
import { ScreenAPI } from "./screen"
import { ApplicationAPI } from "./application"
export default class API {
table: TableAPI
@ -15,6 +16,7 @@ export default class API {
permission: PermissionAPI
datasource: DatasourceAPI
screen: ScreenAPI
application: ApplicationAPI
constructor(config: TestConfiguration) {
this.table = new TableAPI(config)
@ -24,5 +26,6 @@ export default class API {
this.permission = new PermissionAPI(config)
this.datasource = new DatasourceAPI(config)
this.screen = new ScreenAPI(config)
this.application = new ApplicationAPI(config)
}
}

View File

@ -14,5 +14,5 @@ export function isSQL(datasource: Datasource): boolean {
SourceName.MYSQL,
SourceName.ORACLE,
]
return SQL.indexOf(datasource.source) !== -1
return SQL.indexOf(datasource.source) !== -1 || datasource.isSQL === true
}
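
With the new flag, a datasource can be treated as SQL even when its source name is not in the hard-coded list; an illustrative value (cast used to keep the sketch minimal):

const ds = { source: SourceName.REST, isSQL: true } as Datasource
isSQL(ds) // => true, via the new isSQL check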

View File

@ -2,7 +2,7 @@ import { Datasource } from "../../../documents"
export interface CreateDatasourceResponse {
datasource: Datasource
error?: any
errors: Record<string, string>
}
export interface UpdateDatasourceResponse {
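
This is the type-level counterpart of the datasource test change near the top of this diff: errors is now always present, so callers assert toEqual({}) rather than toBeUndefined(). A minimal conforming value:

declare const datasource: Datasource
const response: CreateDatasourceResponse = {
  datasource,
  errors: {}, // always an object now, possibly empty
}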

View File

@ -9,6 +9,7 @@ export interface Datasource extends Document {
// the config is defined by the schema
config?: Record<string, any>
plus?: boolean
isSQL?: boolean
entities?: {
[key: string]: Table
}

View File

@ -1,4 +1,4 @@
import { Table } from "../documents"
import { ExternalTable, Table } from "../documents"
export const PASSWORD_REPLACEMENT = "--secret-value--"
@ -140,6 +140,7 @@ export interface DatasourceConfig {
export interface Integration {
docs: string
plus?: boolean
isSQL?: boolean
auth?: { type: string }
features?: Partial<Record<DatasourceFeature, boolean>>
relationships?: boolean
@ -174,14 +175,19 @@ export interface IntegrationBase {
}): void
}
export interface DatasourcePlus extends IntegrationBase {
tables: Record<string, Table>
schemaErrors: Record<string, string>
export interface Schema {
tables: Record<string, ExternalTable>
errors: Record<string, string>
}
export interface DatasourcePlus extends IntegrationBase {
// if the datasource supports the use of bindings directly (to protect against SQL injection)
// this returns the format of the identifier
getBindingIdentifier(): string
getStringConcat(parts: string[]): string
buildSchema(datasourceId: string, entities: Record<string, Table>): any
buildSchema(
datasourceId: string,
entities: Record<string, ExternalTable>
): Promise<Schema>
getTableNames(): Promise<string[]>
}
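
A sketch of a caller under the new contract (not the server's actual call site): buildSchema now returns tables and errors together instead of writing them onto the instance as the old tables/schemaErrors fields did:

async function refreshSchema(plus: DatasourcePlus, datasourceId: string) {
  const { tables, errors } = await plus.buildSchema(datasourceId, {})
  for (const [table, message] of Object.entries(errors)) {
    console.warn(`schema problem in ${table}: ${message}`)
  }
  return tables
}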

View File

@ -14,13 +14,13 @@
"scripts": {
"prebuild": "rimraf dist/",
"build": "node ../../scripts/build.js",
"postbuild": "copyfiles -f ../../yarn.lock ./dist/",
"check:types": "tsc -p tsconfig.json --noEmit --paths null",
"build:dev": "yarn prebuild && tsc --build --watch --preserveWatchOutput",
"run:docker": "node dist/index.js",
"debug": "yarn build && node --expose-gc --inspect=9223 dist/index.js",
"run:docker:cluster": "pm2-runtime start pm2.config.js",
"predocker": "yarn build && cp ../../yarn.lock ./dist/",
"build:docker": "yarn predocker && docker build . -t worker-service --label version=$BUDIBASE_RELEASE_VERSION",
"build:docker": "yarn build && docker build . -t worker-service --label version=$BUDIBASE_RELEASE_VERSION",
"dev:stack:init": "node ./scripts/dev/manage.js init",
"dev:builder": "npm run dev:stack:init && nodemon",
"dev:built": "yarn run dev:stack:init && yarn run run:docker",
@ -51,9 +51,7 @@
"bcryptjs": "2.4.3",
"dd-trace": "3.13.2",
"dotenv": "8.6.0",
"elastic-apm-node": "3.38.0",
"global-agent": "3.0.0",
"got": "11.8.3",
"ical-generator": "4.1.0",
"joi": "17.6.0",
"koa": "2.13.4",

View File

@ -11,7 +11,7 @@ import { TestConfiguration } from "../../../../tests"
import { events } from "@budibase/backend-core"
// this test can 409 - retries reduce issues with this
jest.retryTimes(2)
jest.retryTimes(2, { logErrorsBeforeRetry: true })
jest.setTimeout(30000)
mocks.licenses.useScimIntegration()

scripts/build-single-image.sh (new executable file, 3 lines)
View File

@ -0,0 +1,3 @@
#!/bin/bash
yarn build --scope @budibase/server --scope @budibase/worker
docker build -f hosting/single/Dockerfile.v2 -t budibase:latest .

View File

@ -15,11 +15,7 @@ const { nodeExternalsPlugin } = require("esbuild-node-externals")
var argv = require("minimist")(process.argv.slice(2))
function runBuild(
entry,
outfile,
opts = { skipMeta: false, bundle: true, silent: false }
) {
function runBuild(entry, outfile) {
const isDev = process.env.NODE_ENV !== "production"
const tsconfig = argv["p"] || `tsconfig.build.json`
const tsconfigPathPluginContent = JSON.parse(
@ -40,16 +36,12 @@ function runBuild(
]
}
const metafile = !opts.skipMeta
const { bundle } = opts
const sharedConfig = {
entryPoints: [entry],
bundle,
bundle: true,
minify: !isDev,
sourcemap: isDev,
tsconfig,
format: opts?.forcedFormat,
plugins: [
TsconfigPathsPlugin({ tsconfig: tsconfigPathPluginContent }),
nodeExternalsPlugin(),
@ -58,10 +50,8 @@ function runBuild(
loader: {
".svelte": "copy",
},
metafile,
external: bundle
? ["deasync", "mock-aws-s3", "nock", "pino", "koa-pino-logger", "bull"]
: undefined,
metafile: true,
external: ["deasync", "mock-aws-s3", "nock", "bull"],
}
build({
@ -74,19 +64,16 @@ function runBuild(
fs.copyFileSync(file, `${process.cwd()}/dist/${path.basename(file)}`)
}
!opts.silent &&
console.log(
"\x1b[32m%s\x1b[0m",
`Build successfully in ${(Date.now() - start) / 1000} seconds`
)
})
if (metafile) {
fs.writeFileSync(
`dist/${path.basename(outfile)}.meta.json`,
JSON.stringify(result.metafile)
)
}
})
}
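
With the previously conditional options pinned, the resulting build is roughly equivalent to calling esbuild's documented JS API directly (paths illustrative):

import { build } from "esbuild"

await build({
  entryPoints: ["src/index.ts"],
  outfile: "dist/index.js",
  bundle: true,
  minify: process.env.NODE_ENV === "production",
  sourcemap: process.env.NODE_ENV !== "production",
  metafile: true,
  external: ["deasync", "mock-aws-s3", "nock", "bull"],
})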

View File

@ -0,0 +1,52 @@
#!/bin/bash
packages_to_remove=(
@budibase/backend-core
@budibase/bbui
@budibase/builder
@budibase/cli
@budibase/client
@budibase/frontend-core
@budibase/pro
@budibase/sdk
@budibase/server
@budibase/shared-core
# We cannot remove string-templates yet because it cannot be bundled by esbuild as a dependency
@budibase/string-templates
@budibase/types
@budibase/worker
)
root_package_json=$(cat "package.json")
process_package() {
local pkg="$1"
local package_json=$(cat "$pkg/package.json")
local has_changes=false
for package_name in "${packages_to_remove[@]}"; do
if echo "$package_json" | jq -e --arg package_name "$package_name" '.dependencies | has($package_name)' > /dev/null; then
package_json=$(echo "$package_json" | jq "del(.dependencies[\"$package_name\"])")
has_changes=true
fi
done
if [ "$has_changes" = true ]; then
echo "$package_json" > "$1/package.json"
fi
}
for pkg in $(echo "$root_package_json" | jq -r '.workspaces.packages[]' ); do
if [[ "$pkg" == *"*"* ]]; then
# Use find to iterate through immediate subdirectories
find "$pkg" -maxdepth 1 -type d -print | while read -r workspace_package; do
process_package "$workspace_package"
done
else
process_package "$pkg"
fi
done
echo "$root_package_json" | jq "del(.resolutions)" > "package.json"

yarn.lock (3183 lines changed)

File diff suppressed because it is too large