Merge remote-tracking branch 'origin/develop' into remove-datasource-from-schema-picker

Dean 2022-07-13 17:12:07 +01:00
commit c0d43fdcdf
454 changed files with 11955 additions and 9352 deletions

View File

@ -0,0 +1,62 @@
name: Deploy Budibase Single Container Image to DockerHub
on:
push:
branches:
- "omnibus-action"
- "develop"
- "master"
- "main"
env:
BASE_BRANCH: ${{ github.event.pull_request.base.ref}}
BRANCH: ${{ github.event.pull_request.head.ref }}
CI: true
PERSONAL_ACCESS_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
REGISTRY_URL: registry.hub.docker.com
jobs:
build:
name: "build"
runs-on: ubuntu-latest
strategy:
matrix:
node-version: [14.x]
steps:
- name: "Checkout"
uses: actions/checkout@v2
- name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@v1
with:
node-version: ${{ matrix.node-version }}
- name: Setup QEMU
uses: docker/setup-qemu-action@v1
- name: Setup Docker Buildx
id: buildx
uses: docker/setup-buildx-action@v1
- name: Install Pro
run: yarn install:pro $BRANCH $BASE_BRANCH
- name: Run Yarn
run: yarn
- name: Run Yarn Bootstrap
run: yarn bootstrap
- name: Run Yarn Lint
run: yarn lint
- name: Run Yarn Build
run: yarn build
- name: Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_API_KEY }}
- name: Get the latest release version
id: version
run: |
release_version=$(cat lerna.json | jq -r '.version')
echo $release_version
echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV
- name: Tag and release Budibase service docker image
uses: docker/build-push-action@v2
with:
context: .
push: true
platforms: linux/amd64,linux/arm64
tags: budibase/budibase,budibase/budibase:v${{ env.RELEASE_VERSION }}
file: ./hosting/single/Dockerfile

View File

@ -21,7 +21,8 @@ env:
# Posthog token used by ui at build time
POSTHOG_TOKEN: phc_uDYOfnFt6wAbBAXkC6STjcrTpAFiWIhqgFcsC1UVO5F
INTERCOM_TOKEN: ${{ secrets.INTERCOM_TOKEN }}
PERSONAL_ACCESS_TOKEN : ${{ secrets.PERSONAL_ACCESS_TOKEN }}
PERSONAL_ACCESS_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN }}
FEATURE_PREVIEW_URL: https://budirelease.live
jobs:
release:
@ -124,4 +125,4 @@ jobs:
with:
webhook-url: ${{ secrets.PROD_DEPLOY_WEBHOOK_URL }}
content: "Release Env Deployment Complete: ${{ env.RELEASE_VERSION }} deployed to Budibase Release Env."
embed-title: ${{ env.RELEASE_VERSION }}
embed-title: ${{ env.RELEASE_VERSION }}

View File

@ -16,6 +16,16 @@ on:
- 'package.json'
- 'yarn.lock'
workflow_dispatch:
inputs:
versioning:
type: choice
description: "Versioning type: patch, minor, major"
default: patch
options:
- patch
- minor
- major
required: true
env:
# Posthog token used by ui at build time
@ -58,6 +68,7 @@ jobs:
- name: Publish budibase packages to NPM
env:
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
RELEASE_VERSION_TYPE: ${{ github.event.inputs.versioning }}
run: |
# setup the username and email. I tend to use 'GitHub Actions Bot' with no email by default
git config --global user.name "Budibase Release Bot"

14
.vscode/settings.json vendored
View File

@ -3,5 +3,17 @@
"editor.codeActionsOnSave": {
"source.fixAll": true
},
"editor.defaultFormatter": "svelte.svelte-vscode"
"editor.defaultFormatter": "svelte.svelte-vscode",
"[json]": {
"editor.defaultFormatter": "vscode.json-language-features"
},
"[javascript]": {
"editor.defaultFormatter": "esbenp.prettier-vscode"
},
"debug.javascript.terminalOptions": {
"skipFiles": [
"${workspaceFolder}/packages/backend-core/node_modules/**",
"<node_internals>/**"
]
},
}

1
.yarnrc Normal file
View File

@ -0,0 +1 @@
network-timeout 100000

View File

@ -11,8 +11,8 @@ sources:
- https://github.com/Budibase/budibase
- https://budibase.com
type: application
version: 0.2.10
appVersion: 1.0.48
version: 0.2.11
appVersion: 1.0.214
dependencies:
- name: couchdb
version: 3.6.1

View File

@ -122,6 +122,14 @@ spec:
value: {{ .Values.globals.automationMaxIterations | quote }}
- name: TENANT_FEATURE_FLAGS
value: {{ .Values.globals.tenantFeatureFlags | quote }}
{{ if .Values.globals.bbAdminUserEmail }}
- name: BB_ADMIN_USER_EMAIL
value: {{ .Values.globals.bbAdminUserEmail | quote }}
{{ end }}
{{ if .Values.globals.bbAdminUserPassword }}
- name: BB_ADMIN_USER_PASSWORD
value: {{ .Values.globals.bbAdminUserPassword | quote }}
{{ end }}
image: budibase/apps:{{ .Values.globals.appVersion }}
imagePullPolicy: Always

View File

@ -103,7 +103,7 @@ globals:
google:
clientId: ""
secret: ""
automationMaxIterations: "500"
automationMaxIterations: "200"
createSecrets: true # creates an internal API key, JWT secrets and redis password for you

View File

@ -18,4 +18,8 @@ MINIO_PORT=4004
COUCH_DB_PORT=4005
REDIS_PORT=6379
WATCHTOWER_PORT=6161
BUDIBASE_ENVIRONMENT=PRODUCTION
BUDIBASE_ENVIRONMENT=PRODUCTION
# An admin user can be automatically created initially if these are set
BB_ADMIN_USER_EMAIL=
BB_ADMIN_USER_PASSWORD=

View File

@ -23,6 +23,8 @@ services:
ENABLE_ANALYTICS: "true"
REDIS_URL: redis-service:6379
REDIS_PASSWORD: ${REDIS_PASSWORD}
BB_ADMIN_USER_EMAIL: ${BB_ADMIN_USER_EMAIL}
BB_ADMIN_USER_PASSWORD: ${BB_ADMIN_USER_PASSWORD}
depends_on:
- worker-service
- redis-service

View File

@ -19,3 +19,7 @@ COUCH_DB_PORT=4005
REDIS_PORT=6379
WATCHTOWER_PORT=6161
BUDIBASE_ENVIRONMENT=PRODUCTION
# An admin user can be automatically created initially if these are set
BB_ADMIN_USER_EMAIL=
BB_ADMIN_USER_PASSWORD=

View File

@ -10,15 +10,14 @@ certbot certonly --webroot --webroot-path="/var/www/html" \
if (($? != 0)); then
echo "ERROR: certbot request failed for $CUSTOM_DOMAIN use http on port 80 - exiting"
nginx -s stop
exit 1
else
cp /app/letsencrypt/options-ssl-nginx.conf /etc/letsencrypt/options-ssl-nginx.conf
cp /app/letsencrypt/ssl-dhparams.pem /etc/letsencrypt/ssl-dhparams.pem
cp /app/letsencrypt/nginx-ssl.conf /etc/nginx/sites-available/nginx-ssl.conf
sed -i 's/CUSTOM_DOMAIN/$CUSTOM_DOMAIN/g' /etc/nginx/sites-available/nginx-ssl.conf
sed -i "s/CUSTOM_DOMAIN/$CUSTOM_DOMAIN/g" /etc/nginx/sites-available/nginx-ssl.conf
ln -s /etc/nginx/sites-available/nginx-ssl.conf /etc/nginx/sites-enabled/nginx-ssl.conf
echo "INFO: restart nginx after certbot request"
nginx -s reload
/etc/init.d/nginx restart
fi

View File

@ -6,6 +6,7 @@ server {
ssl_certificate_key /etc/letsencrypt/live/CUSTOM_DOMAIN/privkey.pem;
include /etc/letsencrypt/options-ssl-nginx.conf;
ssl_dhparam /etc/letsencrypt/ssl-dhparams.pem;
client_max_body_size 1000m;
ignore_invalid_headers off;
proxy_buffering off;
@ -91,4 +92,5 @@ server {
gzip_proxied any;
gzip_comp_level 6;
gzip_types text/plain text/css text/xml application/json application/javascript application/rss+xml application/atom+xml image/svg+xml;
}

View File

@ -0,0 +1,17 @@
#!/bin/bash
echo ${TARGETBUILD} > /buildtarget.txt
if [[ "${TARGETBUILD}" = "aas" ]]; then
# Azure App Service uses /home for persistent data & SSH on port 2222
mkdir -p /home/budibase/{minio,couchdb}
mkdir -p /home/budibase/couchdb/data
chown -R couchdb:couchdb /home/budibase/couchdb/
apt update
apt-get install -y openssh-server
sed -i 's#dir=/opt/couchdb/data/search#dir=/home/budibase/couchdb/data/search#' /opt/clouseau/clouseau.ini
sed -i 's#/minio/minio server /minio &#/minio/minio server /home/budibase/minio &#' /runner.sh
sed -i 's#database_dir = ./data#database_dir = /home/budibase/couchdb/data#' /opt/couchdb/etc/default.ini
sed -i 's#view_index_dir = ./data#view_index_dir = /home/budibase/couchdb/data#' /opt/couchdb/etc/default.ini
sed -i "s/#Port 22/Port 2222/" /etc/ssh/sshd_config
/etc/init.d/ssh restart
fi

View File

@ -25,6 +25,13 @@ if [[ $(redis-cli -a $REDIS_PASSWORD --no-auth-warning ping) != 'PONG' ]]; then
healthy=false
fi
# minio, clouseau,
nginx -t -q
NGINX_STATUS=$?
if [[ $NGINX_STATUS -gt 0 ]]; then
echo 'ERROR: Nginx config problem';
healthy=false
fi
if [ $healthy == true ]; then
exit 0

View File

@ -19,8 +19,12 @@ ADD packages/worker .
RUN node /pinVersions.js && yarn && yarn build && /cleanup.sh
FROM couchdb:3.2.1
# TARGETARCH can be amd64 or arm e.g. docker build --build-arg TARGETARCH=amd64
ARG TARGETARCH=amd64
#TARGETBUILD can be set to single (for single docker image) or aas (for azure app service)
# e.g. docker build --build-arg TARGETBUILD=aas ....
ARG TARGETBUILD=single
ENV TARGETBUILD $TARGETBUILD
COPY --from=build /app /app
COPY --from=build /worker /worker
@ -30,26 +34,32 @@ ENV \
ARCHITECTURE=amd \
BUDIBASE_ENVIRONMENT=PRODUCTION \
CLUSTER_PORT=80 \
COUCHDB_PASSWORD=budibase \
COUCHDB_USER=budibase \
COUCH_DB_URL=http://budibase:budibase@localhost:5984 \
CUSTOM_DOMAIN=budi001.custom.com \
# CUSTOM_DOMAIN=budi001.custom.com \
DEPLOYMENT_ENVIRONMENT=docker \
INTERNAL_API_KEY=budibase \
JWT_SECRET=testsecret \
MINIO_ACCESS_KEY=budibase \
MINIO_SECRET_KEY=budibase \
MINIO_URL=http://localhost:9000 \
POSTHOG_TOKEN=phc_fg5I3nDOf6oJVMHSaycEhpPdlgS8rzXG2r6F2IpxCHS \
REDIS_PASSWORD=budibase \
REDIS_URL=localhost:6379 \
SELF_HOSTED=1 \
TARGETBUILD=$TARGETBUILD \
WORKER_PORT=4002 \
WORKER_URL=http://localhost:4002
WORKER_URL=http://localhost:4002 \
APPS_URL=http://localhost:4001
# These secret env variables are generated by the runner at startup
# their values can be overridden by the user, they will be written
# to the .env file in the /data directory for use later on
# REDIS_PASSWORD=budibase \
# COUCHDB_PASSWORD=budibase \
# COUCHDB_USER=budibase \
# COUCH_DB_URL=http://budibase:budibase@localhost:5984 \
# INTERNAL_API_KEY=budibase \
# JWT_SECRET=testsecret \
# MINIO_ACCESS_KEY=budibase \
# MINIO_SECRET_KEY=budibase \
# install base dependencies
RUN apt-get update && \
apt-get install -y software-properties-common wget nginx && \
apt-get install -y software-properties-common wget nginx uuid-runtime && \
apt-add-repository 'deb http://security.debian.org/debian-security stretch/updates main' && \
apt-get update
@ -61,7 +71,8 @@ RUN curl -sL https://deb.nodesource.com/setup_16.x -o /tmp/nodesource_setup.sh &
npm install --global yarn pm2
# setup nginx
ADD hosting/single/nginx.conf /etc/nginx
ADD hosting/single/nginx/nginx.conf /etc/nginx
ADD hosting/single/nginx/nginx-default-site.conf /etc/nginx/sites-enabled/default
RUN mkdir -p /var/log/nginx && \
touch /var/log/nginx/error.log && \
touch /var/run/nginx.pid
@ -80,13 +91,13 @@ RUN wget https://github.com/cloudant-labs/clouseau/releases/download/2.21.0/clou
WORKDIR /opt/clouseau
RUN mkdir ./bin
ADD hosting/single/clouseau ./bin/
ADD hosting/single/log4j.properties hosting/single/clouseau.ini ./
ADD hosting/single/clouseau/clouseau ./bin/
ADD hosting/single/clouseau/log4j.properties hosting/single/clouseau/clouseau.ini ./
RUN chmod +x ./bin/clouseau
# setup CouchDB
WORKDIR /opt/couchdb
ADD hosting/single/vm.args ./etc/
ADD hosting/single/couch/vm.args hosting/single/couch/local.ini ./etc/
# setup minio
WORKDIR /minio
@ -100,6 +111,12 @@ RUN chmod +x ./runner.sh
ADD hosting/scripts/healthcheck.sh .
RUN chmod +x ./healthcheck.sh
ADD hosting/scripts/build-target-paths.sh .
RUN chmod +x ./build-target-paths.sh
# For Azure App Service install SSH & point data locations to /home
RUN /build-target-paths.sh
# cleanup cache
RUN yarn cache clean -f

View File

@ -4,7 +4,6 @@
As an alternative to running several docker containers via docker-compose, the files under ./hosting/single can be used to build a docker image containing all of the Budibase components (minio, couch, clouseau etc).
We call this the 'single image' container as the Dockerfile adds all the components to a single docker image.
## Usage
- Amend Environment Variables
@ -22,9 +21,9 @@ If you have other arrangements for a proxy in front of the single image containe
We would suggest building the image with 6GB of RAM and 20GB of free disk space for build artifacts. The resulting image will use approx. 2GB of disk space.
### Build the Image
The guidance below is based on building the Budibase single image on Debian 11. If you use another distro or OS you will need to amend the commands to suit.
Install Node
Budibase requires a recent version of node (14+) than is in the base Debian repos so:
The guidance below is based on building the Budibase single image on Debian 11 and AlmaLinux 8. If you use another distro or OS you will need to amend the commands to suit.
#### Install Node
Budibase requires a more recent version of node (14+) than is available in the base Debian repos so:
```
curl -sL https://deb.nodesource.com/setup_16.x | sudo bash -
@ -35,25 +34,26 @@ Install yarn and lerna:
```
npm install -g yarn jest lerna
```
Install Docker
#### Install Docker
```
apt install -y docker.io
apt install -y python3-pip
pip3 install docker-compose
```
Check the version of each installed tool (e.g. with the commands shown after this list). This process was tested with the version numbers below, so YMMV if using anything else:
- Docker: 20.10.5
- docker-compose: 1.29.2
- node: 16.15.1
- yarn: 1.22.19
- lerna: 5.1.4
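A quick way to confirm the installed versions (a generic sketch; any equivalent commands will do):
```
docker --version
docker-compose --version
node --version
yarn --version
lerna --version
```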
#### Get the Code
Clone the Budibase repo
```
git clone https://github.com/Budibase/budibase.git
cd budibase
```
#### Setup Node
Node setup:
```
node ./hosting/scripts/setup.js
@ -61,15 +61,20 @@ yarn
yarn bootstrap
yarn build
```
Build the image from the Dockerfile:
#### Build Image
The following yarn command does some prep and then runs the docker build command:
```
yarn build:docker:single
```
If the docker build step fails run that step again manually with:
If the docker build step fails try running that step again manually with:
```
docker build --no-cache -t budibase:latest -f ./hosting/single/Dockerfile .
docker build --build-arg TARGETARCH=amd --no-cache -t budibase:latest -f ./hosting/single/Dockerfile .
```
#### Azure App Services
Azure has some specific requirements for running a container in its App Service: SSH must be available on port 2222 and data must be stored under /home. If you would like to build a Budibase container for Azure App Service, add the build argument shown below, setting it to 'aas'. You can also remove the CUSTOM_DOMAIN env variable from the Dockerfile, as Azure terminates SSL before requests reach the container.
```
docker build --build-arg TARGETARCH=amd --build-arg TARGETBUILD=aas -t budibase:latest -f ./hosting/single/Dockerfile .
```
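If you want to smoke-test an 'aas' build locally before deploying it, one option (an assumption, not part of the official workflow) is to publish the SSH port alongside HTTP:
```
# hypothetical local test of an 'aas' build; on Azure these port mappings are handled by App Service
docker run -d -p 8080:80 -p 2222:2222 budibase:latest
```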
### Run the Container
@ -85,6 +90,9 @@ When the container runs you should be able to access the container over http at
When the Budibase UI appears you will be prompted to create an account to get started.
### Podman
The single image container builds fine when using podman in place of docker. You may be prompted for the registry to use for the CouchDB base image, and the HEALTHCHECK instruction is not OCI compliant so it is ignored.
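As a minimal sketch (assuming podman's docker-compatible CLI), the equivalent build and run commands would be:
```
podman build --build-arg TARGETARCH=amd -t budibase:latest -f ./hosting/single/Dockerfile .
podman run -d -p 8080:80 budibase:latest
```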
### Check
There are many things that could go wrong, so if your container is not building or running as expected, please check the following before opening a support issue.
Verify the healthcheck status of the container:
@ -96,7 +104,6 @@ Check the container logs:
docker logs budibase
```
### Support
This single image build is still a work in progress, so if you open an issue please provide the following information:
- The OS and OS version you are building on

View File

@ -7,7 +7,7 @@ name=clouseau@127.0.0.1
cookie=monster
; the path where you would like to store the search index files
dir=/opt/couchdb/data/search
dir=/data/search
; the number of search indexes that can be open simultaneously
max_indexes_open=500

View File

@ -0,0 +1,5 @@
; CouchDB Configuration Settings
[couchdb]
database_dir = /data/couch/dbs
view_index_dir = /data/couch/views

View File

@ -1,125 +0,0 @@
user www-data www-data;
error_log /var/log/nginx/error.log;
pid /var/run/nginx.pid;
worker_processes auto;
worker_rlimit_nofile 8192;
events {
worker_connections 1024;
}
http {
limit_req_zone $binary_remote_addr zone=ratelimit:10m rate=20r/s;
proxy_set_header Host $host;
charset utf-8;
sendfile on;
tcp_nopush on;
tcp_nodelay on;
server_tokens off;
types_hash_max_size 2048;
# buffering
client_header_buffer_size 1k;
client_max_body_size 20M;
ignore_invalid_headers off;
proxy_buffering off;
log_format main '$remote_addr - $remote_user [$time_local] "$request" '
'$status $body_bytes_sent "$http_referer" '
'"$http_user_agent" "$http_x_forwarded_for"';
map $http_upgrade $connection_upgrade {
default "upgrade";
}
server {
listen 80 default_server;
listen [::]:80 default_server;
server_name _;
client_max_body_size 1000m;
ignore_invalid_headers off;
proxy_buffering off;
# port_in_redirect off;
location ^~ /.well-known/acme-challenge/ {
default_type "text/plain";
root /var/www/html;
break;
}
location = /.well-known/acme-challenge/ {
return 404;
}
location /app {
proxy_pass http://127.0.0.1:4001;
}
location = / {
proxy_pass http://127.0.0.1:4001;
}
location ~ ^/(builder|app_) {
proxy_http_version 1.1;
proxy_set_header Connection $connection_upgrade;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_pass http://127.0.0.1:4001;
}
location ~ ^/api/(system|admin|global)/ {
proxy_pass http://127.0.0.1:4002;
}
location /worker/ {
proxy_pass http://127.0.0.1:4002;
rewrite ^/worker/(.*)$ /$1 break;
}
location /api/ {
# calls to the API are rate limited with bursting
limit_req zone=ratelimit burst=20 nodelay;
# 120s timeout on API requests
proxy_read_timeout 120s;
proxy_connect_timeout 120s;
proxy_send_timeout 120s;
proxy_http_version 1.1;
proxy_set_header Connection $connection_upgrade;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_pass http://127.0.0.1:4001;
}
location /db/ {
proxy_pass http://127.0.0.1:5984;
rewrite ^/db/(.*)$ /$1 break;
}
location / {
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_connect_timeout 300;
proxy_http_version 1.1;
proxy_set_header Connection "";
chunked_transfer_encoding off;
proxy_pass http://127.0.0.1:9000;
}
client_header_timeout 60;
client_body_timeout 60;
keepalive_timeout 60;
# gzip
gzip on;
gzip_vary on;
gzip_proxied any;
gzip_comp_level 6;
gzip_types text/plain text/css text/xml application/json application/javascript application/rss+xml application/atom+xml image/svg+xml;
}
}

View File

@ -0,0 +1,91 @@
server {
listen 80 default_server;
listen [::]:80 default_server;
server_name _;
client_max_body_size 1000m;
ignore_invalid_headers off;
proxy_buffering off;
# port_in_redirect off;
location ^~ /.well-known/acme-challenge/ {
default_type "text/plain";
root /var/www/html;
break;
}
location = /.well-known/acme-challenge/ {
return 404;
}
location /app {
proxy_pass http://127.0.0.1:4001;
}
location = / {
proxy_pass http://127.0.0.1:4001;
}
location ~ ^/(builder|app_) {
proxy_http_version 1.1;
proxy_set_header Connection $connection_upgrade;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_pass http://127.0.0.1:4001;
}
location ~ ^/api/(system|admin|global)/ {
proxy_pass http://127.0.0.1:4002;
}
location /worker/ {
proxy_pass http://127.0.0.1:4002;
rewrite ^/worker/(.*)$ /$1 break;
}
location /api/ {
# calls to the API are rate limited with bursting
limit_req zone=ratelimit burst=20 nodelay;
# 120s timeout on API requests
proxy_read_timeout 120s;
proxy_connect_timeout 120s;
proxy_send_timeout 120s;
proxy_http_version 1.1;
proxy_set_header Connection $connection_upgrade;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_pass http://127.0.0.1:4001;
}
location /db/ {
proxy_pass http://127.0.0.1:5984;
rewrite ^/db/(.*)$ /$1 break;
}
location / {
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_connect_timeout 300;
proxy_http_version 1.1;
proxy_set_header Connection "";
chunked_transfer_encoding off;
proxy_pass http://127.0.0.1:9000;
}
client_header_timeout 60;
client_body_timeout 60;
keepalive_timeout 60;
# gzip
gzip on;
gzip_vary on;
gzip_proxied any;
gzip_comp_level 6;
gzip_types text/plain text/css text/xml application/json application/javascript application/rss+xml application/atom+xml image/svg+xml;
}

View File

@ -0,0 +1,37 @@
user www-data www-data;
error_log /var/log/nginx/error.log;
pid /var/run/nginx.pid;
worker_processes auto;
worker_rlimit_nofile 8192;
events {
worker_connections 1024;
}
http {
limit_req_zone $binary_remote_addr zone=ratelimit:10m rate=20r/s;
proxy_set_header Host $host;
charset utf-8;
sendfile on;
tcp_nopush on;
tcp_nodelay on;
server_tokens off;
types_hash_max_size 2048;
# buffering
client_header_buffer_size 1k;
client_max_body_size 20M;
ignore_invalid_headers off;
proxy_buffering off;
log_format main '$remote_addr - $remote_user [$time_local] "$request" '
'$status $body_bytes_sent "$http_referer" '
'"$http_user_agent" "$http_x_forwarded_for"';
map $http_upgrade $connection_upgrade {
default "upgrade";
}
include /etc/nginx/sites-enabled/*;
}

View File

@ -1,6 +1,34 @@
#!/bin/bash
declare -a ENV_VARS=("COUCHDB_USER" "COUCHDB_PASSWORD" "MINIO_ACCESS_KEY" "MINIO_SECRET_KEY" "INTERNAL_API_KEY" "JWT_SECRET" "REDIS_PASSWORD")
if [ -f "/data/.env" ]; then
export $(cat /data/.env | xargs)
fi
# first randomise any unset environment variables
for ENV_VAR in "${ENV_VARS[@]}"
do
temp=$(eval "echo \$$ENV_VAR")
if [[ -z "${temp}" ]]; then
eval "export $ENV_VAR=$(uuidgen | sed -e 's/-//g')"
fi
done
if [[ -z "${COUCH_DB_URL}" ]]; then
export COUCH_DB_URL=http://$COUCHDB_USER:$COUCHDB_PASSWORD@localhost:5984
fi
if [ ! -f "/data/.env" ]; then
touch /data/.env
for ENV_VAR in "${ENV_VARS[@]}"
do
temp=$(eval "echo \$$ENV_VAR")
echo "$ENV_VAR=$temp" >> /data/.env
done
fi
# make these directories in the runner, in case of a mount
mkdir -p /data/couch/dbs /data/couch/views
chown couchdb:couchdb /data/couch /data/couch/dbs /data/couch/views
redis-server --requirepass $REDIS_PASSWORD &
/opt/clouseau/bin/clouseau &
/minio/minio server /minio &
/minio/minio server /data/minio &
/docker-entrypoint.sh /opt/couchdb/bin/couchdb &
/etc/init.d/nginx restart
if [[ ! -z "${CUSTOM_DOMAIN}" ]]; then

View File

@ -1,4 +1,4 @@
#!/bin/bash
id=$(docker run -t -d -p 10000:10000 budibase:latest)
id=$(docker run -t -d -p 8080:80 budibase:latest)
docker exec -it $id bash
docker kill $id

View File

@ -1,5 +1,5 @@
{
"version": "1.0.207-alpha.8",
"version": "1.0.220-alpha.4",
"npmClient": "yarn",
"packages": [
"packages/*"

View File

@ -25,7 +25,7 @@
"bootstrap": "lerna bootstrap && lerna link && ./scripts/link-dependencies.sh",
"build": "lerna run build",
"build:dev": "lerna run prebuild && tsc --build --watch --preserveWatchOutput",
"release": "lerna publish patch --yes --force-publish && yarn release:pro",
"release": "lerna publish ${RELEASE_VERSION_TYPE:-patch} --yes --force-publish && yarn release:pro",
"release:develop": "lerna publish prerelease --yes --force-publish --dist-tag develop && yarn release:pro:develop",
"release:pro": "bash scripts/pro/release.sh",
"release:pro:develop": "bash scripts/pro/release.sh develop",
@ -40,7 +40,8 @@
"dev": "yarn run kill-all && lerna link && lerna run --parallel dev:builder --concurrency 1",
"dev:noserver": "yarn run kill-builder && lerna link && lerna run dev:stack:up && lerna run --parallel dev:builder --concurrency 1 --ignore @budibase/backend-core --ignore @budibase/server --ignore @budibase/worker",
"dev:server": "yarn run kill-server && lerna run --parallel dev:builder --concurrency 1 --scope @budibase/backend-core --scope @budibase/worker --scope @budibase/server",
"test": "lerna run test",
"test": "lerna run test && yarn test:pro",
"test:pro": "bash scripts/pro/test.sh",
"lint:eslint": "eslint packages",
"lint:prettier": "prettier --check \"packages/**/*.{js,ts,svelte}\"",
"lint": "yarn run lint:eslint && yarn run lint:prettier",
@ -83,4 +84,4 @@
"install:pro": "bash scripts/pro/install.sh",
"dep:clean": "yarn clean && yarn bootstrap"
}
}
}

View File

@ -3,5 +3,7 @@ const generic = require("./src/cache/generic")
module.exports = {
user: require("./src/cache/user"),
app: require("./src/cache/appMetadata"),
writethrough: require("./src/cache/writethrough"),
...generic,
cache: generic,
}

View File

@ -1,6 +1,6 @@
{
"name": "@budibase/backend-core",
"version": "1.0.207-alpha.8",
"version": "1.0.220-alpha.4",
"description": "Budibase backend core libraries used in server and worker",
"main": "dist/src/index.js",
"types": "dist/src/index.d.ts",
@ -20,7 +20,7 @@
"test:watch": "jest --watchAll"
},
"dependencies": {
"@budibase/types": "^1.0.207-alpha.8",
"@budibase/types": "^1.0.220-alpha.4",
"@techpass/passport-openidconnect": "0.3.2",
"aws-sdk": "2.1030.0",
"bcrypt": "5.0.1",
@ -36,6 +36,7 @@
"passport-google-oauth": "2.0.0",
"passport-jwt": "4.0.0",
"passport-local": "1.0.0",
"passport-oauth2-refresh": "^2.1.0",
"posthog-node": "1.3.0",
"pouchdb": "7.3.0",
"pouchdb-find": "7.2.2",
@ -58,11 +59,13 @@
]
},
"devDependencies": {
"@budibase/types": "^1.0.219",
"@shopify/jest-koa-mocks": "3.1.5",
"@types/jest": "27.5.1",
"@types/koa": "2.0.52",
"@types/node": "14.18.20",
"@types/node-fetch": "2.6.1",
"@types/pouchdb": "6.4.0",
"@types/redlock": "4.0.3",
"@types/semver": "7.3.7",
"@types/tar-fs": "2.0.1",

View File

@ -1,5 +1,5 @@
module.exports = {
Client: require("./src/redis"),
utils: require("./src/redis/utils"),
clients: require("./src/redis/authRedis"),
clients: require("./src/redis/init"),
}

View File

@ -2,6 +2,9 @@ const passport = require("koa-passport")
const LocalStrategy = require("passport-local").Strategy
const JwtStrategy = require("passport-jwt").Strategy
const { getGlobalDB } = require("./tenancy")
const refresh = require("passport-oauth2-refresh")
const { Configs } = require("./constants")
const { getScopedConfig } = require("./db/utils")
const {
jwt,
local,
@ -12,10 +15,13 @@ const {
tenancy,
appTenancy,
authError,
ssoCallbackUrl,
csrf,
internalApi,
} = require("./middleware")
const { invalidateUser } = require("./cache/user")
// Strategies
passport.use(new LocalStrategy(local.options, local.authenticate))
passport.use(new JwtStrategy(jwt.options, jwt.authenticate))
@ -34,6 +40,124 @@ passport.deserializeUser(async (user, done) => {
}
})
async function refreshOIDCAccessToken(db, chosenConfig, refreshToken) {
const callbackUrl = await oidc.getCallbackUrl(db, chosenConfig)
let enrichedConfig
let strategy
try {
enrichedConfig = await oidc.fetchStrategyConfig(chosenConfig, callbackUrl)
if (!enrichedConfig) {
throw new Error("OIDC Config contents invalid")
}
strategy = await oidc.strategyFactory(enrichedConfig)
} catch (err) {
console.error(err)
throw new Error("Could not refresh OAuth Token")
}
refresh.use(strategy, {
setRefreshOAuth2() {
return strategy._getOAuth2Client(enrichedConfig)
},
})
return new Promise(resolve => {
refresh.requestNewAccessToken(
Configs.OIDC,
refreshToken,
(err, accessToken, refreshToken, params) => {
resolve({ err, accessToken, refreshToken, params })
}
)
})
}
async function refreshGoogleAccessToken(db, config, refreshToken) {
let callbackUrl = await google.getCallbackUrl(db, config)
let strategy
try {
strategy = await google.strategyFactory(config, callbackUrl)
} catch (err) {
console.error(err)
throw new Error("Error constructing OIDC refresh strategy", err)
}
refresh.use(strategy)
return new Promise(resolve => {
refresh.requestNewAccessToken(
Configs.GOOGLE,
refreshToken,
(err, accessToken, refreshToken, params) => {
resolve({ err, accessToken, refreshToken, params })
}
)
})
}
async function refreshOAuthToken(refreshToken, configType, configId) {
const db = getGlobalDB()
const config = await getScopedConfig(db, {
type: configType,
group: {},
})
let chosenConfig = {}
let refreshResponse
if (configType === Configs.OIDC) {
// configId - retrieved from cookie.
chosenConfig = config.configs.filter(c => c.uuid === configId)[0]
if (!chosenConfig) {
throw new Error("Invalid OIDC configuration")
}
refreshResponse = await refreshOIDCAccessToken(
db,
chosenConfig,
refreshToken
)
} else {
chosenConfig = config
refreshResponse = await refreshGoogleAccessToken(
db,
chosenConfig,
refreshToken
)
}
return refreshResponse
}
async function updateUserOAuth(userId, oAuthConfig) {
const details = {
accessToken: oAuthConfig.accessToken,
refreshToken: oAuthConfig.refreshToken,
}
try {
const db = getGlobalDB()
const dbUser = await db.get(userId)
//Do not overwrite the refresh token if a valid one is not provided.
if (typeof details.refreshToken !== "string") {
delete details.refreshToken
}
dbUser.oauth2 = {
...dbUser.oauth2,
...details,
}
await db.put(dbUser)
await invalidateUser(userId)
} catch (e) {
console.error("Could not update OAuth details for current user", e)
}
}
module.exports = {
buildAuthMiddleware: authenticated,
passport,
@ -46,4 +170,7 @@ module.exports = {
authError,
buildCsrfMiddleware: csrf,
internalApi,
refreshOAuthToken,
updateUserOAuth,
ssoCallbackUrl,
}

View File

@ -1,4 +1,4 @@
const redis = require("../redis/authRedis")
const redis = require("../redis/init")
const { doWithDB } = require("../db")
const { DocumentTypes } = require("../db/constants")

View File

@ -0,0 +1,92 @@
import { getTenantId } from "../../context"
import redis from "../../redis/init"
import RedisWrapper from "../../redis"
function generateTenantKey(key: string) {
const tenantId = getTenantId()
return `${key}:${tenantId}`
}
export = class BaseCache {
client: RedisWrapper | undefined
constructor(client: RedisWrapper | undefined = undefined) {
this.client = client
}
async getClient() {
return !this.client ? await redis.getCacheClient() : this.client
}
async keys(pattern: string) {
const client = await this.getClient()
return client.keys(pattern)
}
/**
* Read only from the cache.
*/
async get(key: string, opts = { useTenancy: true }) {
key = opts.useTenancy ? generateTenantKey(key) : key
const client = await this.getClient()
return client.get(key)
}
/**
* Write to the cache.
*/
async store(
key: string,
value: any,
ttl: number | null = null,
opts = { useTenancy: true }
) {
key = opts.useTenancy ? generateTenantKey(key) : key
const client = await this.getClient()
await client.store(key, value, ttl)
}
/**
* Remove from cache.
*/
async delete(key: string, opts = { useTenancy: true }) {
key = opts.useTenancy ? generateTenantKey(key) : key
const client = await this.getClient()
return client.delete(key)
}
/**
* Read from the cache. Write to the cache if not exists.
*/
async withCache(
key: string,
ttl: number,
fetchFn: any,
opts = { useTenancy: true }
) {
const cachedValue = await this.get(key, opts)
if (cachedValue) {
return cachedValue
}
try {
const fetchedValue = await fetchFn()
await this.store(key, fetchedValue, ttl, opts)
return fetchedValue
} catch (err) {
console.error("Error fetching before cache - ", err)
throw err
}
}
async bustCache(key: string, opts = { client: null }) {
const client = await this.getClient()
try {
await client.delete(generateTenantKey(key))
} catch (err) {
console.error("Error busting cache - ", err)
throw err
}
}
}

View File

@ -1,5 +1,6 @@
const redis = require("../redis/authRedis")
const { getTenantId } = require("../context")
const BaseCache = require("./base")
const GENERIC = new BaseCache()
exports.CacheKeys = {
CHECKLIST: "checklist",
@ -16,67 +17,13 @@ exports.TTL = {
ONE_DAY: 86400,
}
function generateTenantKey(key) {
const tenantId = getTenantId()
return `${key}:${tenantId}`
function performExport(funcName) {
return (...args) => GENERIC[funcName](...args)
}
exports.keys = async pattern => {
const client = await redis.getCacheClient()
return client.keys(pattern)
}
/**
* Read only from the cache.
*/
exports.get = async (key, opts = { useTenancy: true }) => {
key = opts.useTenancy ? generateTenantKey(key) : key
const client = await redis.getCacheClient()
const value = await client.get(key)
return value
}
/**
* Write to the cache.
*/
exports.store = async (key, value, ttl, opts = { useTenancy: true }) => {
key = opts.useTenancy ? generateTenantKey(key) : key
const client = await redis.getCacheClient()
await client.store(key, value, ttl)
}
exports.delete = async (key, opts = { useTenancy: true }) => {
key = opts.useTenancy ? generateTenantKey(key) : key
const client = await redis.getCacheClient()
return client.delete(key)
}
/**
* Read from the cache. Write to the cache if not exists.
*/
exports.withCache = async (key, ttl, fetchFn, opts = { useTenancy: true }) => {
const cachedValue = await exports.get(key, opts)
if (cachedValue) {
return cachedValue
}
try {
const fetchedValue = await fetchFn()
await exports.store(key, fetchedValue, ttl, opts)
return fetchedValue
} catch (err) {
console.error("Error fetching before cache - ", err)
throw err
}
}
exports.bustCache = async key => {
const client = await redis.getCacheClient()
try {
await client.delete(generateTenantKey(key))
} catch (err) {
console.error("Error busting cache - ", err)
throw err
}
}
exports.keys = performExport("keys")
exports.get = performExport("get")
exports.store = performExport("store")
exports.delete = performExport("delete")
exports.withCache = performExport("withCache")
exports.bustCache = performExport("bustCache")

View File

@ -0,0 +1,59 @@
require("../../../tests/utilities/TestConfiguration")
const { Writethrough } = require("../writethrough")
const { dangerousGetDB } = require("../../db")
const tk = require("timekeeper")
const START_DATE = Date.now()
tk.freeze(START_DATE)
const DELAY = 5000
const db = dangerousGetDB("test")
const db2 = dangerousGetDB("test2")
const writethrough = new Writethrough(db, DELAY), writethrough2 = new Writethrough(db2, DELAY)
describe("writethrough", () => {
describe("put", () => {
let first
it("should be able to store, will go to DB", async () => {
const response = await writethrough.put({ _id: "test", value: 1 })
const output = await db.get(response.id)
first = output
expect(output.value).toBe(1)
})
it("second put shouldn't update DB", async () => {
const response = await writethrough.put({ ...first, value: 2 })
const output = await db.get(response.id)
expect(first._rev).toBe(output._rev)
expect(output.value).toBe(1)
})
it("should put it again after delay period", async () => {
tk.freeze(START_DATE + DELAY + 1)
const response = await writethrough.put({ ...first, value: 3 })
const output = await db.get(response.id)
expect(response.rev).not.toBe(first._rev)
expect(output.value).toBe(3)
})
})
describe("get", () => {
it("should be able to retrieve", async () => {
const response = await writethrough.get("test")
expect(response.value).toBe(3)
})
})
describe("same doc, different databases (tenancy)", () => {
it("should be able to two different databases", async () => {
const resp1 = await writethrough.put({ _id: "db1", value: "first" })
const resp2 = await writethrough2.put({ _id: "db1", value: "second" })
expect(resp1.rev).toBeDefined()
expect(resp2.rev).toBeDefined()
expect((await db.get("db1")).value).toBe("first")
expect((await db2.get("db1")).value).toBe("second")
})
})
})

View File

@ -1,4 +1,4 @@
const redis = require("../redis/authRedis")
const redis = require("../redis/init")
const { getTenantId, lookupTenantId, doWithGlobalDB } = require("../tenancy")
const env = require("../environment")
const accounts = require("../cloud/accounts")

View File

@ -0,0 +1,120 @@
import BaseCache from "./base"
import { getWritethroughClient } from "../redis/init"
const DEFAULT_WRITE_RATE_MS = 10000
let CACHE: BaseCache | null = null
interface CacheItem {
doc: any
lastWrite: number
}
async function getCache() {
if (!CACHE) {
const client = await getWritethroughClient()
CACHE = new BaseCache(client)
}
return CACHE
}
function makeCacheKey(db: PouchDB.Database, key: string) {
return db.name + key
}
function makeCacheItem(doc: any, lastWrite: number | null = null): CacheItem {
return { doc, lastWrite: lastWrite || Date.now() }
}
export async function put(
db: PouchDB.Database,
doc: any,
writeRateMs: number = DEFAULT_WRITE_RATE_MS
) {
const cache = await getCache()
const key = doc._id
let cacheItem: CacheItem | undefined = await cache.get(makeCacheKey(db, key))
const updateDb = !cacheItem || cacheItem.lastWrite < Date.now() - writeRateMs
let output = doc
if (updateDb) {
const writeDb = async (toWrite: any) => {
// doc should contain the _id and _rev
const response = await db.put(toWrite)
output = {
...doc,
_id: response.id,
_rev: response.rev,
}
}
try {
await writeDb(doc)
} catch (err: any) {
if (err.status !== 409) {
throw err
} else {
// get the rev, update over it - this is risky, may change in future
const readDoc = await db.get(doc._id)
doc._rev = readDoc._rev
await writeDb(doc)
}
}
}
// if we are updating the DB then need to set the lastWrite to now
cacheItem = makeCacheItem(output, updateDb ? null : cacheItem?.lastWrite)
await cache.store(makeCacheKey(db, key), cacheItem)
return { ok: true, id: output._id, rev: output._rev }
}
export async function get(db: PouchDB.Database, id: string): Promise<any> {
const cache = await getCache()
const cacheKey = makeCacheKey(db, id)
let cacheItem: CacheItem = await cache.get(cacheKey)
if (!cacheItem) {
const doc = await db.get(id)
cacheItem = makeCacheItem(doc)
await cache.store(cacheKey, cacheItem)
}
return cacheItem.doc
}
export async function remove(
db: PouchDB.Database,
docOrId: any,
rev?: any
): Promise<void> {
const cache = await getCache()
if (!docOrId) {
throw new Error("No ID/Rev provided.")
}
const id = typeof docOrId === "string" ? docOrId : docOrId._id
rev = typeof docOrId === "string" ? rev : docOrId._rev
try {
await cache.delete(makeCacheKey(db, id))
} finally {
await db.remove(id, rev)
}
}
export class Writethrough {
db: PouchDB.Database
writeRateMs: number
constructor(
db: PouchDB.Database,
writeRateMs: number = DEFAULT_WRITE_RATE_MS
) {
this.db = db
this.writeRateMs = writeRateMs
}
async put(doc: any) {
return put(this.db, doc, this.writeRateMs)
}
async get(id: string) {
return get(this.db, id)
}
async remove(docOrId: any, rev?: any) {
return remove(this.db, docOrId, rev)
}
}

View File

@ -314,6 +314,7 @@ function getContextDB(key, opts) {
toUseAppId = getDevelopmentAppID(appId)
break
}
db = dangerousGetDB(toUseAppId, opts)
try {
cls.setOnContext(key, db)

View File

@ -1,41 +0,0 @@
exports.SEPARATOR = "_"
const PRE_APP = "app"
const PRE_DEV = "dev"
exports.DocumentTypes = {
USER: "us",
WORKSPACE: "workspace",
CONFIG: "config",
TEMPLATE: "template",
APP: PRE_APP,
DEV: PRE_DEV,
APP_DEV: `${PRE_APP}${exports.SEPARATOR}${PRE_DEV}`,
APP_METADATA: `${PRE_APP}${exports.SEPARATOR}metadata`,
ROLE: "role",
MIGRATIONS: "migrations",
DEV_INFO: "devinfo",
}
exports.StaticDatabases = {
GLOBAL: {
name: "global-db",
docs: {
apiKeys: "apikeys",
usageQuota: "usage_quota",
licenseInfo: "license_info",
},
},
// contains information about tenancy and so on
PLATFORM_INFO: {
name: "global-info",
docs: {
tenants: "tenants",
install: "install",
},
},
}
exports.APP_PREFIX = exports.DocumentTypes.APP + exports.SEPARATOR
exports.APP_DEV = exports.APP_DEV_PREFIX =
exports.DocumentTypes.APP_DEV + exports.SEPARATOR

View File

@ -0,0 +1,58 @@
export const SEPARATOR = "_"
export const UNICODE_MAX = "\ufff0"
/**
* Can be used to create a few different forms of querying a view.
*/
export enum AutomationViewModes {
ALL = "all",
AUTOMATION = "automation",
STATUS = "status",
}
export enum ViewNames {
USER_BY_EMAIL = "by_email",
BY_API_KEY = "by_api_key",
USER_BY_BUILDERS = "by_builders",
LINK = "by_link",
ROUTING = "screen_routes",
AUTOMATION_LOGS = "automation_logs",
}
export enum DocumentTypes {
USER = "us",
WORKSPACE = "workspace",
CONFIG = "config",
TEMPLATE = "template",
APP = "app",
DEV = "dev",
APP_DEV = "app_dev",
APP_METADATA = "app_metadata",
ROLE = "role",
MIGRATIONS = "migrations",
DEV_INFO = "devinfo",
AUTOMATION_LOG = "log_au",
}
export const StaticDatabases = {
GLOBAL: {
name: "global-db",
docs: {
apiKeys: "apikeys",
usageQuota: "usage_quota",
licenseInfo: "license_info",
},
},
// contains information about tenancy and so on
PLATFORM_INFO: {
name: "global-info",
docs: {
tenants: "tenants",
install: "install",
},
},
}
export const APP_PREFIX = exports.DocumentTypes.APP + exports.SEPARATOR
export const APP_DEV = exports.DocumentTypes.APP_DEV + exports.SEPARATOR
export const APP_DEV_PREFIX = APP_DEV

View File

@ -1,21 +1,42 @@
const PouchDB = require("pouchdb")
const env = require("../environment")
function getUrlInfo() {
let url = env.COUCH_DB_URL
let username, password, host
const [protocol, rest] = url.split("://")
if (url.includes("@")) {
const hostParts = rest.split("@")
host = hostParts[1]
const authParts = hostParts[0].split(":")
username = authParts[0]
password = authParts[1]
} else {
host = rest
exports.getUrlInfo = (url = env.COUCH_DB_URL) => {
let cleanUrl, username, password, host
if (url) {
// Ensure the URL starts with a protocol
const protoRegex = /^https?:\/\//i
if (!protoRegex.test(url)) {
url = `http://${url}`
}
// Split into protocol and remainder
const split = url.split("://")
const protocol = split[0]
const rest = split.slice(1).join("://")
// Extract auth if specified
if (url.includes("@")) {
// Split into host and remainder
let parts = rest.split("@")
host = parts[parts.length - 1]
let auth = parts.slice(0, -1).join("@")
// Split auth into username and password
if (auth.includes(":")) {
const authParts = auth.split(":")
username = authParts[0]
password = authParts.slice(1).join(":")
} else {
username = auth
}
} else {
host = rest
}
cleanUrl = `${protocol}://${host}`
}
return {
url: `${protocol}://${host}`,
url: cleanUrl,
auth: {
username,
password,
@ -24,7 +45,7 @@ function getUrlInfo() {
}
exports.getCouchInfo = () => {
const urlInfo = getUrlInfo()
const urlInfo = exports.getUrlInfo()
let username
let password
if (env.COUCH_DB_USERNAME) {

View File

@ -0,0 +1,62 @@
require("../../../tests/utilities/TestConfiguration")
const getUrlInfo = require("../pouch").getUrlInfo
describe("pouch", () => {
describe("Couch DB URL parsing", () => {
it("should handle a null Couch DB URL", () => {
const info = getUrlInfo(null)
expect(info.url).toBeUndefined()
expect(info.auth.username).toBeUndefined()
})
it("should be able to parse a basic Couch DB URL", () => {
const info = getUrlInfo("http://host.com")
expect(info.url).toBe("http://host.com")
expect(info.auth.username).toBeUndefined()
})
it("should be able to parse a Couch DB basic URL with HTTPS", () => {
const info = getUrlInfo("https://host.com")
expect(info.url).toBe("https://host.com")
expect(info.auth.username).toBeUndefined()
})
it("should be able to parse a basic Couch DB URL with a custom port", () => {
const info = getUrlInfo("https://host.com:1234")
expect(info.url).toBe("https://host.com:1234")
expect(info.auth.username).toBeUndefined()
})
it("should be able to parse a Couch DB URL with auth", () => {
const info = getUrlInfo("https://user:pass@host.com:1234")
expect(info.url).toBe("https://host.com:1234")
expect(info.auth.username).toBe("user")
expect(info.auth.password).toBe("pass")
})
it("should be able to parse a Couch DB URL with auth and special chars", () => {
const info = getUrlInfo("https://user:s:p@s://@://:d@;][~s@host.com:1234")
expect(info.url).toBe("https://host.com:1234")
expect(info.auth.username).toBe("user")
expect(info.auth.password).toBe("s:p@s://@://:d@;][~s")
})
it("should be able to parse a Couch DB URL without a protocol", () => {
const info = getUrlInfo("host.com:1234")
expect(info.url).toBe("http://host.com:1234")
expect(info.auth.username).toBeUndefined()
})
it("should be able to parse a Couch DB URL with auth and without a protocol", () => {
const info = getUrlInfo("user:s:p@s://@://:d@;][~s@host.com:1234")
expect(info.url).toBe("http://host.com:1234")
expect(info.auth.username).toBe("user")
expect(info.auth.password).toBe("s:p@s://@://:d@;][~s")
})
it("should be able to parse a Couch DB URL with only username auth", () => {
const info = getUrlInfo("https://user@host.com:1234")
expect(info.url).toBe("https://host.com:1234")
expect(info.auth.username).toBe("user")
expect(info.auth.password).toBeUndefined()
})
it("should be able to parse a Couch DB URL with only username auth and without a protocol", () => {
const info = getUrlInfo("user@host.com:1234")
expect(info.url).toBe("http://host.com:1234")
expect(info.auth.username).toBe("user")
expect(info.auth.password).toBeUndefined()
})
})
})

View File

@ -1,7 +1,7 @@
import { newid } from "../hashing"
import { DEFAULT_TENANT_ID, Configs } from "../constants"
import env from "../environment"
import { SEPARATOR, DocumentTypes } from "./constants"
import { SEPARATOR, DocumentTypes, UNICODE_MAX, ViewNames } from "./constants"
import { getTenantId, getGlobalDBName, getGlobalDB } from "../tenancy"
import fetch from "node-fetch"
import { doWithDB, allDbs } from "./index"
@ -12,14 +12,6 @@ import { isDevApp, isDevAppID } from "./conversions"
import { APP_PREFIX } from "./constants"
import * as events from "../events"
const UNICODE_MAX = "\ufff0"
export const ViewNames = {
USER_BY_EMAIL: "by_email",
BY_API_KEY: "by_api_key",
USER_BY_BUILDERS: "by_builders",
}
export * from "./constants"
export * from "./conversions"
export { default as Replication } from "./Replication"
@ -63,6 +55,13 @@ export function getDocParams(
}
}
/**
* Retrieve the correct index for a view based on default design DB.
*/
export function getQueryIndex(viewName: ViewNames) {
return `database/${viewName}`
}
/**
* Generates a new workspace ID.
* @returns {string} The new workspace ID which the workspace doc can be stored under.
@ -93,13 +92,17 @@ export function generateGlobalUserID(id?: any) {
/**
* Gets parameters for retrieving users.
*/
export function getGlobalUserParams(globalId: any, otherProps = {}) {
export function getGlobalUserParams(globalId: any, otherProps: any = {}) {
if (!globalId) {
globalId = ""
}
const startkey = otherProps?.startkey
return {
...otherProps,
startkey: `${DocumentTypes.USER}${SEPARATOR}${globalId}`,
// need to include this in case of pagination
startkey: startkey
? startkey
: `${DocumentTypes.USER}${SEPARATOR}${globalId}`,
endkey: `${DocumentTypes.USER}${SEPARATOR}${globalId}${UNICODE_MAX}`,
}
}
@ -384,7 +387,9 @@ export const getScopedFullConfig = async function (
if (type === Configs.SETTINGS) {
if (scopedConfig && scopedConfig.doc) {
// overrides affected by environment variables
scopedConfig.doc.config.platformUrl = await getPlatformUrl()
scopedConfig.doc.config.platformUrl = await getPlatformUrl({
tenantAware: true,
})
scopedConfig.doc.config.analyticsEnabled =
await events.analytics.enabled()
} else {
@ -393,7 +398,7 @@ export const getScopedFullConfig = async function (
doc: {
_id: generateConfigID({ type, user, workspace }),
config: {
platformUrl: await getPlatformUrl(),
platformUrl: await getPlatformUrl({ tenantAware: true }),
analyticsEnabled: await events.analytics.enabled(),
},
},
@ -434,6 +439,26 @@ export const getPlatformUrl = async (opts = { tenantAware: true }) => {
return platformUrl
}
export function pagination(
data: any[],
pageSize: number,
{ paginate, property } = { paginate: true, property: "_id" }
) {
if (!paginate) {
return { data, hasNextPage: false }
}
const hasNextPage = data.length > pageSize
let nextPage = undefined
if (hasNextPage) {
nextPage = property ? data[pageSize]?.[property] : data[pageSize]?._id
}
return {
data: data.slice(0, pageSize),
hasNextPage,
nextPage,
}
}
export async function getScopedConfig(db: any, params: any) {
const configDoc = await getScopedFullConfig(db, params)
return configDoc && configDoc.config ? configDoc.config : configDoc

View File

@ -16,7 +16,7 @@ if (!LOADED && isDev() && !isTest()) {
LOADED = true
}
const env: any = {
const env = {
isTest,
isDev,
JWT_SECRET: process.env.JWT_SECRET,
@ -40,7 +40,7 @@ const env: any = {
DISABLE_ACCOUNT_PORTAL: process.env.DISABLE_ACCOUNT_PORTAL,
SELF_HOSTED: !!parseInt(process.env.SELF_HOSTED || ""),
COOKIE_DOMAIN: process.env.COOKIE_DOMAIN,
PLATFORM_URL: process.env.PLATFORM_URL,
PLATFORM_URL: process.env.PLATFORM_URL || "",
POSTHOG_TOKEN: process.env.POSTHOG_TOKEN,
ENABLE_ANALYTICS: process.env.ENABLE_ANALYTICS,
TENANT_FEATURE_FLAGS: process.env.TENANT_FEATURE_FLAGS,
@ -66,6 +66,7 @@ const env: any = {
for (let [key, value] of Object.entries(env)) {
// handle the edge case of "0" to disable an environment variable
if (value === "0") {
// @ts-ignore
env[key] = 0
}
}

View File

@ -13,6 +13,7 @@ import deprovisioning from "./context/deprovision"
import auth from "./auth"
import constants from "./constants"
import * as dbConstants from "./db/constants"
import logging from "./logging"
// mimic the outer package exports
import * as db from "./pkg/db"
@ -49,6 +50,7 @@ const core = {
deprovisioning,
installation,
errors,
logging,
...errorClasses,
}

View File

@ -1,10 +1,10 @@
const NonErrors = ["AccountError"]
function isSuppressed(e) {
function isSuppressed(e?: any) {
return e && e["suppressAlert"]
}
module.exports.logAlert = (message, e) => {
export function logAlert(message: string, e?: any) {
if (e && NonErrors.includes(e.name) && isSuppressed(e)) {
return
}
@ -14,3 +14,7 @@ module.exports.logAlert = (message, e) => {
}
console.error(`bb-alert: ${message} ${errorJson}`)
}
export default {
logAlert,
}

View File

@ -94,7 +94,6 @@ module.exports = (
user = await getUser(userId, session.tenantId)
}
user.csrfToken = session.csrfToken
delete user.password
authenticated = true
} catch (err) {
error = err
@ -128,6 +127,8 @@ module.exports = (
}
if (!user && tenantId) {
user = { tenantId }
} else {
delete user.password
}
// be explicit
if (authenticated !== true) {

View File

@ -2,7 +2,7 @@ const jwt = require("./passport/jwt")
const local = require("./passport/local")
const google = require("./passport/google")
const oidc = require("./passport/oidc")
const { authError } = require("./passport/utils")
const { authError, ssoCallbackUrl } = require("./passport/utils")
const authenticated = require("./authenticated")
const auditLog = require("./auditLog")
const tenancy = require("./tenancy")
@ -20,6 +20,7 @@ module.exports = {
tenancy,
authError,
internalApi,
ssoCallbackUrl,
datasource: {
google: datasourceGoogle,
},

View File

@ -1,6 +1,7 @@
const GoogleStrategy = require("passport-google-oauth").OAuth2Strategy
const { ssoCallbackUrl } = require("./utils")
const { authenticateThirdParty } = require("./third-party-common")
const { Configs } = require("../../../constants")
const buildVerifyFn = saveUserFn => {
return (accessToken, refreshToken, profile, done) => {
@ -57,5 +58,10 @@ exports.strategyFactory = async function (config, callbackUrl, saveUserFn) {
)
}
}
exports.getCallbackUrl = async function (db, config) {
return ssoCallbackUrl(db, config, Configs.GOOGLE)
}
// expose for testing
exports.buildVerifyFn = buildVerifyFn

View File

@ -55,6 +55,7 @@ exports.authenticate = async function (ctx, email, password, done) {
if (await compare(password, dbUser.password)) {
const sessionId = newid()
const tenantId = getTenantId()
await createASession(dbUser._id, { sessionId, tenantId })
dbUser.token = jwt.sign(

View File

@ -1,6 +1,8 @@
const fetch = require("node-fetch")
const OIDCStrategy = require("@techpass/passport-openidconnect").Strategy
const { authenticateThirdParty } = require("./third-party-common")
const { ssoCallbackUrl } = require("./utils")
const { Configs } = require("../../../constants")
const buildVerifyFn = saveUserFn => {
/**
@ -89,11 +91,24 @@ function validEmail(value) {
* from couchDB rather than environment variables, using this factory is necessary for dynamically configuring passport.
* @returns Dynamically configured Passport OIDC Strategy
*/
exports.strategyFactory = async function (config, callbackUrl, saveUserFn) {
exports.strategyFactory = async function (config, saveUserFn) {
try {
const { clientID, clientSecret, configUrl } = config
const verify = buildVerifyFn(saveUserFn)
const strategy = new OIDCStrategy(config, verify)
strategy.name = "oidc"
return strategy
} catch (err) {
console.error(err)
throw new Error("Error constructing OIDC authentication strategy", err)
}
}
exports.fetchStrategyConfig = async function (enrichedConfig, callbackUrl) {
try {
const { clientID, clientSecret, configUrl } = enrichedConfig
if (!clientID || !clientSecret || !callbackUrl || !configUrl) {
//check for remote config and all required elements
throw new Error(
"Configuration invalid. Must contain clientID, clientSecret, callbackUrl and configUrl"
)
@ -109,24 +124,24 @@ exports.strategyFactory = async function (config, callbackUrl, saveUserFn) {
const body = await response.json()
const verify = buildVerifyFn(saveUserFn)
return new OIDCStrategy(
{
issuer: body.issuer,
authorizationURL: body.authorization_endpoint,
tokenURL: body.token_endpoint,
userInfoURL: body.userinfo_endpoint,
clientID: clientID,
clientSecret: clientSecret,
callbackURL: callbackUrl,
},
verify
)
return {
issuer: body.issuer,
authorizationURL: body.authorization_endpoint,
tokenURL: body.token_endpoint,
userInfoURL: body.userinfo_endpoint,
clientID: clientID,
clientSecret: clientSecret,
callbackURL: callbackUrl,
}
} catch (err) {
console.error(err)
throw new Error("Error constructing OIDC authentication strategy", err)
throw new Error("Error constructing OIDC authentication configuration", err)
}
}
exports.getCallbackUrl = async function (db, config) {
return ssoCallbackUrl(db, config, Configs.OIDC)
}
// expose for testing
exports.buildVerifyFn = buildVerifyFn

View File

@ -48,8 +48,8 @@ describe("oidc", () => {
it("should create successfully create an oidc strategy", async () => {
const oidc = require("../oidc")
await oidc.strategyFactory(oidcConfig, callbackUrl)
const enrichedConfig = await oidc.fetchStrategyConfig(oidcConfig, callbackUrl)
await oidc.strategyFactory(enrichedConfig, callbackUrl)
expect(mockFetch).toHaveBeenCalledWith(oidcConfig.configUrl)

View File

@ -1,3 +1,7 @@
const { isMultiTenant, getTenantId } = require("../../tenancy")
const { getScopedConfig } = require("../../db/utils")
const { Configs } = require("../../constants")
/**
* Utility to handle authentication errors.
*
@ -5,6 +9,7 @@
* @param {*} message Message that will be returned in the response body
* @param {*} err (Optional) error that will be logged
*/
exports.authError = function (done, message, err = null) {
return done(
err,
@ -12,3 +17,21 @@ exports.authError = function (done, message, err = null) {
{ message: message }
)
}
exports.ssoCallbackUrl = async (db, config, type) => {
// in case there is a callback URL from before
if (config && config.callbackURL) {
return config.callbackURL
}
const publicConfig = await getScopedConfig(db, {
type: Configs.SETTINGS,
})
let callbackUrl = `/api/global/auth`
if (isMultiTenant()) {
callbackUrl += `/${getTenantId()}`
}
callbackUrl += `/${type}/callback`
return `${publicConfig.platformUrl}${callbackUrl}`
}

View File

@ -294,6 +294,16 @@ export const uploadDirectory = async (
await Promise.all(uploads)
}
exports.downloadTarballDirect = async (url: string, path: string) => {
path = sanitizeKey(path)
const response = await fetch(url)
if (!response.ok) {
throw new Error(`unexpected response ${response.statusText}`)
}
await streamPipeline(response.body, zlib.Unzip(), tar.extract(path))
}
export const downloadTarball = async (url: any, bucketName: any, path: any) => {
bucketName = sanitizeBucket(bucketName)
path = sanitizeKey(path)

View File

@ -2,7 +2,7 @@
// The outer exports can't be used as they now reference dist directly
import Client from "../redis"
import utils from "../redis/utils"
import clients from "../redis/authRedis"
import clients from "../redis/init"
export = {
Client,

View File

@ -1,3 +1,4 @@
import RedisWrapper from "../redis"
const env = require("../environment")
// ioredis mock is all in memory
const Redis = env.isTest() ? require("ioredis-mock") : require("ioredis")
@ -6,24 +7,34 @@ const {
removeDbPrefix,
getRedisOptions,
SEPARATOR,
SelectableDatabases,
} = require("./utils")
const RETRY_PERIOD_MS = 2000
const STARTUP_TIMEOUT_MS = 5000
const CLUSTERED = false
const DEFAULT_SELECT_DB = SelectableDatabases.DEFAULT
// for testing just generate the client once
let CLOSED = false
let CLIENT = env.isTest() ? new Redis(getRedisOptions()) : null
let CLIENTS: { [key: number]: any } = {}
// if in test always connected
let CONNECTED = !!env.isTest()
let CONNECTED = env.isTest()
function connectionError(timeout, err) {
function pickClient(selectDb: number): any {
return CLIENTS[selectDb]
}
function connectionError(
selectDb: number,
timeout: NodeJS.Timeout,
err: Error | string
) {
// manually shut down, ignore errors
if (CLOSED) {
return
}
CLIENT.disconnect()
pickClient(selectDb).disconnect()
CLOSED = true
// always clear this on error
clearTimeout(timeout)
@ -38,59 +49,69 @@ function connectionError(timeout, err) {
* Inits the system, will error if unable to connect to redis cluster (may take up to 10 seconds) otherwise
* will return the ioredis client which will be ready to use.
*/
function init() {
let timeout
function init(selectDb = DEFAULT_SELECT_DB) {
let timeout: NodeJS.Timeout
CLOSED = false
// testing uses a single in memory client
if (env.isTest() || (CLIENT && CONNECTED)) {
let client = pickClient(selectDb)
// already connected, ignore
if (client && CONNECTED) {
return
}
// testing uses a single in memory client
if (env.isTest()) {
CLIENTS[selectDb] = new Redis(getRedisOptions())
}
// start the timer - only allowed 5 seconds to connect
timeout = setTimeout(() => {
if (!CONNECTED) {
connectionError(timeout, "Did not successfully connect in timeout")
connectionError(
selectDb,
timeout,
"Did not successfully connect in timeout"
)
}
}, STARTUP_TIMEOUT_MS)
// disconnect any lingering client
if (CLIENT) {
CLIENT.disconnect()
if (client) {
client.disconnect()
}
const { redisProtocolUrl, opts, host, port } = getRedisOptions(CLUSTERED)
if (CLUSTERED) {
CLIENT = new Redis.Cluster([{ host, port }], opts)
client = new Redis.Cluster([{ host, port }], opts)
} else if (redisProtocolUrl) {
CLIENT = new Redis(redisProtocolUrl)
client = new Redis(redisProtocolUrl)
} else {
CLIENT = new Redis(opts)
client = new Redis(opts)
}
// attach handlers
CLIENT.on("end", err => {
connectionError(timeout, err)
client.on("end", (err: Error) => {
connectionError(selectDb, timeout, err)
})
CLIENT.on("error", err => {
connectionError(timeout, err)
client.on("error", (err: Error) => {
connectionError(selectDb, timeout, err)
})
CLIENT.on("connect", () => {
client.on("connect", () => {
clearTimeout(timeout)
CONNECTED = true
})
CLIENTS[selectDb] = client
}
function waitForConnection() {
function waitForConnection(selectDb: number = DEFAULT_SELECT_DB) {
return new Promise(resolve => {
if (CLIENT == null) {
if (pickClient(selectDb) == null) {
init()
} else if (CONNECTED) {
resolve()
resolve("")
return
}
// check if the connection is ready
const interval = setInterval(() => {
if (CONNECTED) {
clearInterval(interval)
resolve()
resolve("")
}
}, 500)
})
@ -100,25 +121,26 @@ function waitForConnection() {
* Utility function, takes a redis stream and converts it to a promisified response -
* this can only be done with redis streams because they will have an end.
* @param stream A redis stream, specifically as this type of stream will have an end.
* @param client The client to use for further lookups.
* @return {Promise<object>} The final output of the stream
*/
function promisifyStream(stream) {
function promisifyStream(stream: any, client: RedisWrapper) {
return new Promise((resolve, reject) => {
const outputKeys = new Set()
stream.on("data", keys => {
stream.on("data", (keys: string[]) => {
keys.forEach(key => {
outputKeys.add(key)
})
})
stream.on("error", err => {
stream.on("error", (err: Error) => {
reject(err)
})
stream.on("end", async () => {
const keysArray = Array.from(outputKeys)
const keysArray: string[] = Array.from(outputKeys) as string[]
try {
let getPromises = []
for (let key of keysArray) {
getPromises.push(CLIENT.get(key))
getPromises.push(client.get(key))
}
const jsonArray = await Promise.all(getPromises)
resolve(
@ -134,48 +156,52 @@ function promisifyStream(stream) {
})
}
class RedisWrapper {
constructor(db) {
export = class RedisWrapper {
_db: string
_select: number
constructor(db: string, selectDb: number | null = null) {
this._db = db
this._select = selectDb || DEFAULT_SELECT_DB
}
getClient() {
return CLIENT
return pickClient(this._select)
}
async init() {
CLOSED = false
init()
await waitForConnection()
init(this._select)
await waitForConnection(this._select)
return this
}
async finish() {
CLOSED = true
CLIENT.disconnect()
this.getClient().disconnect()
}
async scan(key = "") {
async scan(key = ""): Promise<any> {
const db = this._db
key = `${db}${SEPARATOR}${key}`
let stream
if (CLUSTERED) {
let node = CLIENT.nodes("master")
let node = this.getClient().nodes("master")
stream = node[0].scanStream({ match: key + "*", count: 100 })
} else {
stream = CLIENT.scanStream({ match: key + "*", count: 100 })
stream = this.getClient().scanStream({ match: key + "*", count: 100 })
}
return promisifyStream(stream)
return promisifyStream(stream, this.getClient())
}
async keys(pattern) {
async keys(pattern: string) {
const db = this._db
return CLIENT.keys(addDbPrefix(db, pattern))
return this.getClient().keys(addDbPrefix(db, pattern))
}
async get(key) {
async get(key: string) {
const db = this._db
let response = await CLIENT.get(addDbPrefix(db, key))
let response = await this.getClient().get(addDbPrefix(db, key))
// overwrite the prefixed key
if (response != null && response.key) {
response.key = key
@ -188,39 +214,37 @@ class RedisWrapper {
}
}
async store(key, value, expirySeconds = null) {
async store(key: string, value: any, expirySeconds: number | null = null) {
const db = this._db
if (typeof value === "object") {
value = JSON.stringify(value)
}
const prefixedKey = addDbPrefix(db, key)
await CLIENT.set(prefixedKey, value)
await this.getClient().set(prefixedKey, value)
if (expirySeconds) {
await CLIENT.expire(prefixedKey, expirySeconds)
await this.getClient().expire(prefixedKey, expirySeconds)
}
}
async getTTL(key) {
async getTTL(key: string) {
const db = this._db
const prefixedKey = addDbPrefix(db, key)
return CLIENT.ttl(prefixedKey)
return this.getClient().ttl(prefixedKey)
}
async setExpiry(key, expirySeconds) {
async setExpiry(key: string, expirySeconds: number | null) {
const db = this._db
const prefixedKey = addDbPrefix(db, key)
await CLIENT.expire(prefixedKey, expirySeconds)
await this.getClient().expire(prefixedKey, expirySeconds)
}
async delete(key) {
async delete(key: string) {
const db = this._db
await CLIENT.del(addDbPrefix(db, key))
await this.getClient().del(addDbPrefix(db, key))
}
async clear() {
let items = await this.scan()
await Promise.all(items.map(obj => this.delete(obj.key)))
await Promise.all(items.map((obj: any) => this.delete(obj.key)))
}
}
module.exports = RedisWrapper
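
To illustrate the reworked wrapper, a sketch of constructing a client against the new write-through keyspace and round-tripping a value (module paths follow this diff; the key and value are invented):

const Client = require("./index")
const utils = require("./utils")

async function example() {
  // Uses the "writeThrough" key prefix and Redis SELECT database 1
  const client = await new Client(
    utils.Databases.WRITE_THROUGH,
    utils.SelectableDatabases.WRITE_THROUGH
  ).init()
  await client.store("app_metadata:123", { cached: true }, 300) // expires in 300s
  const value = await client.get("app_metadata:123")
  await client.finish()
  return value
}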

View File

@ -2,7 +2,7 @@ const Client = require("./index")
const utils = require("./utils")
const { getRedlock } = require("./redlock")
let userClient, sessionClient, appClient, cacheClient
let userClient, sessionClient, appClient, cacheClient, writethroughClient
let migrationsRedlock
// turn retry off so that only one instance can ever hold the lock
@ -13,6 +13,10 @@ async function init() {
sessionClient = await new Client(utils.Databases.SESSIONS).init()
appClient = await new Client(utils.Databases.APP_METADATA).init()
cacheClient = await new Client(utils.Databases.GENERIC_CACHE).init()
writethroughClient = await new Client(
utils.Databases.WRITE_THROUGH,
utils.SelectableDatabases.WRITE_THROUGH
).init()
// pass the underlying ioredis client to redlock
migrationsRedlock = getRedlock(
cacheClient.getClient(),
@ -25,6 +29,7 @@ process.on("exit", async () => {
if (sessionClient) await sessionClient.finish()
if (appClient) await appClient.finish()
if (cacheClient) await cacheClient.finish()
if (writethroughClient) await writethroughClient.finish()
})
module.exports = {
@ -52,6 +57,12 @@ module.exports = {
}
return cacheClient
},
getWritethroughClient: async () => {
if (!writethroughClient) {
await init()
}
return writethroughClient
},
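
Downstream code can then fetch the shared client lazily, for example (a sketch; the cache key and document are invented):

const redis = require("../redis/init")

async function cacheDocument(id, doc) {
  // init() runs on first use, subsequent calls reuse the same client
  const client = await redis.getWritethroughClient()
  await client.store(id, doc)
}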
getMigrationsRedlock: async () => {
if (!migrationsRedlock) {
await init()

View File

@ -6,6 +6,14 @@ const SEPARATOR = "-"
const REDIS_URL = !env.REDIS_URL ? "localhost:6379" : env.REDIS_URL
const REDIS_PASSWORD = !env.REDIS_PASSWORD ? "budibase" : env.REDIS_PASSWORD
/**
* These Redis databases help us to segment a Redis keyspace by prepending the
* specified database name onto the cache key. This means that a single real Redis database
* can be split into smaller logical databases, allowing us to scan each small database
* to find particular keys within it.
* If writing a very large volume of keys is expected (say 10K+) then it is better to keep these out
* of the default keyspace and use a separate one - the SelectableDatabases can be used for this.
*/
exports.Databases = {
PW_RESETS: "pwReset",
VERIFICATIONS: "verification",
@ -19,6 +27,35 @@ exports.Databases = {
QUERY_VARS: "queryVars",
LICENSES: "license",
GENERIC_CACHE: "data_cache",
WRITE_THROUGH: "writeThrough",
}
/**
* These define the numeric Redis databases that can be accessed with the SELECT command -
* (https://redis.io/commands/select/). By default a Redis server/cluster will have 16 selectable
* databases; increasing this count increases the amount of CPU/memory required to run the server.
* Ideally new Redis keyspaces should be used sparingly, only when absolutely necessary for performance
* to be maintained. Generally a keyspace can grow to be very large if scans are not needed or desired,
* but if you need to walk through all values in a database periodically then a separate selectable
* keyspace should be used.
*/
exports.SelectableDatabases = {
DEFAULT: 0,
WRITE_THROUGH: 1,
UNUSED_1: 2,
UNUSED_2: 3,
UNUSED_3: 4,
UNUSED_4: 5,
UNUSED_5: 6,
UNUSED_6: 7,
UNUSED_7: 8,
UNUSED_8: 9,
UNUSED_9: 10,
UNUSED_10: 11,
UNUSED_11: 12,
UNUSED_12: 13,
UNUSED_13: 14,
UNUSED_14: 15,
}
exports.SEPARATOR = SEPARATOR
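
A small sketch to make the two levels of segmentation concrete (client construction mirrors the wrapper in this diff; the keys are invented):

const Client = require("./index")
const utils = require("./utils")

// Level 1: the logical database name is prefixed onto every key, so a key "abc"
// stored via the generic cache becomes "data_cache-abc" in the default SELECT db 0.
const cache = new Client(utils.Databases.GENERIC_CACHE)

// Level 2: write-heavy keyspaces also get their own numeric SELECT database, so
// write-through keys ("writeThrough-abc") live in SELECT 1 and stay out of db 0 scans.
const writeThrough = new Client(
  utils.Databases.WRITE_THROUGH,
  utils.SelectableDatabases.WRITE_THROUGH
)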

View File

@ -1,4 +1,4 @@
const redis = require("../redis/authRedis")
const redis = require("../redis/init")
const { v4: uuidv4 } = require("uuid")
// a week in seconds

View File

@ -1,5 +1,6 @@
const { ViewNames } = require("./db/utils")
const { queryGlobalView } = require("./db/views")
const { UNICODE_MAX } = require("./db/constants")
/**
* Given an email address this will use a view to search through
@ -19,3 +20,24 @@ exports.getGlobalUserByEmail = async email => {
return response
}
/**
* Performs a starts-with search on the global email view.
*/
exports.searchGlobalUsersByEmail = async (email, opts) => {
if (typeof email !== "string") {
throw new Error("Must provide a string to search by")
}
const lcEmail = email.toLowerCase()
// handle if passing up startkey for pagination
const startkey = opts && opts.startkey ? opts.startkey : lcEmail
let response = await queryGlobalView(ViewNames.USER_BY_EMAIL, {
...opts,
startkey,
endkey: `${lcEmail}${UNICODE_MAX}`,
})
if (!response) {
response = []
}
return Array.isArray(response) ? response : [response]
}
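
A hedged sketch of paginating with this helper (the require path and option names are assumptions; opts is passed straight through to the underlying view query):

const { searchGlobalUsersByEmail } = require("../users") // path is an assumption

async function firstTwoPages() {
  const pageOne = await searchGlobalUsersByEmail("john", { limit: 10 })
  // pass the last emitted key back up as startkey to continue; startkey is
  // inclusive, so the overlapping row may need to be dropped by the caller
  const last = pageOne.length ? pageOne[pageOne.length - 1].email : null
  const pageTwo = last
    ? await searchGlobalUsersByEmail("john", { limit: 10, startkey: last })
    : []
  return [pageOne, pageTwo]
}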

View File

@ -197,3 +197,7 @@ exports.platformLogout = async ({ ctx, userId, keepActiveSession }) => {
await events.auth.logout()
await userCache.invalidateUser(userId)
}
exports.timeout = timeMs => {
return new Promise(resolve => setTimeout(resolve, timeMs))
}
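
Usage is the standard promise-based sleep, e.g. (a sketch; the require path is an assumption):

// Sketch: wait roughly one second before retrying a failed call.
const { timeout } = require("./utils") // path is an assumption

async function retryOnce(fn) {
  try {
    return await fn()
  } catch (err) {
    await timeout(1000)
    return fn()
  }
}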

View File

@ -656,6 +656,13 @@
"@types/keygrip" "*"
"@types/node" "*"
"@types/debug@*":
version "4.1.7"
resolved "https://registry.yarnpkg.com/@types/debug/-/debug-4.1.7.tgz#7cc0ea761509124709b8b2d1090d8f6c17aadb82"
integrity sha512-9AonUzyTjXXhEOa0DnqpzZi6VHlqKMswga9EXjpXnnqxwLtdvPPtlO8evrI5D9S6asFRCQ6v+wpiUKbw+vKqyg==
dependencies:
"@types/ms" "*"
"@types/express-serve-static-core@^4.17.18":
version "4.17.28"
resolved "https://registry.yarnpkg.com/@types/express-serve-static-core/-/express-serve-static-core-4.17.28.tgz#c47def9f34ec81dc6328d0b1b5303d1ec98d86b8"
@ -762,6 +769,11 @@
resolved "https://registry.yarnpkg.com/@types/mime/-/mime-1.3.2.tgz#93e25bf9ee75fe0fd80b594bc4feb0e862111b5a"
integrity sha512-YATxVxgRqNH6nHEIsvg6k2Boc1JHI9ZbH5iWFFv/MTkchz3b1ieGDa5T0a9RznNdI0KhVbdbWSN+KWWrQZRxTw==
"@types/ms@*":
version "0.7.31"
resolved "https://registry.yarnpkg.com/@types/ms/-/ms-0.7.31.tgz#31b7ca6407128a3d2bbc27fe2d21b345397f6197"
integrity sha512-iiUgKzV9AuaEkZqkOLDIvlQiL6ltuZd9tGcW3gwpnX8JbuiuhFlEGmmFXEXkN50Cvq7Os88IY2v0dkDqXYWVgA==
"@types/node-fetch@2.6.1":
version "2.6.1"
resolved "https://registry.yarnpkg.com/@types/node-fetch/-/node-fetch-2.6.1.tgz#8f127c50481db65886800ef496f20bbf15518975"
@ -780,6 +792,152 @@
resolved "https://registry.yarnpkg.com/@types/node/-/node-14.18.20.tgz#268f028b36eaf51181c3300252f605488c4f0650"
integrity sha512-Q8KKwm9YqEmUBRsqJ2GWJDtXltBDxTdC4m5vTdXBolu2PeQh8LX+f6BTwU+OuXPu37fLxoN6gidqBmnky36FXA==
"@types/pouchdb-adapter-cordova-sqlite@*":
version "1.0.1"
resolved "https://registry.yarnpkg.com/@types/pouchdb-adapter-cordova-sqlite/-/pouchdb-adapter-cordova-sqlite-1.0.1.tgz#49e5ee6df7cc0c23196fcb340f43a560e74eb1d6"
integrity sha512-nqlXpW1ho3KBg1mUQvZgH2755y3z/rw4UA7ZJCPMRTHofxGMY8izRVw5rHBL4/7P615or0J2udpRYxgkT3D02g==
dependencies:
"@types/pouchdb-core" "*"
"@types/pouchdb-adapter-fruitdown@*":
version "6.1.3"
resolved "https://registry.yarnpkg.com/@types/pouchdb-adapter-fruitdown/-/pouchdb-adapter-fruitdown-6.1.3.tgz#9b140ad9645cc56068728acf08ec19ac0046658e"
integrity sha512-Wz1Z1JLOW1hgmFQjqnSkmyyfH7by/iWb4abKn684WMvQfmxx6BxKJpJ4+eulkVPQzzgMMSgU1MpnQOm9FgRkbw==
dependencies:
"@types/pouchdb-core" "*"
"@types/pouchdb-adapter-http@*":
version "6.1.3"
resolved "https://registry.yarnpkg.com/@types/pouchdb-adapter-http/-/pouchdb-adapter-http-6.1.3.tgz#6e592d5f48deb6274a21ddac1498dd308096bcf3"
integrity sha512-9Z4TLbF/KJWy/D2sWRPBA+RNU0odQimfdvlDX+EY7rGcd3aVoH8qjD/X0Xcd/0dfBH5pKrNIMFFQgW/TylRCmA==
dependencies:
"@types/pouchdb-core" "*"
"@types/pouchdb-adapter-idb@*":
version "6.1.4"
resolved "https://registry.yarnpkg.com/@types/pouchdb-adapter-idb/-/pouchdb-adapter-idb-6.1.4.tgz#cb9a18864585d600820cd325f007614c5c3989cd"
integrity sha512-KIAXbkF4uYUz0ZwfNEFLtEkK44mEWopAsD76UhucH92XnJloBysav+TjI4FFfYQyTjoW3S1s6V+Z14CUJZ0F6w==
dependencies:
"@types/pouchdb-core" "*"
"@types/pouchdb-adapter-leveldb@*":
version "6.1.3"
resolved "https://registry.yarnpkg.com/@types/pouchdb-adapter-leveldb/-/pouchdb-adapter-leveldb-6.1.3.tgz#17c7e75d75b992050bca15991e97fba575c61bb3"
integrity sha512-ex8NFqQGFwEpFi7AaZ5YofmuemfZNsL3nTFZBUCAKYMBkazQij1pe2ILLStSvJr0XS0qxgXjCEW19T5Wqiiskg==
dependencies:
"@types/pouchdb-core" "*"
"@types/pouchdb-adapter-localstorage@*":
version "6.1.3"
resolved "https://registry.yarnpkg.com/@types/pouchdb-adapter-localstorage/-/pouchdb-adapter-localstorage-6.1.3.tgz#0dde02ba6b9d6073a295a20196563942ba9a54bd"
integrity sha512-oor040tye1KKiGLWYtIy7rRT7C2yoyX3Tf6elEJRpjOA7Ja/H8lKc4LaSh9ATbptIcES6MRqZDxtp7ly9hsW3Q==
dependencies:
"@types/pouchdb-core" "*"
"@types/pouchdb-adapter-memory@*":
version "6.1.3"
resolved "https://registry.yarnpkg.com/@types/pouchdb-adapter-memory/-/pouchdb-adapter-memory-6.1.3.tgz#9eabdbc890fcf58960ee8b68b8685f837e75c844"
integrity sha512-gVbsIMzDzgZYThFVT4eVNsmuZwVm/4jDxP1sjlgc3qtDIxbtBhGgyNfcskwwz9Zu5Lv1avkDsIWvcxQhnvRlHg==
dependencies:
"@types/pouchdb-core" "*"
"@types/pouchdb-adapter-node-websql@*":
version "6.1.3"
resolved "https://registry.yarnpkg.com/@types/pouchdb-adapter-node-websql/-/pouchdb-adapter-node-websql-6.1.3.tgz#aa18bc68af8cf509acd12c400010dcd5fab2243d"
integrity sha512-F/P+os6Jsa7CgHtH64+Z0HfwIcj0hIRB5z8gNhF7L7dxPWoAfkopK5H2gydrP3sQrlGyN4WInF+UJW/Zu1+FKg==
dependencies:
"@types/pouchdb-adapter-websql" "*"
"@types/pouchdb-core" "*"
"@types/pouchdb-adapter-websql@*":
version "6.1.4"
resolved "https://registry.yarnpkg.com/@types/pouchdb-adapter-websql/-/pouchdb-adapter-websql-6.1.4.tgz#359fbe42ccac0ac90b492ddb8c32fafd0aa96d79"
integrity sha512-zMJQCtXC40hBsIDRn0GhmpeGMK0f9l/OGWfLguvczROzxxcOD7REI+e6SEmX7gJKw5JuMvlfuHzkQwjmvSJbtg==
dependencies:
"@types/pouchdb-core" "*"
"@types/pouchdb-browser@*":
version "6.1.3"
resolved "https://registry.yarnpkg.com/@types/pouchdb-browser/-/pouchdb-browser-6.1.3.tgz#8f33d6ef58d6817d1f6d36979148a1c7f63244d8"
integrity sha512-EdYowrWxW9SWBMX/rux2eq7dbHi5Zeyzz+FF/IAsgQKnUxgeCO5VO2j4zTzos0SDyJvAQU+EYRc11r7xGn5tvA==
dependencies:
"@types/pouchdb-adapter-http" "*"
"@types/pouchdb-adapter-idb" "*"
"@types/pouchdb-adapter-websql" "*"
"@types/pouchdb-core" "*"
"@types/pouchdb-mapreduce" "*"
"@types/pouchdb-replication" "*"
"@types/pouchdb-core@*":
version "7.0.10"
resolved "https://registry.yarnpkg.com/@types/pouchdb-core/-/pouchdb-core-7.0.10.tgz#d1ea1549e7fad6cb579f71459b1bc27252e06a5a"
integrity sha512-mKhjLlWWXyV3PTTjDhzDV1kc2dolO7VYFa75IoKM/hr8Er9eo8RIbS7mJLfC8r/C3p6ihZu9yZs1PWC1LQ0SOA==
dependencies:
"@types/debug" "*"
"@types/pouchdb-find" "*"
"@types/pouchdb-find@*":
version "6.3.7"
resolved "https://registry.yarnpkg.com/@types/pouchdb-find/-/pouchdb-find-6.3.7.tgz#f713534a53c1a7f3fd8fbbfb74131a1b04711ddc"
integrity sha512-b2dr9xoZRK5Mwl8UiRA9l5j9mmCxNfqXuu63H1KZHwJLILjoIIz7BntCvM0hnlnl7Q8P8wORq0IskuaMq5Nnnw==
dependencies:
"@types/pouchdb-core" "*"
"@types/pouchdb-http@*":
version "6.1.3"
resolved "https://registry.yarnpkg.com/@types/pouchdb-http/-/pouchdb-http-6.1.3.tgz#09576c0d409da1f8dee34ec5b768415e2472ea52"
integrity sha512-0e9E5SqNOyPl/3FnEIbENssB4FlJsNYuOy131nxrZk36S+y1R/6qO7ZVRypWpGTqBWSuVd7gCsq2UDwO/285+w==
dependencies:
"@types/pouchdb-adapter-http" "*"
"@types/pouchdb-core" "*"
"@types/pouchdb-mapreduce@*":
version "6.1.7"
resolved "https://registry.yarnpkg.com/@types/pouchdb-mapreduce/-/pouchdb-mapreduce-6.1.7.tgz#9ab32d1e0f234f1bf6d1e4c5d7e216e9e23ac0a3"
integrity sha512-WzBwm7tmO9QhfRzVaWT4v6JQSS/fG2OoUDrWrhX87rPe2Pn6laPvdK5li6myNRxCoI/l5e8Jd+oYBAFnaiFucA==
dependencies:
"@types/pouchdb-core" "*"
"@types/pouchdb-node@*":
version "6.1.4"
resolved "https://registry.yarnpkg.com/@types/pouchdb-node/-/pouchdb-node-6.1.4.tgz#5214c0169fcfd2237d373380bbd65a934feb5dfb"
integrity sha512-wnTCH8X1JOPpNOfVhz8HW0AvmdHh6pt40MuRj0jQnK7QEHsHS79WujsKTKSOF8QXtPwpvCNSsI7ut7H7tfxxJQ==
dependencies:
"@types/pouchdb-adapter-http" "*"
"@types/pouchdb-adapter-leveldb" "*"
"@types/pouchdb-core" "*"
"@types/pouchdb-mapreduce" "*"
"@types/pouchdb-replication" "*"
"@types/pouchdb-replication@*":
version "6.4.4"
resolved "https://registry.yarnpkg.com/@types/pouchdb-replication/-/pouchdb-replication-6.4.4.tgz#743406c90f13a988fa3e346ea74ce40acd170d00"
integrity sha512-BsE5LKpjJK4iAf6Fx5kyrMw+33V+Ip7uWldUnU2BYrrvtR+MLD22dcImm7DZN1st2wPPb91i0XEnQzvP0w1C/Q==
dependencies:
"@types/pouchdb-core" "*"
"@types/pouchdb-find" "*"
"@types/pouchdb@6.4.0":
version "6.4.0"
resolved "https://registry.yarnpkg.com/@types/pouchdb/-/pouchdb-6.4.0.tgz#f9c41ca64b23029f9bf2eb4bf6956e6431cb79f8"
integrity sha512-eGCpX+NXhd5VLJuJMzwe3L79fa9+IDTrAG3CPaf4s/31PD56hOrhDJTSmRELSXuiqXr6+OHzzP0PldSaWsFt7w==
dependencies:
"@types/pouchdb-adapter-cordova-sqlite" "*"
"@types/pouchdb-adapter-fruitdown" "*"
"@types/pouchdb-adapter-http" "*"
"@types/pouchdb-adapter-idb" "*"
"@types/pouchdb-adapter-leveldb" "*"
"@types/pouchdb-adapter-localstorage" "*"
"@types/pouchdb-adapter-memory" "*"
"@types/pouchdb-adapter-node-websql" "*"
"@types/pouchdb-adapter-websql" "*"
"@types/pouchdb-browser" "*"
"@types/pouchdb-core" "*"
"@types/pouchdb-http" "*"
"@types/pouchdb-mapreduce" "*"
"@types/pouchdb-node" "*"
"@types/pouchdb-replication" "*"
"@types/prettier@^2.1.5":
version "2.6.3"
resolved "https://registry.yarnpkg.com/@types/prettier/-/prettier-2.6.3.tgz#68ada76827b0010d0db071f739314fa429943d0a"
@ -3965,6 +4123,11 @@ passport-oauth1@1.x.x:
passport-strategy "1.x.x"
utils-merge "1.x.x"
passport-oauth2-refresh@^2.1.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/passport-oauth2-refresh/-/passport-oauth2-refresh-2.1.0.tgz#c31cd133826383f5539d16ad8ab4f35ca73ce4a4"
integrity sha512-4ML7ooCESCqiTgdDBzNUFTBcPR8zQq9iM6eppEUGMMvLdsjqRL93jKwWm4Az3OJcI+Q2eIVyI8sVRcPFvxcF/A==
passport-oauth2@1.x.x:
version "1.6.1"
resolved "https://registry.yarnpkg.com/passport-oauth2/-/passport-oauth2-1.6.1.tgz#c5aee8f849ce8bd436c7f81d904a3cd1666f181b"

View File

@ -1,7 +1,7 @@
{
"name": "@budibase/bbui",
"description": "A UI solution used in the different Budibase projects.",
"version": "1.0.207-alpha.8",
"version": "1.0.220-alpha.4",
"license": "MPL-2.0",
"svelte": "src/index.js",
"module": "dist/bbui.es.js",
@ -38,7 +38,7 @@
],
"dependencies": {
"@adobe/spectrum-css-workflow-icons": "^1.2.1",
"@budibase/string-templates": "^1.0.207-alpha.8",
"@budibase/string-templates": "^1.0.220-alpha.4",
"@spectrum-css/actionbutton": "^1.0.1",
"@spectrum-css/actiongroup": "^1.0.1",
"@spectrum-css/avatar": "^3.0.2",
@ -66,11 +66,12 @@
"@spectrum-css/radio": "^3.0.2",
"@spectrum-css/search": "^3.0.2",
"@spectrum-css/sidenav": "^3.0.2",
"@spectrum-css/slider": "3.0.1",
"@spectrum-css/statuslight": "^3.0.2",
"@spectrum-css/stepper": "^3.0.3",
"@spectrum-css/switch": "^1.0.2",
"@spectrum-css/table": "^3.0.1",
"@spectrum-css/tabs": "^3.0.1",
"@spectrum-css/tabs": "^3.2.12",
"@spectrum-css/tags": "^3.0.2",
"@spectrum-css/textfield": "^3.0.1",
"@spectrum-css/toast": "^3.0.1",

View File

@ -13,6 +13,7 @@
export let size = "M"
export let active = false
export let fullWidth = false
export let noPadding = false
function longPress(element) {
if (!longPressable) return
@ -41,6 +42,7 @@
class:spectrum-ActionButton--quiet={quiet}
class:spectrum-ActionButton--emphasized={emphasized}
class:is-selected={selected}
class:noPadding
class:fullWidth
class="spectrum-ActionButton spectrum-ActionButton--size{size}"
class:active
@ -80,4 +82,14 @@
.active svg {
color: var(--spectrum-global-color-blue-600);
}
:global([dir="ltr"] .spectrum-ActionButton .spectrum-Icon) {
margin-left: 0;
}
.is-selected:not(.spectrum-ActionButton--emphasized) {
background: var(--spectrum-global-color-gray-300);
}
.noPadding {
padding: 0;
min-width: 0;
}
</style>

View File

@ -8,6 +8,7 @@
export let size = "S"
export let extraButtonText
export let extraButtonAction
export let showCloseButton = true
let show = true
@ -39,22 +40,24 @@
</button>
{/if}
</div>
<div class="spectrum-Toast-buttons">
<button
class="spectrum-ClearButton spectrum-ClearButton--overBackground spectrum-ClearButton--size{size}"
on:click={clear}
>
<div class="spectrum-ClearButton-fill">
<svg
class="spectrum-ClearButton-icon spectrum-Icon spectrum-UIIcon-Cross100"
focusable="false"
aria-hidden="true"
>
<use xlink:href="#spectrum-css-icon-Cross100" />
</svg>
</div>
</button>
</div>
{#if showCloseButton}
<div class="spectrum-Toast-buttons">
<button
class="spectrum-ClearButton spectrum-ClearButton--overBackground spectrum-ClearButton--size{size}"
on:click={clear}
>
<div class="spectrum-ClearButton-fill">
<svg
class="spectrum-ClearButton-icon spectrum-Icon spectrum-UIIcon-Cross100"
focusable="false"
aria-hidden="true"
>
<use xlink:href="#spectrum-css-icon-Cross100" />
</svg>
</div>
</button>
</div>
{/if}
</div>
{/if}
@ -63,4 +66,7 @@
pointer-events: all;
width: 100%;
}
.spectrum-Button {
border: 1px solid rgba(255, 255, 255, 0.2);
}
</style>

View File

@ -16,6 +16,9 @@
/>
<style>
hr {
background: var(--spectrum-global-color-gray-200);
}
hr.noMargin {
margin: 0;
}

View File

@ -6,6 +6,8 @@
export let title
export let fillWidth
export let left = "314px"
export let width = "calc(100% - 576px)"
let visible = false
@ -42,7 +44,12 @@
{#if visible}
<Portal>
<section class:fillWidth class="drawer" transition:slide|local>
<section
class:fillWidth
class="drawer"
transition:slide|local
style={`width: ${width}; left: ${left};`}
>
<header>
<div class="text">
<Heading size="XS">{title}</Heading>
@ -69,8 +76,6 @@
.drawer {
position: absolute;
bottom: 0;
left: 260px;
width: calc(100% - 520px);
background: var(--background);
border-top: var(--border-light);
z-index: 2;

View File

@ -40,5 +40,6 @@
on:change={onChange}
on:pick
on:type
on:blur
/>
</Field>

View File

@ -52,7 +52,10 @@
{id}
type="text"
on:focus={() => (focus = true)}
on:blur={() => (focus = false)}
on:blur={() => {
focus = false
dispatch("blur")
}}
on:change={onType}
value={value || ""}
placeholder={placeholder || ""}

View File

@ -6,12 +6,15 @@
import { createEventDispatcher } from "svelte"
import clickOutside from "../../Actions/click_outside"
import Search from "./Search.svelte"
import Icon from "../../Icon/Icon.svelte"
import StatusLight from "../../StatusLight/StatusLight.svelte"
export let id = null
export let disabled = false
export let error = null
export let fieldText = ""
export let fieldIcon = ""
export let fieldColour = ""
export let isPlaceholder = false
export let placeholderOption = null
export let options = []
@ -20,6 +23,7 @@
export let getOptionLabel = option => option
export let getOptionValue = option => option
export let getOptionIcon = () => null
export let getOptionColour = () => null
export let open = false
export let readonly = false
export let quiet = false
@ -83,11 +87,10 @@
on:mousedown={onClick}
>
{#if fieldIcon}
<span class="icon-Placeholder-Padding">
<img src={fieldIcon} alt="icon" width="20" height="15" />
<span class="option-icon">
<Icon name={fieldIcon} />
</span>
{/if}
<span
class="spectrum-Picker-label"
class:is-placeholder={isPlaceholder}
@ -105,6 +108,11 @@
<use xlink:href="#spectrum-icon-18-Alert" />
</svg>
{/if}
{#if fieldColour}
<span class="option-colour">
<StatusLight size="L" color={fieldColour} />
</span>
{/if}
<svg
class="spectrum-Icon spectrum-UIIcon-ChevronDown100 spectrum-Picker-menuIcon"
focusable="false"
@ -158,13 +166,8 @@
on:click={() => onSelectOption(getOptionValue(option, idx))}
>
{#if getOptionIcon(option, idx)}
<span class="icon-Padding">
<img
src={getOptionIcon(option, idx)}
alt="icon"
width="20"
height="15"
/>
<span class="option-icon">
<Icon name={getOptionIcon(option, idx)} />
</span>
{/if}
<span class="spectrum-Menu-itemLabel">
@ -177,6 +180,11 @@
>
<use xlink:href="#spectrum-css-icon-Checkmark100" />
</svg>
{#if getOptionColour(option, idx)}
<span class="option-colour">
<StatusLight size="L" color={getOptionColour(option, idx)} />
</span>
{/if}
</li>
{/each}
{/if}
@ -214,13 +222,18 @@
padding-right: 2px;
}
.icon-Padding {
padding-right: 10px;
/* Icon and colour alignment */
.spectrum-Menu-checkmark {
align-self: center;
margin-top: 0;
}
.icon-Placeholder-Padding {
padding-top: 5px;
padding-right: 10px;
.option-colour {
padding-left: 8px;
}
.option-icon {
padding-right: 8px;
}
.spectrum-Popover :global(.spectrum-Search) {
margin-top: -1px;
margin-left: -1px;

View File

@ -8,6 +8,8 @@
export let id = null
export let updateOnChange = true
export let quiet = false
export let inputRef
const dispatch = createEventDispatcher()
let focus = false
@ -68,6 +70,7 @@
type="search"
class="spectrum-Textfield-input spectrum-Search-input"
autocomplete="off"
bind:this={inputRef}
/>
</div>
<button

View File

@ -11,6 +11,7 @@
export let getOptionLabel = option => option
export let getOptionValue = option => option
export let getOptionIcon = () => null
export let getOptionColour = () => null
export let readonly = false
export let quiet = false
export let autoWidth = false
@ -20,7 +21,19 @@
const dispatch = createEventDispatcher()
let open = false
$: fieldText = getFieldText(value, options, placeholder)
$: fieldIcon = getFieldIcon(value, options, placeholder)
$: fieldIcon = getFieldAttribute(getOptionIcon, value, options)
$: fieldColour = getFieldAttribute(getOptionColour, value, options)
const getFieldAttribute = (getAttribute, value, options) => {
// Wait for options to load if there is a value but no options
if (!options?.length) {
return ""
}
const index = options.findIndex(
(option, idx) => getOptionValue(option, idx) === value
)
return index !== -1 ? getAttribute(options[index], index) : null
}
const getFieldText = (value, options, placeholder) => {
// Always use placeholder if no value
@ -28,27 +41,7 @@
return placeholder || "Choose an option"
}
// Wait for options to load if there is a value but no options
if (!options?.length) {
return ""
}
// Render the label if the selected option is found, otherwise raw value
const index = options.findIndex(
(option, idx) => getOptionValue(option, idx) === value
)
return index !== -1 ? getOptionLabel(options[index], index) : value
}
const getFieldIcon = (value, options) => {
// Wait for options to load if there is a value but no options
if (!options?.length) {
return ""
}
const index = options.findIndex(
(option, idx) => getOptionValue(option, idx) === value
)
return index !== -1 ? getOptionIcon(options[index], index) : null
return getFieldAttribute(getOptionLabel, value, options)
}
const selectOption = value => {
@ -66,12 +59,14 @@
{disabled}
{readonly}
{fieldText}
{fieldIcon}
{fieldColour}
{options}
{autoWidth}
{getOptionLabel}
{getOptionValue}
{getOptionIcon}
{fieldIcon}
{getOptionColour}
{autocomplete}
{sort}
isPlaceholder={value == null || value === ""}

View File

@ -0,0 +1,86 @@
<script>
import "@spectrum-css/slider/dist/index-vars.css"
import { createEventDispatcher } from "svelte"
export let value = false
export let id = null
export let disabled = false
export let min = 0
export let max = 100
export let step = 1
const dispatch = createEventDispatcher()
const onChange = event => {
dispatch("change", event.target.value)
}
</script>
<div>
<input
type="range"
{min}
{max}
{step}
{value}
{disabled}
{id}
on:change={onChange}
/>
</div>
<style>
div {
display: grid;
place-items: center;
}
input {
width: 100%;
padding: 0;
margin: 0;
-webkit-appearance: none;
background: transparent;
}
input::-webkit-slider-thumb {
-webkit-appearance: none;
}
input:focus {
outline: none;
}
input[type="range"]::-webkit-slider-thumb {
-webkit-appearance: none;
border: 2px solid var(--spectrum-global-color-gray-700);
height: 16px;
width: 16px;
border-radius: 50%;
background: var(--background);
cursor: pointer;
transition: background 130ms ease-out;
margin-top: -7px;
}
input[type="range"]::-moz-range-thumb {
border: 2px solid var(--spectrum-global-color-gray-700);
height: 12px;
width: 12px;
border-radius: 50%;
background: var(--background);
cursor: pointer;
transition: background 130ms ease-out;
}
input[type="range"]::-webkit-slider-runnable-track {
width: 100%;
height: 2px;
cursor: pointer;
background: var(--spectrum-global-color-gray-300);
border-radius: 2px;
}
input[type="range"]::-moz-range-track {
width: 100%;
height: 2px;
cursor: pointer;
background: var(--spectrum-global-color-gray-300);
border-radius: 2px;
}
</style>

View File

@ -12,3 +12,4 @@ export { default as CoreDatePicker } from "./DatePicker.svelte"
export { default as CoreDropzone } from "./Dropzone.svelte"
export { default as CoreStepper } from "./Stepper.svelte"
export { default as CoreRichTextField } from "./RichTextField.svelte"
export { default as CoreSlider } from "./Slider.svelte"

View File

@ -10,6 +10,7 @@
export let disabled = false
export let updateOnChange = true
export let quiet = false
export let inputRef
const dispatch = createEventDispatcher()
const onChange = e => {
@ -25,6 +26,7 @@
{value}
{placeholder}
{quiet}
bind:inputRef
on:change={onChange}
on:click
on:input

View File

@ -14,6 +14,7 @@
export let getOptionLabel = option => extractProperty(option, "label")
export let getOptionValue = option => extractProperty(option, "value")
export let getOptionIcon = option => option?.icon
export let getOptionColour = option => option?.colour
export let quiet = false
export let autoWidth = false
export let sort = false
@ -47,6 +48,7 @@
{getOptionLabel}
{getOptionValue}
{getOptionIcon}
{getOptionColour}
on:change={onChange}
on:click
/>

View File

@ -0,0 +1,24 @@
<script>
import Field from "./Field.svelte"
import Slider from "./Core/Slider.svelte"
import { createEventDispatcher } from "svelte"
export let value = null
export let label = null
export let labelPosition = "above"
export let min = 0
export let max = 100
export let step = 1
export let disabled = false
export let error = null
const dispatch = createEventDispatcher()
const onChange = e => {
value = e.detail
dispatch("change", e.detail)
}
</script>
<Field {label} {labelPosition} {error}>
<Slider {disabled} {value} {min} {max} {step} on:change={onChange} />
</Field>

View File

@ -47,7 +47,7 @@
</svg>
{#if tooltip && showTooltip}
<div class="tooltip" in:fade={{ duration: 130, delay: 250 }}>
<Tooltip textWrapping direction={"bottom"} text={tooltip} />
<Tooltip textWrapping direction="bottom" text={tooltip} />
</div>
{/if}
</div>

View File

@ -0,0 +1,14 @@
<div class="icon-side-nav">
<slot />
</div>
<style>
.icon-side-nav {
display: flex;
flex-direction: column;
justify-content: flex-start;
align-items: center;
padding: var(--spacing-s);
gap: var(--spacing-xs);
}
</style>

View File

@ -0,0 +1,56 @@
<script>
import Icon from "../Icon/Icon.svelte"
import Tooltip from "../Tooltip/Tooltip.svelte"
import { fade } from "svelte/transition"
export let icon
export let active = false
export let tooltip
let showTooltip = false
</script>
<div
class="icon-side-nav-item"
class:active
on:mouseover={() => (showTooltip = true)}
on:focus={() => (showTooltip = true)}
on:mouseleave={() => (showTooltip = false)}
on:click
>
<Icon name={icon} hoverable />
{#if tooltip && showTooltip}
<div class="tooltip" in:fade={{ duration: 130, delay: 250 }}>
<Tooltip textWrapping direction="right" text={tooltip} />
</div>
{/if}
</div>
<style>
.icon-side-nav-item {
width: 36px;
height: 36px;
display: grid;
place-items: center;
border-radius: 4px;
position: relative;
cursor: pointer;
transition: background 130ms ease-out;
}
.icon-side-nav-item:hover :global(svg),
.active :global(svg) {
color: var(--spectrum-global-color-gray-900);
}
.active {
background: var(--spectrum-global-color-gray-300);
}
.tooltip {
position: absolute;
pointer-events: none;
left: calc(100% - 4px);
top: 50%;
white-space: nowrap;
transform: translateY(-50%);
z-index: 1;
}
</style>

View File

@ -1,15 +1,20 @@
<script>
import { ActionButton } from "../"
import { createEventDispatcher } from "svelte"
export let type = "info"
export let icon = "Info"
export let message = ""
export let dismissable = false
export let actionMessage = null
export let action = null
export let wide = false
const dispatch = createEventDispatcher()
</script>
<div class="spectrum-Toast spectrum-Toast--{type}">
<div class="spectrum-Toast spectrum-Toast--{type}" class:wide>
{#if icon}
<svg
class="spectrum-Icon spectrum-Icon--sizeM spectrum-Toast-typeIcon"
@ -19,8 +24,13 @@
<use xlink:href="#spectrum-icon-18-{icon}" />
</svg>
{/if}
<div class="spectrum-Toast-body">
<div class="spectrum-Toast-body" class:actionBody={!!action}>
<div class="spectrum-Toast-content">{message || ""}</div>
{#if action}
<ActionButton quiet emphasized on:click={action}>
<div style="color: white; font-weight: 600;">{actionMessage}</div>
</ActionButton>
{/if}
</div>
{#if dismissable}
<div class="spectrum-Toast-buttons">
@ -46,4 +56,15 @@
.spectrum-Toast {
pointer-events: all;
}
.wide {
width: 100%;
}
.actionBody {
justify-content: space-between;
display: flex;
width: 100%;
align-items: center;
}
</style>

View File

@ -8,13 +8,15 @@
<Portal target=".modal-container">
<div class="notifications">
{#each $notifications as { type, icon, message, id, dismissable } (id)}
<div transition:fly={{ y: -30 }}>
{#each $notifications as { type, icon, message, id, dismissable, action, wide } (id)}
<div transition:fly={{ y: 30 }}>
<Notification
{type}
{icon}
{message}
{dismissable}
{action}
{wide}
on:dismiss={() => notifications.dismiss(id)}
/>
</div>
@ -25,7 +27,7 @@
<style>
.notifications {
position: fixed;
top: 20px;
bottom: 40px;
left: 0;
right: 0;
margin: 0 auto;

View File

@ -17,10 +17,13 @@
export let negative = false
export let disabled = false
export let active = false
export let color = null
</script>
<div
class="spectrum-StatusLight spectrum-StatusLight--size{size}"
class:custom={!!color}
style={`--color: ${color};`}
class:spectrum-StatusLight--celery={celery}
class:spectrum-StatusLight--yellow={yellow}
class:spectrum-StatusLight--fuchsia={fuchsia}
@ -36,6 +39,26 @@
class:spectrum-StatusLight--negative={negative}
class:spectrum-StatusLight--disabled={disabled}
class:spectrum-StatusLight--active={active}
class:withText={!!$$slots.default}
>
<slot />
</div>
<style>
.spectrum-StatusLight {
display: flex;
flex-direction: row;
justify-content: center;
align-items: center;
--spectrum-statuslight-info-text-gap: 4px;
min-height: 0;
padding-top: 0;
padding-bottom: 0;
}
.spectrum-StatusLight.withText::before {
margin-right: 10px;
}
.custom::before {
background: var(--color) !important;
}
</style>

View File

@ -20,7 +20,16 @@ export const createNotificationStore = () => {
setTimeout(() => (block = false), timeout)
}
const send = (message, type = "default", icon = "", autoDismiss = true) => {
const send = (
message,
{
type = "default",
icon = "",
autoDismiss = true,
action = null,
wide = false,
}
) => {
if (block) {
return
}
@ -28,7 +37,15 @@ export const createNotificationStore = () => {
_notifications.update(state => {
return [
...state,
{ id: _id, type, message, icon, dismissable: !autoDismiss },
{
id: _id,
type,
message,
icon,
dismissable: !autoDismiss,
action,
wide,
},
]
})
if (autoDismiss) {
@ -50,10 +67,11 @@ export const createNotificationStore = () => {
return {
subscribe,
send,
info: msg => send(msg, "info", "Info"),
error: msg => send(msg, "error", "Alert", false),
warning: msg => send(msg, "warning", "Alert"),
success: msg => send(msg, "success", "CheckmarkCircle"),
info: msg => send(msg, { type: "info", icon: "Info" }),
error: msg =>
send(msg, { type: "error", icon: "Alert", autoDismiss: false }),
warning: msg => send(msg, { type: "warning", icon: "Alert" }),
success: msg => send(msg, { type: "success", icon: "CheckmarkCircle" }),
blockNotifications,
dismiss: dismissNotification,
}
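
With the new options-object signature, bespoke notifications (for example a persistent, full-width one with an action handler) can be sent directly; a sketch, where the import path, message and handler are assumptions:

import { notifications } from "@budibase/bbui" // import path is an assumption

notifications.send("A new version of this app is available", {
  type: "info",
  icon: "Info",
  autoDismiss: false,
  wide: true,
  action: () => window.location.reload(), // invented handler for illustration
})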

View File

@ -5,7 +5,7 @@
const displayLimit = 5
$: badges = value?.slice(0, displayLimit) ?? []
$: badges = Array.isArray(value) ? value.slice(0, displayLimit) : []
$: leftover = (value?.length ?? 0) - badges.length
</script>

View File

@ -26,12 +26,20 @@
array: ArrayRenderer,
internal: InternalRenderer,
}
$: type = schema?.type ?? "string"
$: type = getType(schema)
$: customRenderer = customRenderers?.find(x => x.column === schema?.name)
$: renderer = customRenderer?.component ?? typeMap[type] ?? StringRenderer
$: width = schema?.width || "150px"
$: cellValue = getCellValue(value, schema.template)
const getType = schema => {
// Use a string renderer for dates if we use a custom template
if (schema?.type === "datetime" && schema?.template) {
return "string"
}
return schema?.type || "string"
}
const getCellValue = (value, template) => {
if (!template) {
return value

View File

@ -37,6 +37,7 @@
export let autoSortColumns = true
export let compact = false
export let customPlaceholder = false
export let placeholderText = "No rows found"
const dispatch = createEventDispatcher()
@ -405,7 +406,7 @@
>
<use xlink:href="#spectrum-icon-18-Table" />
</svg>
<div>No rows found</div>
<div>{placeholderText}</div>
</div>
{/if}
</div>

View File

@ -79,4 +79,10 @@
.emphasized {
color: var(--spectrum-global-color-blue-600);
}
.spectrum-Tabs-item {
color: var(--spectrum-global-color-gray-600);
}
.spectrum-Tabs-item.is-selected {
color: var(--spectrum-global-color-gray-900);
}
</style>

View File

@ -10,8 +10,7 @@
export let noHorizPadding = false
export let quiet = false
export let emphasized = false
// overlay content from the tab bar onto tabs e.g. for a dropdown
export let onTop = false
export let size = "M"
let thisSelected = undefined
@ -74,20 +73,18 @@
<div
bind:this={container}
class:quiet
class:spectrum-Tabs--quiet={quiet}
class:noHorizPadding
class="selected-border spectrum-Tabs {quiet &&
'spectrum-Tabs--quiet'} spectrum-Tabs--{vertical
? 'vertical'
: 'horizontal'}"
class:onTop
class:spectrum-Tabs--vertical={vertical}
class:spectrum-Tabs--horizontal={!vertical}
class="spectrum-Tabs spectrum-Tabs--size{size}"
>
<slot />
{#if $tab.info}
<div
class="spectrum-Tabs-selectionIndicator indicator-transition"
style="{emphasized &&
'background-color: var(--spectrum-global-color-blue-400)'}; width: {width}; height: {height}; left: {left}; top: {top};"
class="spectrum-Tabs-selectionIndicator"
class:emphasized
style="width: {width}; height: {height}; left: {left}; top: {top};"
/>
{/if}
</div>
@ -98,26 +95,26 @@
/>
<style>
.quiet {
.spectrum-Tabs--quiet {
border-bottom: none !important;
}
.onTop {
z-index: 20;
}
.spectrum-Tabs {
padding-left: var(--spacing-xl);
padding-right: var(--spacing-xl);
position: relative;
border-bottom: 1px solid var(--spectrum-global-color-gray-300);
border-bottom-color: var(--spectrum-global-color-gray-200);
}
.spectrum-Tabs-content {
margin-top: var(--spectrum-global-dimension-static-size-150);
}
.indicator-transition {
.spectrum-Tabs-selectionIndicator {
transition: all 200ms;
background-color: var(--spectrum-global-color-gray-900);
}
.spectrum-Tabs-selectionIndicator.emphasized {
background-color: var(--spectrum-global-color-blue-400);
}
.spectrum-Tabs--horizontal .spectrum-Tabs-selectionIndicator {
bottom: 0 !important;
}
.noHorizPadding {
padding: 0;

View File

@ -8,7 +8,7 @@
<!-- Showing / Hiding a text wrapped tooltip should be handled outside the component -->
{#if textWrapping}
<span class="spectrum-Tooltip spectrum-Tooltip--{direction} is-open">
<span class="spectrum-Tooltip spectrum-Tooltip--{direction} is-open tooltip">
<span class="spectrum-Tooltip-label">{text}</span>
<span class="spectrum-Tooltip-tip" />
</span>
@ -22,3 +22,9 @@
</div>
</span>
{/if}
<style>
.tooltip {
pointer-events: none;
}
</style>

View File

@ -1,9 +1,7 @@
<script>
import "@spectrum-css/typography/dist/index-vars.css"
// Sizes
export let size = "M"
export let serif = false
</script>

View File

@ -64,6 +64,9 @@ export { default as BannerDisplay } from "./Banner/BannerDisplay.svelte"
export { default as MarkdownEditor } from "./Markdown/MarkdownEditor.svelte"
export { default as MarkdownViewer } from "./Markdown/MarkdownViewer.svelte"
export { default as RichTextField } from "./Form/RichTextField.svelte"
export { default as IconSideNav } from "./IconSideNav/IconSideNav.svelte"
export { default as IconSideNavItem } from "./IconSideNav/IconSideNavItem.svelte"
export { default as Slider } from "./Form/Slider.svelte"
// Renderers
export { default as BoldRenderer } from "./Table/BoldRenderer.svelte"

View File

@ -206,6 +206,11 @@
resolved "https://registry.yarnpkg.com/@spectrum-css/sidenav/-/sidenav-3.0.2.tgz#9d70f408d588ee79c69857751010333671f32713"
integrity sha512-YpIdH/F0jEICYmoduGrnkTmxwJq1kfKxEp0wOs+ZkQOsvKMv1an7nyhsfOKCQqcGNfYzJ9mJAk7/u5+vsxHa8g==
"@spectrum-css/slider@3.0.1":
version "3.0.1"
resolved "https://registry.yarnpkg.com/@spectrum-css/slider/-/slider-3.0.1.tgz#5281e6f47eb5a4fd3d1816c138bf66d01d7f2e49"
integrity sha512-DI2dtMRnQuDM1miVzl3SGyR1khUEKnwdXfO5EHDFwkC3yav43F5QogkfjmjFmWWobMVovdJlAuiaaJ/IHejD0Q==
"@spectrum-css/statuslight@^3.0.2":
version "3.0.2"
resolved "https://registry.yarnpkg.com/@spectrum-css/statuslight/-/statuslight-3.0.2.tgz#dc54b6cd113413dcdb909c486b5d7bae60db65c5"
@ -226,10 +231,10 @@
resolved "https://registry.yarnpkg.com/@spectrum-css/table/-/table-3.0.2.tgz#c666743d569fef81ddc8810fac8cda53b315f8d7"
integrity sha512-nt/QNC7NmUank0wozd4FySEX1UIYXuvuOKDyN1II3sxfwFSpJfp/Df9KVMhrYs4EsmB4XMGcoxp8ND/CrvH3ow==
"@spectrum-css/tabs@^3.0.1":
version "3.0.2"
resolved "https://registry.yarnpkg.com/@spectrum-css/tabs/-/tabs-3.0.2.tgz#822316672e7b0dfba66faa988e638ddae18c700e"
integrity sha512-4RNcmwf0wxLpB7M54H02owlj0mKE8neL1+lytQpxOOhlwTO5zdsD82zjvx9tIc8tRnRKuhCCCwTuBxHYstnBmw==
"@spectrum-css/tabs@^3.2.12":
version "3.2.12"
resolved "https://registry.yarnpkg.com/@spectrum-css/tabs/-/tabs-3.2.12.tgz#9b08f23d5aa881b3441af7757800c7173e5685ff"
integrity sha512-rPFUW9SSW4+3/UJ3UrtY2/l3sQvlqB1fqxHLPDjgykvbfrnMejcCTNV4ZrFNHXpE/6+kGnk+yVViSPtWGwJzkA==
"@spectrum-css/tags@^3.0.2":
version "3.0.2"

View File

@ -13,7 +13,7 @@
"HOST_IP": ""
},
"retries": {
"runMode": 2,
"runMode": 1,
"openMode": 0
}
}
}

View File

@ -16,18 +16,15 @@ filterTests(['all'], () => {
it("should add form with multi select picker, containing 5 options", () => {
cy.navigateToFrontend()
cy.wait(500)
// Add data provider
cy.get(interact.CATEGORY_DATA).click()
cy.get(interact.CATEGORY_DATA, { timeout: 500 }).click()
cy.get(interact.COMPONENT_DATA_PROVIDER).click()
cy.get(interact.DATASOURCE_PROP_CONTROL).click()
cy.get(interact.DROPDOWN).contains("Multi Data").click()
cy.wait(500)
// Add Form with schema to match table
cy.addComponent("Form", "Form")
cy.get(interact.DATASOURCE_PROP_CONTROL).click()
cy.get(interact.DROPDOWN).contains("Multi Data").click()
cy.wait(500)
// Add multi-select picker to form
cy.addComponent("Form", "Multi-select Picker").then(componentId => {
cy.get(interact.DATASOURCE_FIELD_CONTROL).type("Test Data").type("{enter}")

View File

@ -10,6 +10,7 @@ filterTests(['all'], () => {
it("should add Radio Buttons options picker on form, add data, and confirm", () => {
cy.navigateToFrontend()
cy.wait(500)
cy.addComponent("Form", "Form")
cy.addComponent("Form", "Options Picker").then((componentId) => {
// Provide field setting
@ -36,5 +37,9 @@ filterTests(['all'], () => {
})
cy.addCustomSourceOptions(totalRadioButtons)
}
after(() => {
cy.deleteAllApps()
})
})
})

View File

@ -0,0 +1,132 @@
import filterTests from "../../support/filterTests"
const interact = require('../../support/interact')
filterTests(["smoke", "all"], () => {
context("Account Portals", () => {
const bbUserEmail = "bbuser@test.com"
before(() => {
cy.login()
cy.deleteApp("Cypress Tests")
cy.createApp("Cypress Tests", false)
// Create new user
cy.wait(500)
cy.visit(`${Cypress.config().baseUrl}/builder`, { timeout: 5000})
cy.createUser(bbUserEmail)
cy.contains("bbuser").click()
cy.wait(500)
// Reset password
cy.get(".spectrum-ActionButton-label", { timeout: 2000 }).contains("Force password reset").click({ force: true })
cy.get(".spectrum-Dialog-grid")
.find(interact.SPECTRUM_TEXTFIELD_INPUT).invoke('val').as('pwd')
cy.get(interact.SPECTRUM_BUTTON).contains("Reset password").click({ force: true })
// Login as new user and set password
cy.logOut()
cy.get('@pwd').then((pwd) => {
cy.login(bbUserEmail, pwd)
})
for (let i = 0; i < 2; i++) {
cy.get(interact.SPECTRUM_TEXTFIELD_INPUT).eq(i).type("test")
}
cy.get(interact.SPECTRUM_BUTTON).contains("Reset your password").click({ force: true })
cy.logoutNoAppGrid()
})
it("should verify Admin Portal", () => {
cy.login()
cy.contains("Users").click()
cy.contains("bbuser").click()
// Enable Development & Administration access
cy.wait(500)
for (let i = 4; i < 6; i++) {
cy.get(interact.FIELD).eq(i).within(() => {
cy.get(interact.SPECTRUM_SWITCH_INPUT).click({ force: true })
cy.get(interact.SPECTRUM_SWITCH_INPUT).should('be.enabled')
})
}
bbUserLogin()
// Verify available options for Admin portal
cy.get(".spectrum-SideNav")
.should('contain', 'Apps')
//.and('contain', 'Usage')
.and('contain', 'Users')
.and('contain', 'Auth')
.and('contain', 'Email')
.and('contain', 'Organisation')
.and('contain', 'Theming')
.and('contain', 'Update')
//.and('contain', 'Upgrade')
cy.logOut()
})
it("should verify Development Portal", () => {
// Only Development access should be enabled
cy.login()
cy.contains("Users").click()
cy.contains("bbuser").click()
cy.wait(500)
cy.get(interact.FIELD).eq(5).within(() => {
cy.get(interact.SPECTRUM_SWITCH_INPUT).click({ force: true })
})
bbUserLogin()
// Verify available options for Admin portal
cy.get(interact.SPECTRUM_SIDENAV)
.should('contain', 'Apps')
//.and('contain', 'Usage')
.and('not.contain', 'Users')
.and('not.contain', 'Auth')
.and('not.contain', 'Email')
.and('not.contain', 'Organisation')
.and('contain', 'Theming')
.and('not.contain', 'Update')
.and('not.contain', 'Upgrade')
cy.logOut()
})
it("should verify Standard Portal", () => {
// Development access should be disabled (Admin access is already disabled)
cy.login()
cy.contains("Users").click()
cy.contains("bbuser").click()
cy.wait(500)
cy.get(interact.FIELD).eq(4).within(() => {
cy.get(interact.SPECTRUM_SWITCH_INPUT).click({ force: true })
})
bbUserLogin()
// Verify Standard Portal
cy.get(interact.SPECTRUM_SIDENAV).should('not.exist') // No config sections
cy.get(interact.CREATE_APP_BUTTON).should('not.exist') // No create app button
cy.get(".app").should('not.exist') // No apps -> no roles assigned to user
cy.get(interact.CONTAINER).should('contain', bbUserEmail) // Message containing users email
cy.logoutNoAppGrid()
})
const bbUserLogin = () => {
// Login as bbuser
cy.logOut()
cy.login(bbUserEmail, "test")
}
after(() => {
cy.login()
// Delete BB user
cy.deleteUser(bbUserEmail)
})
})
})

View File

@ -0,0 +1,237 @@
import filterTests from "../../support/filterTests"
const interact = require('../../support/interact')
filterTests(["smoke", "all"], () => {
context("User Management", () => {
before(() => {
cy.login()
cy.deleteApp("Cypress Tests")
cy.createApp("Cypress Tests", false)
})
it("should create a user via basic onboarding", () => {
cy.visit(`${Cypress.config().baseUrl}/builder`, { timeout: 5000})
cy.createUser("bbuser@test.com")
cy.get(interact.SPECTRUM_TABLE).should("contain", "bbuser")
})
it("should confirm basic permission for a New User", () => {
// Basic permission = development & administration disabled
cy.contains("bbuser").click()
// Confirm development and admin access are disabled
for (let i = 4; i < 6; i++) {
cy.wait(500)
cy.get(interact.FIELD).eq(i).within(() => {
//cy.get(interact.SPECTRUM_SWITCH_INPUT).should('be.disabled')
cy.get(".spectrum-Switch-switch").should('not.be.checked')
})
}
// Existing apps appear within the No Access table
cy.get(interact.SPECTRUM_TABLE, { timeout: 500 }).eq(1).should("not.contain", "No rows found")
// Configure roles table should not contain apps
cy.get(interact.SPECTRUM_TABLE).eq(0).contains("No rows found")
})
if (Cypress.env("TEST_ENV")) {
it("should assign role types", () => {
// 3 apps minimum required - to assign an app to each role type
cy.request(`${Cypress.config().baseUrl}/api/applications?status=all`)
.its("body")
.then(val => {
if (val.length < 3) {
for (let i = 1; i < 3; i++) {
const uuid = () => Cypress._.random(0, 1e6)
const name = uuid()
if(i < 1){
cy.createApp(name, false)
} else {
cy.visit(`${Cypress.config().baseUrl}/builder`, { timeout: 5000})
cy.wait(1000)
cy.get(interact.CREATE_APP_BUTTON, { timeout: 2000 }).click({ force: true })
cy.createAppFromScratch(name)
}
}
}
})
// Navigate back to the user
cy.visit(`${Cypress.config().baseUrl}/builder`, { timeout: 5000})
cy.get(interact.SPECTRUM_SIDENAV).contains("Users").click()
cy.get(interact.SPECTRUM_TABLE, { timeout: 1000 }).contains("bbuser").click()
for (let i = 0; i < 3; i++) {
cy.get(interact.SPECTRUM_TABLE, { timeout: 3000})
.eq(1)
.find(interact.SPECTRUM_TABLE_ROW)
.eq(0)
.find(interact.SPECTRUM_TABLE_CELL)
.eq(0)
.click()
cy.get(interact.SPECTRUM_DIALOG_GRID, { timeout: 1000 })
.contains("Choose an option")
.click()
.then(() => {
if (i == 0) {
cy.get(interact.SPECTRUM_MENU, { timeout: 2000 }).contains("Admin").click({ force: true })
}
else if (i == 1) {
cy.get(interact.SPECTRUM_MENU, { timeout: 2000 }).contains("Power").click({ force: true })
}
else if (i == 2) {
cy.get(interact.SPECTRUM_MENU, { timeout: 2000 }).contains("Basic").click({ force: true })
}
cy.get(interact.SPECTRUM_BUTTON, { timeout: 2000 })
.contains("Update role")
.click({ force: true })
})
cy.reload({ timeout: 5000 })
cy.wait(1000)
}
// Confirm roles exist within Configure roles table
cy.get(interact.SPECTRUM_TABLE, { timeout: 2000 })
.eq(0)
.within(assignedRoles => {
expect(assignedRoles).to.contain("Admin")
expect(assignedRoles).to.contain("Power")
expect(assignedRoles).to.contain("Basic")
})
})
it("should unassign role types", () => {
// Set each app within Configure roles table to 'No Access'
cy.get(interact.SPECTRUM_TABLE)
.eq(0)
.find(interact.SPECTRUM_TABLE_ROW)
.its("length")
.then(len => {
for (let i = 0; i < len; i++) {
cy.get(interact.SPECTRUM_TABLE)
.eq(0)
.find(interact.SPECTRUM_TABLE_ROW)
.eq(0)
.find(interact.SPECTRUM_TABLE_CELL)
.eq(0)
.click()
.then(() => {
cy.get(interact.SPECTRUM_PICKER).eq(1).click({ force: true })
cy.get(interact.SPECTRUM_POPOVER, { timeout: 500 }).contains("No Access").click()
})
cy.get(interact.SPECTRUM_BUTTON)
.contains("Update role")
.click({ force: true })
}
})
// Confirm Configure roles table no longer has any apps in it
cy.get(interact.SPECTRUM_TABLE, { timeout: 1000 }).eq(0).contains("No rows found")
})
}
it("should enable Developer access and verify application access", () => {
// Enable Developer access
cy.get(interact.FIELD)
.eq(4)
.within(() => {
cy.get(interact.SPECTRUM_SWITCH_INPUT).click({ force: true })
})
// No Access table should now be empty
cy.get(interact.CONTAINER)
.contains("No Access")
.parent()
.within(() => {
cy.get(interact.SPECTRUM_TABLE).contains("No rows found")
})
// Each app within Configure roles should have Admin access
cy.get(interact.SPECTRUM_TABLE)
.eq(0)
.find(interact.SPECTRUM_TABLE_ROW)
.its("length")
.then(len => {
for (let i = 0; i < len; i++) {
cy.get(interact.SPECTRUM_TABLE)
.eq(0)
.find(interact.SPECTRUM_TABLE_ROW)
.eq(i)
.contains("Admin")
cy.wait(500)
}
})
})
it("should disable Developer access and verify application access", () => {
// Disable Developer access
cy.get(interact.FIELD)
.eq(4)
.within(() => {
cy.get(".spectrum-Switch-input").click({ force: true })
})
// Configure roles table should now be empty
cy.get(interact.CONTAINER)
.contains("Configure roles")
.parent()
.within(() => {
cy.get(interact.SPECTRUM_TABLE).contains("No rows found")
})
})
it("Should edit user details within user details page", () => {
// Add First name
cy.get(interact.FIELD, { timeout: 1000 }).eq(2).within(() => {
cy.wait(500)
cy.get(interact.SPECTRUM_TEXTFIELD_INPUT, { timeout: 1000 }).wait(500).clear().click().type("bb")
})
// Add Last name
cy.get(interact.FIELD, { timeout: 1000 }).eq(3).within(() => {
cy.wait(500)
cy.get(interact.SPECTRUM_TEXTFIELD_INPUT, { timeout: 1000 }).click().wait(500).clear().type("test")
})
cy.get(interact.FIELD, { timeout: 1000 }).eq(0).click()
// Reload page
cy.reload()
// Confirm details have been saved
cy.get(interact.FIELD, { timeout: 1000 }).eq(2).within(() => {
cy.get(interact.SPECTRUM_TEXTFIELD_INPUT).should('have.value', "bb")
})
cy.get(interact.FIELD, { timeout: 1000 }).eq(3).within(() => {
cy.get(interact.SPECTRUM_TEXTFIELD_INPUT, { timeout: 1000 }).should('have.value', "test")
})
})
it("should reset the users password", () => {
cy.get(interact.REGENERATE, { timeout: 500 }).contains("Force password reset").click({ force: true })
// Reset password modal
cy.get(interact.SPECTRUM_DIALOG_GRID)
.find(interact.SPECTRUM_TEXTFIELD_INPUT).invoke('val').as('pwd')
cy.get(interact.SPECTRUM_BUTTON).contains("Reset password").click({ force: true })
// Logout, then login with new password
cy.logOut()
cy.get('@pwd').then((pwd) => {
cy.login("bbuser@test.com", pwd)
})
// Reset password screen
for (let i = 0; i < 2; i++) {
cy.get(interact.SPECTRUM_TEXTFIELD_INPUT).eq(i).type("test")
}
cy.get(interact.SPECTRUM_BUTTON).contains("Reset your password").click({ force: true })
// Confirm user logged in after password change
cy.get(".avatar > .icon").click({ force: true })
cy.get(".spectrum-Menu-item").contains("Update user information").click({ force: true })
cy.get(interact.SPECTRUM_TEXTFIELD_INPUT)
.eq(0)
.invoke('val').should('eq', 'bbuser@test.com')
// Logout and login as previous user
cy.logoutNoAppGrid()
cy.login()
})
it("should delete a user", () => {
cy.deleteUser("bbuser@test.com")
cy.get(interact.SPECTRUM_TABLE, { timeout: 4000 }).should("not.have.text", "bbuser")
})
})
})

Some files were not shown because too many files have changed in this diff.