commit 117191e5cb
Merge remote-tracking branch 'origin/master' into fix/block-duplicate-autocolumn-types
@@ -38,17 +38,6 @@ jobs:
           fi
           echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV
 
-      - name: Tag and release Proxy service docker image
-        run: |
-          docker login -u $DOCKER_USER -p $DOCKER_PASSWORD
-          yarn build:docker:proxy:prod
-          docker tag proxy-service budibase/proxy:$PROD_TAG
-          docker push budibase/proxy:$PROD_TAG
-        env:
-          DOCKER_USER: ${{ secrets.DOCKER_USERNAME }}
-          DOCKER_PASSWORD: ${{ secrets.DOCKER_API_KEY }}
-          PROD_TAG: k8s
-
       - name: Configure AWS Credentials
         uses: aws-actions/configure-aws-credentials@v1
         with:
@@ -28,17 +28,6 @@ jobs:
           release_version=$(cat lerna.json | jq -r '.version')
           echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV
 
-      - name: Tag and release Proxy service docker image
-        run: |
-          docker login -u $DOCKER_USER -p $DOCKER_PASSWORD
-          yarn build:docker:proxy:preprod
-          docker tag proxy-service budibase/proxy:$PREPROD_TAG
-          docker push budibase/proxy:$PREPROD_TAG
-        env:
-          DOCKER_USER: ${{ secrets.DOCKER_USERNAME }}
-          DOCKER_PASSWORD: ${{ secrets.DOCKER_API_KEY }}
-          PREPROD_TAG: k8s-preprod
-
       - name: Pull values.yaml from budibase-infra
         run: |
          curl -H "Authorization: token ${{ secrets.GH_PERSONAL_TOKEN }}" \
@@ -29,17 +29,6 @@ jobs:
           release_version=$(cat lerna.json | jq -r '.version')
           echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV
 
-      - name: Tag and release Proxy service docker image
-        run: |
-          docker login -u $DOCKER_USER -p $DOCKER_PASSWORD
-          yarn build:docker:proxy:release
-          docker tag proxy-service budibase/proxy:$RELEASE_TAG
-          docker push budibase/proxy:$RELEASE_TAG
-        env:
-          DOCKER_USER: ${{ secrets.DOCKER_USERNAME }}
-          DOCKER_PASSWORD: ${{ secrets.DOCKER_API_KEY }}
-          RELEASE_TAG: k8s-release
-
       - name: Pull values.yaml from budibase-infra
         run: |
          curl -H "Authorization: token ${{ secrets.GH_PERSONAL_TOKEN }}" \
@@ -76,22 +76,25 @@ jobs:
           DOCKER_USER: ${{ secrets.DOCKER_USERNAME }}
           DOCKER_PASSWORD: ${{ secrets.DOCKER_API_KEY }}
 
-      - name: Get the latest budibase release version
+  deploy-to-release-env:
+    needs: [release-images]
+    runs-on: ubuntu-latest
+
+    steps:
+      - uses: actions/checkout@v2
+
+      - name: Get the current budibase release version
         id: version
         run: |
           release_version=$(cat lerna.json | jq -r '.version')
           echo "RELEASE_VERSION=$release_version" >> $GITHUB_ENV
 
-      - name: Tag and release Proxy service docker image
-        run: |
-          docker login -u $DOCKER_USER -p $DOCKER_PASSWORD
-          yarn build:docker:proxy:release
-          docker tag proxy-service budibase/proxy:$RELEASE_TAG
-          docker push budibase/proxy:$RELEASE_TAG
-        env:
-          DOCKER_USER: ${{ secrets.DOCKER_USERNAME }}
-          DOCKER_PASSWORD: ${{ secrets.DOCKER_API_KEY }}
-          RELEASE_TAG: k8s-release
-
+      - name: Configure AWS Credentials
+        uses: aws-actions/configure-aws-credentials@v1
+        with:
+          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
+          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+          aws-region: eu-west-1
+
       - name: Pull values.yaml from budibase-infra
         run: |
@@ -98,17 +98,6 @@ jobs:
           aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
           aws-region: eu-west-1
 
-      - name: Tag and release Proxy service docker image
-        run: |
-          docker login -u $DOCKER_USER -p $DOCKER_PASSWORD
-          yarn build:docker:proxy:preprod
-          docker tag proxy-service budibase/proxy:$PREPROD_TAG
-          docker push budibase/proxy:$PREPROD_TAG
-        env:
-          DOCKER_USER: ${{ secrets.DOCKER_USERNAME }}
-          DOCKER_PASSWORD: ${{ secrets.DOCKER_API_KEY }}
-          PREPROD_TAG: k8s-preprod
-
       - name: Pull values.yaml from budibase-infra
         run: |
          curl -H "Authorization: token ${{ secrets.GH_PERSONAL_TOKEN }}" \
@@ -66,8 +66,6 @@ typings/
 .env
 !qa-core/.env
 !hosting/.env
-hosting/.generated-nginx.dev.conf
-hosting/proxy/.generated-nginx.prod.conf
 
 # parcel-bundler cache (https://parceljs.org/)
 .cache
@@ -105,5 +103,7 @@ stats.html
 
 # TypeScript cache
 *.tsbuildinfo
+
+# plugins
 budibase-component
 budibase-datasource
@@ -28,11 +28,26 @@ spec:
         app.kubernetes.io/name: budibase-proxy
     spec:
       containers:
-      - image: budibase/proxy:{{ .Values.services.proxy.tag | default "k8s" }}
+      - image: budibase/proxy:{{ .Values.globals.appVersion }}
         imagePullPolicy: Always
         name: proxy-service
         ports:
        - containerPort: {{ .Values.services.proxy.port }}
+        env:
+          - name: APPS_UPSTREAM_URL
+            value: {{ tpl .Values.services.proxy.upstreams.apps . | quote }}
+          - name: WORKER_UPSTREAM_URL
+            value: {{ tpl .Values.services.proxy.upstreams.worker . | quote }}
+          - name: MINIO_UPSTREAM_URL
+            value: {{ tpl .Values.services.proxy.upstreams.minio . | quote }}
+          - name: COUCHDB_UPSTREAM_URL
+            value: {{ .Values.services.couchdb.url | default (tpl .Values.services.proxy.upstreams.couchdb .) | quote }}
+          - name: RESOLVER
+          {{ if .Values.services.proxy.resolver }}
+            value: {{ .Values.services.proxy.resolver }}
+          {{ else }}
+            value: kube-dns.kube-system.svc.{{ .Values.services.dns }}
+          {{ end }}
         {{ with .Values.services.proxy.resources }}
         resources:
         {{- toYaml . | nindent 10 }}
@@ -124,6 +124,11 @@ services:
   proxy:
     port: 10000
     replicaCount: 1
+    upstreams:
+      apps: 'http://app-service.{{ .Release.Namespace }}.svc.{{ .Values.services.dns }}:{{ .Values.services.apps.port }}'
+      worker: 'http://worker-service.{{ .Release.Namespace }}.svc.{{ .Values.services.dns }}:{{ .Values.services.worker.port }}'
+      minio: 'http://minio-service.{{ .Release.Namespace }}.svc.{{ .Values.services.dns }}:{{ .Values.services.objectStore.port }}'
+      couchdb: 'http://{{ .Release.Name }}-svc-couchdb:{{ .Values.services.couchdb.port }}'
     resources: {}
 
   apps:
@@ -25,9 +25,9 @@ services:
   proxy-service:
     container_name: budi-nginx-dev
     restart: on-failure
-    image: nginx:latest
+    image: budibase/proxy:latest
     volumes:
-      - ./.generated-nginx.dev.conf:/etc/nginx/nginx.conf
+      - ./nginx.dev.conf:/etc/nginx/templates/nginx.conf.template
       - ./proxy/error.html:/usr/share/nginx/html/error.html
     ports:
       - "${MAIN_PORT}:10000"
@@ -36,6 +36,8 @@ services:
       - couchdb-service
     extra_hosts:
       - "host.docker.internal:host-gateway"
+    environment:
+      - PROXY_ADDRESS=host.docker.internal
 
   couchdb-service:
     # platform: linux/amd64
@@ -82,6 +82,12 @@ services:
     environment:
       - PROXY_RATE_LIMIT_WEBHOOKS_PER_SECOND=10
       - PROXY_RATE_LIMIT_API_PER_SECOND=20
+      - APPS_UPSTREAM_URL=http://app-service:4002
+      - WORKER_UPSTREAM_URL=http://worker-service:4003
+      - MINIO_UPSTREAM_URL=http://minio-service:9000
+      - COUCHDB_UPSTREAM_URL=http://couchdb-service:5984
+      - WATCHTOWER_UPSTREAM_URL=http://watchtower-service:8080
+      - RESOLVER=127.0.0.11
     depends_on:
       - minio-service
       - worker-service
@@ -25,17 +25,17 @@ http {
   }
 
   upstream app-service {
-    server {{address}}:4001;
+    server ${PROXY_ADDRESS}:4001;
     keepalive 32;
   }
 
   upstream worker-service {
-    server {{address}}:4002;
+    server ${PROXY_ADDRESS}:4002;
     keepalive 32;
   }
 
   upstream builder {
-    server {{address}}:3000;
+    server ${PROXY_ADDRESS}:3000;
     keepalive 32;
   }
@@ -4,7 +4,7 @@ FROM nginx:latest
 # use the default nginx behaviour for *.template files which are processed with envsubst
 # override the output dir to output directly to /etc/nginx instead of /etc/nginx/conf.d
 ENV NGINX_ENVSUBST_OUTPUT_DIR=/etc/nginx
-COPY .generated-nginx.prod.conf /etc/nginx/templates/nginx.conf.template
+COPY nginx.prod.conf /etc/nginx/templates/nginx.conf.template
 
 # IPv6 removal needs to happen after envsubst
 RUN rm -rf /docker-entrypoint.d/10-listen-on-ipv6-by-default.sh
@@ -16,4 +16,11 @@ COPY error.html /usr/share/nginx/html/error.html
 
 # Default environment
 ENV PROXY_RATE_LIMIT_WEBHOOKS_PER_SECOND=10
 ENV PROXY_RATE_LIMIT_API_PER_SECOND=20
+# Use docker-compose values as defaults for backwards compatibility
+ENV APPS_UPSTREAM_URL=http://app-service:4002
+ENV WORKER_UPSTREAM_URL=http://worker-service:4003
+ENV MINIO_UPSTREAM_URL=http://minio-service:9000
+ENV COUCHDB_UPSTREAM_URL=http://couchdb-service:5984
+ENV WATCHTOWER_UPSTREAM_URL=http://watchtower-service:8080
+ENV RESOLVER=127.0.0.11
@@ -23,7 +23,7 @@ http {
     tcp_nodelay on;
     server_tokens off;
     types_hash_max_size 2048;
-    resolver {{ resolver }} valid=10s ipv6=off;
+    resolver ${RESOLVER} valid=10s ipv6=off;
 
     # buffering
     client_header_buffer_size 1k;
@@ -76,27 +76,23 @@ http {
     add_header Content-Security-Policy "${csp_default}; ${csp_script}; ${csp_style}; ${csp_object}; ${csp_base_uri}; ${csp_connect}; ${csp_font}; ${csp_frame}; ${csp_img}; ${csp_manifest}; ${csp_media}; ${csp_worker};" always;
 
     # upstreams
-    set $apps {{ apps }};
-    set $worker {{ worker }};
-    set $minio {{ minio }};
-    set $couchdb {{ couchdb }};
-    {{#if watchtower}}
-    set $watchtower {{ watchtower }};
-    {{/if}}
+    set $apps ${APPS_UPSTREAM_URL};
+    set $worker ${WORKER_UPSTREAM_URL};
+    set $minio ${MINIO_UPSTREAM_URL};
+    set $couchdb ${COUCHDB_UPSTREAM_URL};
+    set $watchtower ${WATCHTOWER_UPSTREAM_URL};
 
     location /app {
-      proxy_pass http://$apps:4002;
+      proxy_pass $apps;
     }
 
     location = / {
-      proxy_pass http://$apps:4002;
+      proxy_pass $apps;
     }
 
-    {{#if watchtower}}
     location = /v1/update {
-      proxy_pass http://$watchtower:8080;
+      proxy_pass $watchtower;
     }
-    {{/if}}
 
     location ~ ^/(builder|app_) {
       proxy_http_version 1.1;
@@ -107,19 +103,17 @@ http {
       proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
       proxy_set_header Host $host;
 
-      proxy_pass http://$apps:4002;
+      proxy_pass $apps;
     }
 
     location ~ ^/api/(system|admin|global)/ {
       proxy_set_header Host $host;
-      proxy_pass http://$worker:4003;
+      proxy_pass $worker;
     }
 
     location /worker/ {
       proxy_set_header Host $host;
-      proxy_pass http://$worker:4003;
+      proxy_pass $worker;
       rewrite ^/worker/(.*)$ /$1 break;
     }
 
@@ -138,7 +132,7 @@ http {
       proxy_set_header X-Real-IP $remote_addr;
       proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
 
-      proxy_pass http://$apps:4002;
+      proxy_pass $apps;
     }
 
     location /api/ {
@@ -157,7 +151,7 @@ http {
       proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
       proxy_set_header Host $host;
 
-      proxy_pass http://$apps:4002;
+      proxy_pass $apps;
     }
 
     location /api/webhooks/ {
@@ -177,11 +171,11 @@ http {
       proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
       proxy_set_header Host $host;
 
-      proxy_pass http://$apps:4002;
+      proxy_pass $apps;
     }
 
     location /db/ {
-      proxy_pass http://$couchdb:5984;
+      proxy_pass $couchdb;
       rewrite ^/db/(.*)$ /$1 break;
     }
 
@@ -191,7 +185,7 @@ http {
       proxy_set_header Connection 'upgrade';
       proxy_set_header Host $host;
       proxy_cache_bypass $http_upgrade;
-      proxy_pass http://$apps:4002;
+      proxy_pass $apps;
     }
 
     location / {
@@ -205,7 +199,7 @@ http {
       proxy_set_header Connection "";
       chunked_transfer_encoding off;
 
-      proxy_pass http://$minio:9000;
+      proxy_pass $minio;
     }
 
     client_header_timeout 60;
@@ -1,5 +1,5 @@
 {
-  "version": "2.2.12",
+  "version": "2.2.22",
   "npmClient": "yarn",
   "packages": [
     "packages/*"
package.json

@@ -55,15 +55,11 @@
     "test:e2e:ci:record": "lerna run cy:ci:record --stream",
     "test:e2e:ci:notify": "lerna run cy:ci:notify",
     "build:specs": "lerna run specs",
-    "build:docker": "lerna run build:docker && npm run build:docker:proxy:compose && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh $BUDIBASE_RELEASE_VERSION && cd -",
+    "build:docker": "lerna run build:docker && npm run build:docker:proxy && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh $BUDIBASE_RELEASE_VERSION && cd -",
     "build:docker:pre": "lerna run build && lerna run predocker",
     "build:docker:proxy": "docker build hosting/proxy -t proxy-service",
-    "build:docker:proxy:compose": "node scripts/proxy/generateProxyConfig compose && npm run build:docker:proxy",
-    "build:docker:proxy:preprod": "node scripts/proxy/generateProxyConfig preprod && npm run build:docker:proxy",
-    "build:docker:proxy:release": "node scripts/proxy/generateProxyConfig release && npm run build:docker:proxy",
-    "build:docker:proxy:prod": "node scripts/proxy/generateProxyConfig prod && npm run build:docker:proxy",
     "build:docker:selfhost": "lerna run build:docker && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh latest && cd -",
-    "build:docker:develop": "node scripts/pinVersions && lerna run build:docker && npm run build:docker:proxy:compose && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh develop && cd -",
+    "build:docker:develop": "node scripts/pinVersions && lerna run build:docker && npm run build:docker:proxy && cd hosting/scripts/linux/ && ./release-to-docker-hub.sh develop && cd -",
     "build:docker:airgap": "node hosting/scripts/airgapped/airgappedDockerBuild",
     "build:digitalocean": "cd hosting/digitalocean && ./build.sh && cd -",
     "build:docker:single:multiarch": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/single/Dockerfile -t budibase:latest .",
@@ -87,4 +83,4 @@
     "install:pro": "bash scripts/pro/install.sh",
     "dep:clean": "yarn clean && yarn bootstrap"
   }
 }
@@ -1,6 +1,6 @@
 {
   "name": "@budibase/backend-core",
-  "version": "2.2.12",
+  "version": "2.2.22",
   "description": "Budibase backend core libraries used in server and worker",
   "main": "dist/src/index.js",
   "types": "dist/src/index.d.ts",
@@ -21,7 +21,7 @@
   },
   "dependencies": {
     "@budibase/nano": "10.1.1",
-    "@budibase/types": "^2.2.12",
+    "@budibase/types": "^2.2.22",
     "@shopify/jest-koa-mocks": "5.0.1",
     "@techpass/passport-openidconnect": "0.3.2",
     "aws-sdk": "2.1030.0",
@@ -1,7 +1,7 @@
 {
   "name": "@budibase/bbui",
   "description": "A UI solution used in the different Budibase projects.",
-  "version": "2.2.12",
+  "version": "2.2.22",
   "license": "MPL-2.0",
   "svelte": "src/index.js",
   "module": "dist/bbui.es.js",
@@ -38,7 +38,7 @@
   ],
   "dependencies": {
     "@adobe/spectrum-css-workflow-icons": "1.2.1",
-    "@budibase/string-templates": "^2.2.12",
+    "@budibase/string-templates": "^2.2.22",
     "@spectrum-css/actionbutton": "1.0.1",
     "@spectrum-css/actiongroup": "1.0.1",
     "@spectrum-css/avatar": "3.0.2",
|
@ -76,13 +76,6 @@
|
||||||
}
|
}
|
||||||
// If time only set date component to 2000-01-01
|
// If time only set date component to 2000-01-01
|
||||||
if (timeOnly) {
|
if (timeOnly) {
|
||||||
// Classic flackpickr causing issues.
|
|
||||||
// When selecting a value for the first time for a "time only" field,
|
|
||||||
// the time is always offset by 1 hour for some reason (regardless of time
|
|
||||||
// zone) so we need to correct it.
|
|
||||||
if (!value && newValue) {
|
|
||||||
newValue = new Date(dates[0].getTime() + 60 * 60 * 1000).toISOString()
|
|
||||||
}
|
|
||||||
newValue = `2000-01-01T${newValue.split("T")[1]}`
|
newValue = `2000-01-01T${newValue.split("T")[1]}`
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -113,7 +106,7 @@
|
||||||
|
|
||||||
const clearDateOnBackspace = event => {
|
const clearDateOnBackspace = event => {
|
||||||
if (["Backspace", "Clear", "Delete"].includes(event.key)) {
|
if (["Backspace", "Clear", "Delete"].includes(event.key)) {
|
||||||
dispatch("change", null)
|
dispatch("change", "")
|
||||||
flatpickr.close()
|
flatpickr.close()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@@ -1,6 +1,6 @@
 {
   "name": "@budibase/builder",
-  "version": "2.2.12",
+  "version": "2.2.22",
   "license": "GPL-3.0",
   "private": true,
   "scripts": {
@@ -71,10 +71,10 @@
     }
   },
   "dependencies": {
-    "@budibase/bbui": "^2.2.12",
-    "@budibase/client": "^2.2.12",
-    "@budibase/frontend-core": "^2.2.12",
-    "@budibase/string-templates": "^2.2.12",
+    "@budibase/bbui": "^2.2.22",
+    "@budibase/client": "^2.2.22",
+    "@budibase/frontend-core": "^2.2.22",
+    "@budibase/string-templates": "^2.2.22",
     "@sentry/browser": "5.19.1",
     "@spectrum-css/page": "^3.0.1",
     "@spectrum-css/vars": "^3.0.1",
|
@ -74,8 +74,18 @@
|
||||||
}
|
}
|
||||||
|
|
||||||
const deleteAction = index => {
|
const deleteAction = index => {
|
||||||
|
// Check if we're deleting the selected action
|
||||||
|
const selectedIndex = actions.indexOf(selectedAction)
|
||||||
|
const isSelected = index === selectedIndex
|
||||||
|
|
||||||
|
// Delete the action
|
||||||
actions.splice(index, 1)
|
actions.splice(index, 1)
|
||||||
actions = actions
|
actions = actions
|
||||||
|
|
||||||
|
// Select a new action if we deleted the selected one
|
||||||
|
if (isSelected) {
|
||||||
|
selectedAction = actions?.length ? actions[0] : null
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
const toggleActionList = () => {
|
const toggleActionList = () => {
|
||||||
|
|
|
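
Capturing the selected index before the `splice` matters here: once the action is removed, array indices shift and the comparison would be unreliable. The same pattern outside Svelte, as a minimal sketch with hypothetical types:

```typescript
// Standalone sketch of the delete-and-reselect pattern (Action is hypothetical).
type Action = { id: string }

function deleteAction(actions: Action[], selected: Action | null, index: number) {
  // Capture selection state BEFORE mutating, since indices shift after splice
  const wasSelected = selected !== null && actions.indexOf(selected) === index

  actions.splice(index, 1)

  // Fall back to the first remaining action if the selected one was removed
  return wasSelected ? (actions.length ? actions[0] : null) : selected
}
```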
@@ -30,7 +30,7 @@
 {#if $selectedComponent}
   {#key $selectedComponent._id}
     <Panel {title} icon={componentDefinition?.icon} borderLeft>
-      {#if componentDefinition.info}
+      {#if componentDefinition?.info}
         <ComponentInfoSection {componentDefinition} />
       {/if}
       <ComponentSettingsSection
@@ -1,6 +1,6 @@
 {
   "name": "@budibase/cli",
-  "version": "2.2.12",
+  "version": "2.2.22",
   "description": "Budibase CLI, for developers, self hosting and migrations.",
   "main": "src/index.js",
   "bin": {
@@ -26,9 +26,9 @@
     "outputPath": "build"
   },
   "dependencies": {
-    "@budibase/backend-core": "^2.2.12",
-    "@budibase/string-templates": "^2.2.12",
-    "@budibase/types": "^2.2.12",
+    "@budibase/backend-core": "^2.2.22",
+    "@budibase/string-templates": "^2.2.22",
+    "@budibase/types": "^2.2.22",
     "axios": "0.21.2",
     "chalk": "4.1.0",
     "cli-progress": "3.11.2",
@@ -1,6 +1,6 @@
 {
   "name": "@budibase/client",
-  "version": "2.2.12",
+  "version": "2.2.22",
   "license": "MPL-2.0",
   "module": "dist/budibase-client.js",
   "main": "dist/budibase-client.js",
@@ -19,9 +19,9 @@
     "dev:builder": "rollup -cw"
   },
   "dependencies": {
-    "@budibase/bbui": "^2.2.12",
-    "@budibase/frontend-core": "^2.2.12",
-    "@budibase/string-templates": "^2.2.12",
+    "@budibase/bbui": "^2.2.22",
+    "@budibase/frontend-core": "^2.2.22",
+    "@budibase/string-templates": "^2.2.22",
     "@spectrum-css/button": "^3.0.3",
     "@spectrum-css/card": "^3.0.3",
     "@spectrum-css/divider": "^1.0.3",
@@ -171,6 +171,15 @@
   $: pad = pad || (interactive && hasChildren && inDndPath)
   $: $dndIsDragging, (pad = false)
 
+  // Determine whether we should render a skeleton loader for this component
+  $: showSkeleton =
+    $loading &&
+    definition.name !== "Screenslot" &&
+    children.length === 0 &&
+    !instance._blockElementHasChildren &&
+    !definition.block &&
+    definition.skeleton !== false
+
   // Update component context
   $: store.set({
     id,
@@ -473,14 +482,6 @@
       componentStore.actions.unregisterInstance(id)
     }
   })
-
-  $: showSkeleton =
-    $loading &&
-    definition.name !== "Screenslot" &&
-    children.length === 0 &&
-    !instance._blockElementHasChildren &&
-    !definition.block &&
-    definition.skeleton !== false
 </script>
 
 {#if showSkeleton}
@@ -11,20 +11,23 @@
   export let limit
   export let paginate
 
-  const loading = writable(false)
-
   const { styleable, Provider, ActionTypes, API } = getContext("sdk")
   const component = getContext("component")
 
+  // Update loading state
+  const parentLoading = getContext("loading")
+  const loading = writable(true)
+  setContext("loading", loading)
+
   // We need to manage our lucene query manually as we want to allow components
   // to extend it
   let queryExtensions = {}
   $: defaultQuery = LuceneUtils.buildLuceneQuery(filter)
   $: query = extendQuery(defaultQuery, queryExtensions)
 
-  // Keep our data fetch instance up to date
-  $: fetch = createFetch(dataSource)
-  $: fetch.update({
+  // Fetch data and refresh when needed
+  $: fetch = createFetch(dataSource, $parentLoading)
+  $: updateFetch({
     query,
     sortColumn,
     sortOrder,
@@ -32,6 +35,9 @@
     paginate,
   })
 
+  // Keep loading context updated
+  $: loading.set($parentLoading || !$fetch.loaded)
+
   // Build our action context
   $: actions = [
     {
@@ -80,14 +86,21 @@
       sortColumn: $fetch.sortColumn,
       sortOrder: $fetch.sortOrder,
     },
-    limit: limit,
+    limit,
   }
 
-  const parentLoading = getContext("loading")
-  setContext("loading", loading)
-  $: loading.set($parentLoading || !$fetch.loaded)
-
-  const createFetch = datasource => {
+  const createFetch = (datasource, parentLoading) => {
+    // Return a dummy fetch if parent is still loading. We do this so that we
+    // can still properly subscribe to a valid fetch object and check all
+    // properties, but we want to avoid fetching the real data until all parents
+    // have finished loading.
+    // This logic is only needed due to skeleton loaders, as previously we
+    // simply blocked component rendering until data was ready.
+    if (parentLoading) {
+      return fetchData({ API })
+    }
+
+    // Otherwise return the real thing
     return fetchData({
       API,
       datasource,
@@ -101,6 +114,14 @@
     })
   }
 
+  const updateFetch = opts => {
+    // Only update fetch if parents have stopped loading. Otherwise we will
+    // trigger a fetch of the real data before parents are ready.
+    if (!$parentLoading) {
+      fetch.update(opts)
+    }
+  }
+
   const addQueryExtension = (key, extension) => {
     if (!key || !extension) {
       return
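
The dummy-fetch trick keeps downstream subscribers working against a real store-like object while deferring the network request until every ancestor provider has loaded. A condensed sketch of the gate; the `fetchData` stub below merely stands in for the factory imported in the diff above:

```typescript
// Stand-in stub for Budibase's fetch factory (assumption, for illustration).
const fetchData = (opts: { API: unknown; datasource?: unknown }) => ({
  loaded: false,
  ...opts,
})

function createFetch(API: unknown, datasource: unknown, parentLoading: boolean) {
  // While an ancestor provider is still loading, return an empty fetch so
  // subscribers always have a valid object to read, without hitting the API.
  if (parentLoading) {
    return fetchData({ API })
  }
  // All parents settled: create the real, datasource-backed fetch.
  return fetchData({ API, datasource })
}
```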
@@ -1,7 +1,8 @@
 <script>
-  import { getContext } from "svelte"
+  import { getContext, setContext } from "svelte"
   import InnerForm from "./InnerForm.svelte"
   import { Helpers } from "@budibase/bbui"
+  import { writable } from "svelte/store"
 
   export let dataSource
   export let theme
@@ -20,10 +21,17 @@
   const context = getContext("context")
   const { API, fetchDatasourceSchema } = getContext("sdk")
 
+  // Forms also use loading context as they require loading a schema
+  const parentLoading = getContext("loading")
+  const loading = writable(true)
+  setContext("loading", loading)
+
+  let loaded = false
   let schema
   let table
 
   $: fetchSchema(dataSource)
+  $: loading.set($parentLoading || !loaded)
 
   // Returns the closes data context which isn't a built in context
   const getInitialValues = (type, dataSource, context) => {
@@ -55,11 +63,14 @@
     }
     const res = await fetchDatasourceSchema(dataSource)
     schema = res || {}
+    if (!loaded) {
+      loaded = true
+    }
   }
 
   $: initialValues = getInitialValues(actionType, dataSource, $context)
   $: resetKey = Helpers.hashString(
-    !!schema +
+    loaded +
       JSON.stringify(initialValues) +
       JSON.stringify(dataSource) +
       disabled
|
@ -128,21 +128,15 @@
|
||||||
return fields.find(field => get(field).name === name)
|
return fields.find(field => get(field).name === name)
|
||||||
}
|
}
|
||||||
|
|
||||||
const getDefault = (defaultValue, schema, type) => {
|
// Sanitises a value by ensuring it doesn't contain any invalid data
|
||||||
// Remove any values not present in the field schema
|
const sanitiseValue = (value, schema, type) => {
|
||||||
// Convert any values supplied to string
|
// Check arrays - remove any values not present in the field schema and
|
||||||
if (Array.isArray(defaultValue) && type == "array" && schema) {
|
// convert any values supplied to strings
|
||||||
return defaultValue.reduce((acc, entry) => {
|
if (Array.isArray(value) && type === "array" && schema) {
|
||||||
let processedOption = String(entry)
|
const options = schema?.constraints.inclusion || []
|
||||||
let schemaOptions = schema.constraints.inclusion
|
return value.map(opt => String(opt)).filter(opt => options.includes(opt))
|
||||||
if (schemaOptions.indexOf(processedOption) > -1) {
|
|
||||||
acc.push(processedOption)
|
|
||||||
}
|
|
||||||
return acc
|
|
||||||
}, [])
|
|
||||||
} else {
|
|
||||||
return defaultValue
|
|
||||||
}
|
}
|
||||||
|
return value
|
||||||
}
|
}
|
||||||
|
|
||||||
const formApi = {
|
const formApi = {
|
||||||
|
@ -160,7 +154,6 @@
|
||||||
|
|
||||||
// Create validation function based on field schema
|
// Create validation function based on field schema
|
||||||
const schemaConstraints = schema?.[field]?.constraints
|
const schemaConstraints = schema?.[field]?.constraints
|
||||||
|
|
||||||
const validator = disableValidation
|
const validator = disableValidation
|
||||||
? null
|
? null
|
||||||
: createValidatorFromConstraints(
|
: createValidatorFromConstraints(
|
||||||
|
@ -170,10 +163,11 @@
|
||||||
table
|
table
|
||||||
)
|
)
|
||||||
|
|
||||||
const parsedDefault = getDefault(defaultValue, schema?.[field], type)
|
// Sanitise the default value to ensure it doesn't contain invalid data
|
||||||
|
defaultValue = sanitiseValue(defaultValue, schema?.[field], type)
|
||||||
|
|
||||||
// If we've already registered this field then keep some existing state
|
// If we've already registered this field then keep some existing state
|
||||||
let initialValue = Helpers.deepGet(initialValues, field) ?? parsedDefault
|
let initialValue = Helpers.deepGet(initialValues, field) ?? defaultValue
|
||||||
let initialError = null
|
let initialError = null
|
||||||
let fieldId = `id-${Helpers.uuid()}`
|
let fieldId = `id-${Helpers.uuid()}`
|
||||||
const existingField = getField(field)
|
const existingField = getField(field)
|
||||||
|
@ -183,7 +177,9 @@
|
||||||
|
|
||||||
// Determine the initial value for this field, reusing the current
|
// Determine the initial value for this field, reusing the current
|
||||||
// value if one exists
|
// value if one exists
|
||||||
initialValue = fieldState.value ?? initialValue
|
if (fieldState.value != null && fieldState.value !== "") {
|
||||||
|
initialValue = fieldState.value
|
||||||
|
}
|
||||||
|
|
||||||
// If this field has already been registered and we previously had an
|
// If this field has already been registered and we previously had an
|
||||||
// error set, then re-run the validator to see if we can unset it
|
// error set, then re-run the validator to see if we can unset it
|
||||||
|
@ -206,11 +202,11 @@
|
||||||
error: initialError,
|
error: initialError,
|
||||||
disabled:
|
disabled:
|
||||||
disabled || fieldDisabled || (isAutoColumn && !editAutoColumns),
|
disabled || fieldDisabled || (isAutoColumn && !editAutoColumns),
|
||||||
defaultValue: parsedDefault,
|
defaultValue,
|
||||||
validator,
|
validator,
|
||||||
lastUpdate: Date.now(),
|
lastUpdate: Date.now(),
|
||||||
},
|
},
|
||||||
fieldApi: makeFieldApi(field, parsedDefault),
|
fieldApi: makeFieldApi(field),
|
||||||
fieldSchema: schema?.[field] ?? {},
|
fieldSchema: schema?.[field] ?? {},
|
||||||
})
|
})
|
||||||
|
|
||||||
|
@ -225,18 +221,9 @@
|
||||||
return fieldInfo
|
return fieldInfo
|
||||||
},
|
},
|
||||||
validate: () => {
|
validate: () => {
|
||||||
let valid = true
|
return fields
|
||||||
let validationFields = fields
|
.filter(field => get(field).step === get(currentStep))
|
||||||
|
.every(field => get(field).fieldApi.validate())
|
||||||
validationFields = fields.filter(f => get(f).step === get(currentStep))
|
|
||||||
|
|
||||||
// Validate fields and check if any are invalid
|
|
||||||
validationFields.forEach(field => {
|
|
||||||
if (!get(field).fieldApi.validate()) {
|
|
||||||
valid = false
|
|
||||||
}
|
|
||||||
})
|
|
||||||
return valid
|
|
||||||
},
|
},
|
||||||
reset: () => {
|
reset: () => {
|
||||||
// Reset the form by resetting each individual field
|
// Reset the form by resetting each individual field
|
||||||
|
|
|
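
The rewritten `validate` gives the same answer for the all-valid case, but one subtlety is worth noting: `Array.prototype.every` short-circuits on the first `false`, so once one field fails, the remaining fields on the step are not re-validated in that pass, whereas the old loop ran every validator. A minimal sketch of the difference:

```typescript
// Two validators: the first fails, the second would pass.
const fields = [() => false, () => true]

// Old shape: run every validator, then report the accumulated result
let valid = true
fields.forEach(validate => {
  if (!validate()) valid = false
})

// New shape: identical boolean result, but stops at the first failure,
// so fields[1] is never called here
const validShort = fields.every(validate => validate())
```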
@@ -1,12 +1,12 @@
 {
   "name": "@budibase/frontend-core",
-  "version": "2.2.12",
+  "version": "2.2.22",
   "description": "Budibase frontend core libraries used in builder and client",
   "author": "Budibase",
   "license": "MPL-2.0",
   "svelte": "src/index.js",
   "dependencies": {
-    "@budibase/bbui": "^2.2.12",
+    "@budibase/bbui": "^2.2.22",
     "lodash": "^4.17.21",
     "svelte": "^3.46.2"
   }
@@ -1,6 +1,6 @@
 {
   "name": "@budibase/sdk",
-  "version": "2.2.12",
+  "version": "2.2.22",
   "description": "Budibase Public API SDK",
   "author": "Budibase",
   "license": "MPL-2.0",
@@ -1,7 +1,7 @@
 {
   "name": "@budibase/server",
   "email": "hi@budibase.com",
-  "version": "2.2.12",
+  "version": "2.2.22",
   "description": "Budibase Web Server",
   "main": "src/index.ts",
   "repository": {
@@ -43,11 +43,11 @@
   "license": "GPL-3.0",
   "dependencies": {
     "@apidevtools/swagger-parser": "10.0.3",
-    "@budibase/backend-core": "^2.2.12",
-    "@budibase/client": "^2.2.12",
-    "@budibase/pro": "2.2.12",
-    "@budibase/string-templates": "^2.2.12",
-    "@budibase/types": "^2.2.12",
+    "@budibase/backend-core": "^2.2.22",
+    "@budibase/client": "^2.2.22",
+    "@budibase/pro": "2.2.22",
+    "@budibase/string-templates": "^2.2.22",
+    "@budibase/types": "^2.2.22",
     "@bull-board/api": "3.7.0",
     "@bull-board/koa": "3.9.4",
     "@elastic/elasticsearch": "7.10.0",
@@ -2,12 +2,6 @@
 const compose = require("docker-compose")
 const path = require("path")
 const fs = require("fs")
-const isWsl = require("is-wsl")
-const { processStringSync } = require("@budibase/string-templates")
-
-function isLinux() {
-  return !isWsl && process.platform !== "darwin" && process.platform !== "win32"
-}
 
 // This script wraps docker-compose allowing you to manage your dev infrastructure with simple commands.
 const CONFIG = {
@@ -23,16 +17,6 @@ const Commands = {
 }
 
 async function init() {
-  // generate nginx file, always do this incase it has changed
-  const hostingPath = path.join(process.cwd(), "..", "..", "hosting")
-  const nginxHbsPath = path.join(hostingPath, "nginx.dev.conf.hbs")
-  const nginxOutputPath = path.join(hostingPath, ".generated-nginx.dev.conf")
-  const contents = fs.readFileSync(nginxHbsPath, "utf8")
-  const config = {
-    address: isLinux() ? "172.17.0.1" : "host.docker.internal",
-  }
-  fs.writeFileSync(nginxOutputPath, processStringSync(contents, config))
-
   const envFilePath = path.join(process.cwd(), ".env")
   if (!fs.existsSync(envFilePath)) {
     const envFileJson = {
@@ -316,7 +316,13 @@ export async function checkForViewUpdates(
 
   // Update view if required
   if (needsUpdated) {
-    const newViewTemplate = viewTemplate(view.meta)
+    const groupByField: any = Object.values(table.schema).find(
+      (field: any) => field.name == view.groupBy
+    )
+    const newViewTemplate = viewTemplate(
+      view.meta,
+      groupByField?.type === FieldTypes.ARRAY
+    )
     await saveView(null, view.name, newViewTemplate)
     if (!newViewTemplate.meta.schema) {
       newViewTemplate.meta.schema = table.schema
@@ -25,7 +25,15 @@ export async function fetch(ctx: BBContext) {
 export async function save(ctx: BBContext) {
   const db = context.getAppDB()
   const { originalName, ...viewToSave } = ctx.request.body
-  const view = viewTemplate(viewToSave)
+
+  const existingTable = await db.get(ctx.request.body.tableId)
+  const table = cloneDeep(existingTable)
+
+  const groupByField: any = Object.values(table.schema).find(
+    (field: any) => field.name == viewToSave.groupBy
+  )
+
+  const view = viewTemplate(viewToSave, groupByField?.type === FieldTypes.ARRAY)
   const viewName = viewToSave.name
 
   if (!viewName) {
@@ -35,8 +43,6 @@ export async function save(ctx: BBContext) {
   await saveView(originalName, viewName, view)
 
   // add views to table document
-  const existingTable = await db.get(ctx.request.body.tableId)
-  const table = cloneDeep(existingTable)
   if (!table.views) table.views = {}
   if (!view.meta.schema) {
     view.meta.schema = table.schema
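
The reshuffle above exists so the table schema is available before the view template is built: the new second argument to `viewTemplate` flags whether the groupBy column is an array-type (multi-value) field. A minimal TypeScript sketch of that check, with names simplified from the diff:

```typescript
// Sketch: deciding whether a view groups on a multi-value column.
// The ARRAY constant and schema shape follow the diff; Field is simplified.
enum FieldTypes {
  ARRAY = "array",
  STRING = "string",
}

type Field = { name: string; type: FieldTypes }

function isMultiGroupBy(schema: Record<string, Field>, groupBy?: string): boolean {
  // Look the groupBy column up by name in the table schema
  const groupByField = Object.values(schema).find(f => f.name === groupBy)
  return groupByField?.type === FieldTypes.ARRAY
}
```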
@@ -6,6 +6,7 @@ type ViewTemplateOpts = {
   groupBy: string
   filters: ViewFilter[]
   calculation: string
+  groupByMulti: boolean
 }
 
 const TOKEN_MAP: Record<string, string> = {
@@ -41,6 +42,12 @@ const GROUP_PROPERTY: Record<string, { type: string }> = {
   },
 }
 
+const GROUP_PROPERTY_MULTI: Record<string, { type: string }> = {
+  group: {
+    type: "array",
+  },
+}
+
 const FIELD_PROPERTY: Record<string, { type: string }> = {
   field: {
     type: "string",
@@ -136,13 +143,10 @@ function parseEmitExpression(field: string, groupBy: string) {
  * filters: Array of filter objects containing predicates that are parsed into a JS expression
  * calculation: an optional calculation to be performed over the view data.
  */
-export = function ({
-  field,
-  tableId,
-  groupBy,
-  filters = [],
-  calculation,
-}: ViewTemplateOpts) {
+export = function (
+  { field, tableId, groupBy, filters = [], calculation }: ViewTemplateOpts,
+  groupByMulti?: boolean
+) {
   // first filter can't have a conjunction
   if (filters && filters.length > 0 && filters[0].conjunction) {
     delete filters[0].conjunction
@@ -151,9 +155,11 @@ export = function ({
   let schema = null,
     statFilter = null
 
+  let groupBySchema = groupByMulti ? GROUP_PROPERTY_MULTI : GROUP_PROPERTY
+
   if (calculation) {
     schema = {
-      ...(groupBy ? GROUP_PROPERTY : FIELD_PROPERTY),
+      ...(groupBy ? groupBySchema : FIELD_PROPERTY),
       ...SCHEMA_MAP[calculation],
     }
     if (
@@ -23,9 +23,6 @@ const MIN_ISO_DATE = "0000-00-00T00:00:00.000Z"
 const MAX_ISO_DATE = "9999-00-00T00:00:00.000Z"
 
 function likeKey(client: string, key: string): string {
-  if (!key.includes(" ")) {
-    return key
-  }
   let start: string, end: string
   switch (client) {
     case SqlClient.MY_SQL:
@@ -235,7 +232,9 @@ class InternalBuilder {
       } else {
         const rawFnc = `${fnc}Raw`
         // @ts-ignore
-        query = query[rawFnc](`LOWER(${key}) LIKE ?`, [`${value}%`])
+        query = query[rawFnc](`LOWER(${likeKey(this.client, key)}) LIKE ?`, [
+          `${value}%`,
+        ])
       }
     })
   }
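
The removed early-return meant keys without spaces were interpolated into `LIKE` unescaped, which broke on table names containing dashes; the fix is to run every key through `likeKey` (see the new tests further down). A rough sketch of the per-client quoting that function performs, inferred from the bracketed and backticked output in the updated test expectations:

```typescript
// Sketch of per-client identifier quoting for LIKE keys (MySQL and SQL Server
// only; the real function switches over more clients).
function likeKey(client: "mysql" | "mssql", key: string): string {
  const [start, end] = client === "mssql" ? ["[", "]"] : ["`", "`"]
  // Quote each dotted part: "Table-Name.name" -> "[Table-Name].[name]"
  return key
    .split(".")
    .map(part => `${start}${part}${end}`)
    .join(".")
}

console.log(likeKey("mssql", "Table-Name-With-Dashes.name"))
// [Table-Name-With-Dashes].[name]
```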
@@ -147,7 +147,8 @@ class MySQLIntegration extends Sql implements DatasourcePlus {
       if (
         field.type == "DATETIME" ||
         field.type === "DATE" ||
-        field.type === "TIMESTAMP"
+        field.type === "TIMESTAMP" ||
+        field.type === "LONGLONG"
       ) {
         return field.string()
       }
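
`LONGLONG` is the MySQL wire-protocol name for `BIGINT`. Returning such columns as strings avoids silent precision loss, since 64-bit integers overflow JavaScript's safe-integer range:

```typescript
// Why BIGINT/LONGLONG values are safer as strings: beyond 2^53 - 1,
// JavaScript numbers silently lose precision.
const big = 9223372036854775807n // max signed 64-bit value

console.log(Number.MAX_SAFE_INTEGER) // 9007199254740991
console.log(Number(big))             // 9223372036854776000 (rounded!)
console.log(big.toString())          // "9223372036854775807" (exact)
```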
@@ -352,7 +352,7 @@ describe("SQL query builder", () => {
     )
     expect(query).toEqual({
       bindings: [10, "%20%", "%25%", `%"John"%`, `%"Mary"%`],
-      sql: `select * from (select top (@p0) * from [${TABLE_NAME}] where (LOWER(${TABLE_NAME}.age) LIKE @p1 AND LOWER(${TABLE_NAME}.age) LIKE @p2) and (LOWER(${TABLE_NAME}.name) LIKE @p3 AND LOWER(${TABLE_NAME}.name) LIKE @p4)) as [${TABLE_NAME}]`,
+      sql: `select * from (select top (@p0) * from [${TABLE_NAME}] where (LOWER([${TABLE_NAME}].[age]) LIKE @p1 AND LOWER([${TABLE_NAME}].[age]) LIKE @p2) and (LOWER([${TABLE_NAME}].[name]) LIKE @p3 AND LOWER([${TABLE_NAME}].[name]) LIKE @p4)) as [${TABLE_NAME}]`,
     })
   })
 
@@ -403,7 +403,7 @@ describe("SQL query builder", () => {
     )
     expect(query).toEqual({
       bindings: [10, "%20%", `%"John"%`],
-      sql: `select * from (select top (@p0) * from [${TABLE_NAME}] where NOT (LOWER(${TABLE_NAME}.age) LIKE @p1) and NOT (LOWER(${TABLE_NAME}.name) LIKE @p2)) as [${TABLE_NAME}]`,
+      sql: `select * from (select top (@p0) * from [${TABLE_NAME}] where NOT (LOWER([${TABLE_NAME}].[age]) LIKE @p1) and NOT (LOWER([${TABLE_NAME}].[name]) LIKE @p2)) as [${TABLE_NAME}]`,
     })
   })
 
@@ -454,7 +454,7 @@ describe("SQL query builder", () => {
     )
     expect(query).toEqual({
       bindings: [10, "%20%", "%25%", `%"John"%`, `%"Mary"%`],
-      sql: `select * from (select top (@p0) * from [${TABLE_NAME}] where (LOWER(${TABLE_NAME}.age) LIKE @p1 OR LOWER(${TABLE_NAME}.age) LIKE @p2) and (LOWER(${TABLE_NAME}.name) LIKE @p3 OR LOWER(${TABLE_NAME}.name) LIKE @p4)) as [${TABLE_NAME}]`,
+      sql: `select * from (select top (@p0) * from [${TABLE_NAME}] where (LOWER([${TABLE_NAME}].[age]) LIKE @p1 OR LOWER([${TABLE_NAME}].[age]) LIKE @p2) and (LOWER([${TABLE_NAME}].[name]) LIKE @p3 OR LOWER([${TABLE_NAME}].[name]) LIKE @p4)) as [${TABLE_NAME}]`,
     })
   })
 
@@ -517,4 +517,40 @@ describe("SQL query builder", () => {
       sql: `select "stores"."store_id" as "stores.store_id", "stores"."store_name" as "stores.store_name", "products"."product_id" as "products.product_id", "products"."product_name" as "products.product_name" from (select * from "production"."stores" limit $1) as "stores" left join "production"."stocks" on "stores"."store_id" = "stocks"."store_id" left join "production"."products" on "products"."product_id" = "stocks"."product_id" limit $2`,
     })
   })
+
+  it("should handle table names with dashes when performing a LIKE in MySQL", () => {
+    const tableName = "Table-Name-With-Dashes"
+    const query = new Sql(SqlClient.MY_SQL, limit)._query(
+      generateReadJson({
+        table: tableName,
+        filters: {
+          string: {
+            name: "John",
+          },
+        },
+      })
+    )
+    expect(query).toEqual({
+      bindings: ["John%", limit],
+      sql: `select * from (select * from \`${tableName}\` where LOWER(\`${tableName}\`.\`name\`) LIKE ? limit ?) as \`${tableName}\``,
+    })
+  })
+
+  it("should handle table names with dashes when performing a LIKE in SQL Server", () => {
+    const tableName = "Table-Name-With-Dashes"
+    const query = new Sql(SqlClient.MS_SQL, limit)._query(
+      generateReadJson({
+        table: tableName,
+        filters: {
+          string: {
+            name: "John",
+          },
+        },
+      })
+    )
+    expect(query).toEqual({
+      bindings: [limit, "John%"],
+      sql: `select * from (select top (@p0) * from [${tableName}] where LOWER([${tableName}].[name]) LIKE @p1) as [${tableName}]`,
+    })
+  })
 })
@ -8,7 +8,6 @@ const ROW_ID_REGEX = /^\[.*]$/g
|
||||||
const SQL_NUMBER_TYPE_MAP = {
|
const SQL_NUMBER_TYPE_MAP = {
|
||||||
integer: FieldTypes.NUMBER,
|
integer: FieldTypes.NUMBER,
|
||||||
int: FieldTypes.NUMBER,
|
int: FieldTypes.NUMBER,
|
||||||
bigint: FieldTypes.NUMBER,
|
|
||||||
decimal: FieldTypes.NUMBER,
|
decimal: FieldTypes.NUMBER,
|
||||||
smallint: FieldTypes.NUMBER,
|
smallint: FieldTypes.NUMBER,
|
||||||
real: FieldTypes.NUMBER,
|
real: FieldTypes.NUMBER,
|
||||||
|
@ -47,6 +46,7 @@ const SQL_STRING_TYPE_MAP = {
|
||||||
blob: FieldTypes.STRING,
|
blob: FieldTypes.STRING,
|
||||||
long: FieldTypes.STRING,
|
long: FieldTypes.STRING,
|
||||||
text: FieldTypes.STRING,
|
text: FieldTypes.STRING,
|
||||||
|
bigint: FieldTypes.STRING,
|
||||||
}
|
}
|
||||||
|
|
||||||
const SQL_BOOLEAN_TYPE_MAP = {
|
const SQL_BOOLEAN_TYPE_MAP = {
|
||||||
|
@@ -141,12 +141,18 @@ export function breakRowIdField(_id: string | { _id: string }): any[] {
 export function convertSqlType(type: string) {
   let foundType = FieldTypes.STRING
   const lcType = type.toLowerCase()
+  let matchingTypes = []
   for (let [external, internal] of Object.entries(SQL_TYPE_MAP)) {
     if (lcType.includes(external)) {
-      foundType = internal
-      break
+      matchingTypes.push({ external, internal })
     }
   }
+  // Set the foundType based on the longest match
+  if (matchingTypes.length > 0) {
+    foundType = matchingTypes.reduce((acc, val) => {
+      return acc.external.length >= val.external.length ? acc : val
+    }).internal
+  }
   const schema: any = { type: foundType }
   if (foundType === FieldTypes.DATETIME) {
     schema.dateOnly = SQL_DATE_ONLY_TYPES.includes(lcType)
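The old loop took the first substring hit and broke out, so a column type like "bigint" could resolve through the shorter entry "int" depending on map iteration order. The rewrite collects every match and keeps the one with the longest external name. A minimal standalone sketch of that selection, using a cut-down illustrative map rather than the real SQL_TYPE_MAP:

    // Illustrative entries only, mirroring the shape of the real maps.
    const SQL_TYPE_MAP: Record<string, string> = {
      int: "number",
      integer: "number",
      bigint: "string",
      text: "string",
    }

    function convertSqlType(type: string): string {
      const lcType = type.toLowerCase()
      const matches = Object.entries(SQL_TYPE_MAP).filter(([external]) =>
        lcType.includes(external)
      )
      if (matches.length === 0) {
        return "string" // default, as in the original
      }
      // "bigint" matches both "int" and "bigint"; the longest external wins
      return matches.reduce((acc, val) =>
        acc[0].length >= val[0].length ? acc : val
      )[1]
    }

    console.log(convertSqlType("BIGINT"))  // "string", not "number"
    console.log(convertSqlType("int(11)")) // "number"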
@@ -89,7 +89,7 @@ class QueryRunner
     let query
     // handle SQL injections by interpolating the variables
     if (isSQL(datasourceClone)) {
-      query = interpolateSQL(fieldsClone, enrichedParameters, integration)
+      query = interpolateSQL(fieldsClone, enrichedContext, integration)
     } else {
       query = enrichQueryFields(fieldsClone, enrichedContext)
     }
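The fix here is one word: the SQL branch now receives enrichedContext, the same object the non-SQL branch already used, so both paths draw bindings from one enriched source. For intuition, a generic sketch of binding-style interpolation — an illustration of the technique, not Budibase's actual interpolateSQL:

    // Replace {{ variable }} tokens with positional placeholders and collect
    // the values as bindings, so user input is parameterised rather than
    // spliced into the SQL string.
    function interpolate(sql: string, context: Record<string, unknown>) {
      const bindings: unknown[] = []
      const interpolated = sql.replace(/{{\s*([\w.]+)\s*}}/g, (_, name) => {
        bindings.push(context[name])
        return `$${bindings.length}`
      })
      return { sql: interpolated, bindings }
    }

    const { sql, bindings } = interpolate(
      "select * from users where name = {{ name }}",
      { name: "Robert'); DROP TABLE users;--" }
    )
    // sql      -> "select * from users where name = $1"
    // bindings -> [ "Robert'); DROP TABLE users;--" ]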
@@ -1273,13 +1273,13 @@
   resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39"
   integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==

-"@budibase/backend-core@2.2.12":
-  version "2.2.12"
-  resolved "https://registry.yarnpkg.com/@budibase/backend-core/-/backend-core-2.2.12.tgz#d109a4196e2ca29319649b37b5ba9233e5f3b7ea"
-  integrity sha512-OgHZhHvpG02CRiJqQfAcXATkNgsi/mF6/cZ1gVCwRl7gccSCLIEUf17SkM1MRykdERU6R93Fkv0Z7LOrU6bn4A==
+"@budibase/backend-core@2.2.22":
+  version "2.2.22"
+  resolved "https://registry.yarnpkg.com/@budibase/backend-core/-/backend-core-2.2.22.tgz#24e7d27d861429918d3dd868f78c37e5b260c464"
+  integrity sha512-AGkzi46Yjwaa6RyqtFWzzrhRlamESnGpnbZO1PqEqhsAt9xFJjhqXk/H2i9eI7DtfcoZH57BY/sUGSG/Gka2xw==
   dependencies:
     "@budibase/nano" "10.1.1"
-    "@budibase/types" "^2.2.12"
+    "@budibase/types" "^2.2.22"
     "@shopify/jest-koa-mocks" "5.0.1"
     "@techpass/passport-openidconnect" "0.3.2"
     aws-sdk "2.1030.0"
@@ -1372,13 +1372,13 @@
     qs "^6.11.0"
     tough-cookie "^4.1.2"

-"@budibase/pro@2.2.12":
-  version "2.2.12"
-  resolved "https://registry.yarnpkg.com/@budibase/pro/-/pro-2.2.12.tgz#71d664718a8c0a11e1350ae2db91bc82b96850bc"
-  integrity sha512-weEUNepZSierkBi/EMhKiT52VAsdc2d7GxD1jvAU0NcLDm4C1lfVt8sBSZ7+RmQS4V9n5XMqgORcnV5HmLOUIg==
+"@budibase/pro@2.2.22":
+  version "2.2.22"
+  resolved "https://registry.yarnpkg.com/@budibase/pro/-/pro-2.2.22.tgz#36149df23388fc2363f2ae43e960e18e1a82c5de"
+  integrity sha512-xLLbMwCtumxEywQRjTo3NFEfUzCon8713a3AXUzqMmQScvLDqKvSBOAAFIo3Buadh2Ks+YY1zPBgI3ekhHm0tA==
   dependencies:
-    "@budibase/backend-core" "2.2.12"
-    "@budibase/types" "2.2.12"
+    "@budibase/backend-core" "2.2.22"
+    "@budibase/types" "2.2.22"
     "@koa/router" "8.0.8"
     bull "4.10.1"
     joi "17.6.0"
@@ -1402,10 +1402,10 @@
     svelte-apexcharts "^1.0.2"
     svelte-flatpickr "^3.1.0"

-"@budibase/types@2.2.12", "@budibase/types@^2.2.12":
-  version "2.2.12"
-  resolved "https://registry.yarnpkg.com/@budibase/types/-/types-2.2.12.tgz#a7e8e99bfa3e30f3c416b79636a2e5f4297017d3"
-  integrity sha512-2Tc74etbXi4bPAyGYdyLCbQzY0y3DhrEe6ZPSGLi2GzI2uePH7qlUKSaS8bz4cxbrXccZoq+DwzAbf9rL9pZww==
+"@budibase/types@2.2.22", "@budibase/types@^2.2.22":
+  version "2.2.22"
+  resolved "https://registry.yarnpkg.com/@budibase/types/-/types-2.2.22.tgz#eebe5bddde2235c5692a05f69718416888853b3a"
+  integrity sha512-wrvjyrKn8RXUFc5WfUhRxzJCiLHvA6qmxeuFVGT/S4B/CDu1yiWckKA/rDnIdU3WKpXA4MiXzYy6K02wv8TYlg==

 "@bull-board/api@3.7.0":
   version "3.7.0"
@@ -1,6 +1,6 @@
 {
   "name": "@budibase/string-templates",
-  "version": "2.2.12",
+  "version": "2.2.22",
   "description": "Handlebars wrapper for Budibase templating.",
   "main": "src/index.cjs",
   "module": "dist/bundle.mjs",
@@ -1,6 +1,6 @@
 {
   "name": "@budibase/types",
-  "version": "2.2.12",
+  "version": "2.2.22",
   "description": "Budibase types",
   "main": "dist/index.js",
   "types": "dist/index.d.ts",
@@ -1,7 +1,7 @@
 {
   "name": "@budibase/worker",
   "email": "hi@budibase.com",
-  "version": "2.2.12",
+  "version": "2.2.22",
   "description": "Budibase background service",
   "main": "src/index.ts",
   "repository": {
@@ -36,10 +36,10 @@
   "author": "Budibase",
   "license": "GPL-3.0",
   "dependencies": {
-    "@budibase/backend-core": "^2.2.12",
-    "@budibase/pro": "2.2.12",
-    "@budibase/string-templates": "^2.2.12",
-    "@budibase/types": "^2.2.12",
+    "@budibase/backend-core": "^2.2.22",
+    "@budibase/pro": "2.2.22",
+    "@budibase/string-templates": "^2.2.22",
+    "@budibase/types": "^2.2.22",
     "@koa/router": "8.0.8",
     "@sentry/node": "6.17.7",
     "@techpass/passport-openidconnect": "0.3.2",
@@ -470,13 +470,13 @@
   resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39"
   integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==

-"@budibase/backend-core@2.2.12":
-  version "2.2.12"
-  resolved "https://registry.yarnpkg.com/@budibase/backend-core/-/backend-core-2.2.12.tgz#d109a4196e2ca29319649b37b5ba9233e5f3b7ea"
-  integrity sha512-OgHZhHvpG02CRiJqQfAcXATkNgsi/mF6/cZ1gVCwRl7gccSCLIEUf17SkM1MRykdERU6R93Fkv0Z7LOrU6bn4A==
+"@budibase/backend-core@2.2.22":
+  version "2.2.22"
+  resolved "https://registry.yarnpkg.com/@budibase/backend-core/-/backend-core-2.2.22.tgz#24e7d27d861429918d3dd868f78c37e5b260c464"
+  integrity sha512-AGkzi46Yjwaa6RyqtFWzzrhRlamESnGpnbZO1PqEqhsAt9xFJjhqXk/H2i9eI7DtfcoZH57BY/sUGSG/Gka2xw==
   dependencies:
     "@budibase/nano" "10.1.1"
-    "@budibase/types" "^2.2.12"
+    "@budibase/types" "^2.2.22"
     "@shopify/jest-koa-mocks" "5.0.1"
     "@techpass/passport-openidconnect" "0.3.2"
     aws-sdk "2.1030.0"
@@ -519,22 +519,22 @@
     qs "^6.11.0"
     tough-cookie "^4.1.2"

-"@budibase/pro@2.2.12":
-  version "2.2.12"
-  resolved "https://registry.yarnpkg.com/@budibase/pro/-/pro-2.2.12.tgz#71d664718a8c0a11e1350ae2db91bc82b96850bc"
-  integrity sha512-weEUNepZSierkBi/EMhKiT52VAsdc2d7GxD1jvAU0NcLDm4C1lfVt8sBSZ7+RmQS4V9n5XMqgORcnV5HmLOUIg==
+"@budibase/pro@2.2.22":
+  version "2.2.22"
+  resolved "https://registry.yarnpkg.com/@budibase/pro/-/pro-2.2.22.tgz#36149df23388fc2363f2ae43e960e18e1a82c5de"
+  integrity sha512-xLLbMwCtumxEywQRjTo3NFEfUzCon8713a3AXUzqMmQScvLDqKvSBOAAFIo3Buadh2Ks+YY1zPBgI3ekhHm0tA==
   dependencies:
-    "@budibase/backend-core" "2.2.12"
-    "@budibase/types" "2.2.12"
+    "@budibase/backend-core" "2.2.22"
+    "@budibase/types" "2.2.22"
     "@koa/router" "8.0.8"
     bull "4.10.1"
     joi "17.6.0"
     node-fetch "^2.6.1"

-"@budibase/types@2.2.12", "@budibase/types@^2.2.12":
-  version "2.2.12"
-  resolved "https://registry.yarnpkg.com/@budibase/types/-/types-2.2.12.tgz#a7e8e99bfa3e30f3c416b79636a2e5f4297017d3"
-  integrity sha512-2Tc74etbXi4bPAyGYdyLCbQzY0y3DhrEe6ZPSGLi2GzI2uePH7qlUKSaS8bz4cxbrXccZoq+DwzAbf9rL9pZww==
+"@budibase/types@2.2.22", "@budibase/types@^2.2.22":
+  version "2.2.22"
+  resolved "https://registry.yarnpkg.com/@budibase/types/-/types-2.2.22.tgz#eebe5bddde2235c5692a05f69718416888853b3a"
+  integrity sha512-wrvjyrKn8RXUFc5WfUhRxzJCiLHvA6qmxeuFVGT/S4B/CDu1yiWckKA/rDnIdU3WKpXA4MiXzYy6K02wv8TYlg==

 "@cspotcode/source-map-support@^0.8.0":
   version "0.8.1"
@@ -1,111 +0,0 @@
-#!/usr/bin/env node
-const path = require("path")
-const fs = require("fs")
-
-function processStringSync(string, env) {
-  let output = ""
-
-  // process if statements
-  let removal = false
-  for (let line of string.split("\n")) {
-    if (new RegExp(`{{\/if}}`, "g").test(line)) {
-      removal = false
-      continue
-    }
-
-    if (!removal) {
-      const match = line.match(new RegExp(`{{#if (.*)}}`))
-      if (match) {
-        const key = match[1]
-        // check the if statement is true
-        if (!env[key]) {
-          removal = true
-        }
-        continue
-      }
-      output += line + "\n"
-    }
-  }
-
-  for (let key in env) {
-    // replace variables
-    const rgx = new RegExp(`{{\\s*${key}\\s*}}`, "g")
-    output = output.replace(rgx, env[key])
-  }
-
-  return output
-}
-
-const Configs = {
-  prod: {
-    apps: "app-service.budibase.svc.cluster.local",
-    worker: "worker-service.budibase.svc.cluster.local",
-    minio: "minio-service.budibase.svc.cluster.local",
-    couchdb: "budibase-prod-svc-couchdb",
-    resolver: "kube-dns.kube-system.svc.cluster.local"
-  },
-  preprod: {
-    apps: "app-service.budibase.svc.cluster.local",
-    worker: "worker-service.budibase.svc.cluster.local",
-    minio: "minio-service.budibase.svc.cluster.local",
-    couchdb: "budibase-preprod-svc-couchdb",
-    resolver: "kube-dns.kube-system.svc.cluster.local"
-  },
-  release: {
-    apps: "app-service.budibase.svc.cluster.local",
-    worker: "worker-service.budibase.svc.cluster.local",
-    minio: "minio-service.budibase.svc.cluster.local",
-    couchdb: "budibase-release-svc-couchdb",
-    resolver: "kube-dns.kube-system.svc.cluster.local"
-  },
-  compose: {
-    apps: "app-service",
-    worker: "worker-service",
-    minio: "minio-service",
-    couchdb: "couchdb-service",
-    watchtower: "watchtower-service",
-    resolver: "127.0.0.11"
-  },
-}
-
-const Commands = {
-  Prod: "prod",
-  Preprod: "preprod",
-  Release: "release",
-  Compose: "compose",
-}
-
-async function init(managementCommand) {
-  const config = Configs[managementCommand]
-  const hostingPath = path.join(process.cwd(), "hosting")
-  const nginxHbsPath = path.join(hostingPath, "nginx.prod.conf.hbs")
-  const nginxOutputPath = path.join(
-    hostingPath,
-    "proxy",
-    ".generated-nginx.prod.conf"
-  )
-  const contents = fs.readFileSync(nginxHbsPath, "utf8")
-  fs.writeFileSync(nginxOutputPath, processStringSync(contents, config))
-}
-
-const managementCommand = process.argv.slice(2)[0]
-
-if (
-  !managementCommand ||
-  !Object.values(Commands).some(command => managementCommand === command)
-) {
-  throw new Error(
-    "You must supply either a 'compose', 'preprod' or 'prod' command to generate an NGINX config."
-  )
-}
-
-init(managementCommand)
-  .then(() => {
-    console.log("Done! 🎉")
-  })
-  .catch(err => {
-    console.error(
-      "Something went wrong while creating the nginx configuration",
-      err.message
-    )
-  })
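For reference, the deleted file was a zero-dependency stand-in for Handlebars: invoked with one of the four commands, it rendered hosting/nginx.prod.conf.hbs into hosting/proxy/.generated-nginx.prod.conf using that command's service names (the script's own path in the repo isn't shown in this diff). A usage sketch with hypothetical template text, following the processStringSync semantics above:

    // {{#if key}} ... {{/if}} keeps the enclosed lines only when the config
    // defines `key`; {{ key }} tokens are then substituted everywhere.
    const template = [
      "{{#if watchtower}}",
      "  proxy_pass http://{{ watchtower }}:8080;",
      "{{/if}}",
      "resolver {{ resolver }} valid=10s;",
    ].join("\n")

    processStringSync(template, Configs.compose)
    // "  proxy_pass http://watchtower-service:8080;\nresolver 127.0.0.11 valid=10s;\n"

    processStringSync(template, Configs.prod)
    // prod defines no `watchtower` key, so the guarded line is dropped:
    // "resolver kube-dns.kube-system.svc.cluster.local valid=10s;\n"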