diff --git a/.github/workflows/budibase_ci.yml b/.github/workflows/budibase_ci.yml
index 6120290d0d..fd4d8cf7c8 100644
--- a/.github/workflows/budibase_ci.yml
+++ b/.github/workflows/budibase_ci.yml
@@ -64,10 +64,11 @@ jobs:
- run: yarn --frozen-lockfile
# Run build all the projects
- - name: Build
- run: |
- yarn build:oss
- yarn build:account-portal
+ - name: Build OSS
+ run: yarn build:oss
+ - name: Build account portal
+ run: yarn build:account-portal
+ if: ${{ env.IS_OSS_CONTRIBUTOR == 'false' }}
# Check the types of the projects built via esbuild
- name: Check types
run: |
diff --git a/charts/budibase/README.md b/charts/budibase/README.md
index dea7d1dbae..207992087d 100644
--- a/charts/budibase/README.md
+++ b/charts/budibase/README.md
@@ -150,6 +150,10 @@ $ helm install --create-namespace --namespace budibase budibase . -f values.yaml
| services.apps.autoscaling.maxReplicas | int | `10` | |
| services.apps.autoscaling.minReplicas | int | `1` | |
| services.apps.autoscaling.targetCPUUtilizationPercentage | int | `80` | Target CPU utilization percentage for the apps service. Note that for autoscaling to work, you will need to have metrics-server configured, and resources set for the apps pods. |
+| services.apps.extraContainers | list | `[]` | Additional containers to be added to the apps pod. |
+| services.apps.extraEnv | list | `[]` | Extra environment variables to set for apps pods. Takes a list of name=value pairs. |
+| services.apps.extraVolumeMounts | list | `[]` | Additional volumeMounts to the main apps container. |
+| services.apps.extraVolumes | list | `[]` | Additional volumes to the apps pod. |
| services.apps.httpLogging | int | `1` | Whether or not to log HTTP requests to the apps service. |
| services.apps.livenessProbe | object | HTTP health checks. | Liveness probe configuration for apps pods. You shouldn't need to change this, but if you want to you can find more information here: |
| services.apps.logLevel | string | `"info"` | The log level for the apps service. |
@@ -162,6 +166,10 @@ $ helm install --create-namespace --namespace budibase budibase . -f values.yaml
| services.automationWorkers.autoscaling.minReplicas | int | `1` | |
| services.automationWorkers.autoscaling.targetCPUUtilizationPercentage | int | `80` | Target CPU utilization percentage for the automation worker service. Note that for autoscaling to work, you will need to have metrics-server configured, and resources set for the automation worker pods. |
| services.automationWorkers.enabled | bool | `true` | Whether or not to enable the automation worker service. If you disable this, automations will be processed by the apps service. |
+| services.automationWorkers.extraContainers | list | `[]` | Additional containers to be added to the automationWorkers pod. |
+| services.automationWorkers.extraEnv | list | `[]` | Extra environment variables to set for automation worker pods. Takes a list of name=value pairs. |
+| services.automationWorkers.extraVolumeMounts | list | `[]` | Additional volumeMounts to the main automationWorkers container. |
+| services.automationWorkers.extraVolumes | list | `[]` | Additional volumes to the automationWorkers pod. |
| services.automationWorkers.livenessProbe | object | HTTP health checks. | Liveness probe configuration for automation worker pods. You shouldn't need to change this, but if you want to you can find more information here: |
| services.automationWorkers.logLevel | string | `"info"` | The log level for the automation worker service. |
| services.automationWorkers.readinessProbe | object | HTTP health checks. | Readiness probe configuration for automation worker pods. You shouldn't need to change this, but if you want to you can find more information here: |
@@ -180,6 +188,9 @@ $ helm install --create-namespace --namespace budibase budibase . -f values.yaml
| services.objectStore.cloudfront.cdn | string | `""` | Set the url of a distribution to enable cloudfront. |
| services.objectStore.cloudfront.privateKey64 | string | `""` | Base64 encoded private key for the above public key. |
| services.objectStore.cloudfront.publicKeyId | string | `""` | ID of public key stored in cloudfront. |
+| services.objectStore.extraContainers | list | `[]` | Additional containers to be added to the objectStore pod. |
+| services.objectStore.extraVolumeMounts | list | `[]` | Additional volumeMounts to the main objectStore container. |
+| services.objectStore.extraVolumes | list | `[]` | Additional volumes to the objectStore pod. |
| services.objectStore.minio | bool | `true` | Set to false if using another object store, such as S3. You will need to set `services.objectStore.url` to point to your bucket if you do this. |
| services.objectStore.region | string | `""` | AWS_REGION if using S3 |
| services.objectStore.resources | object | `{}` | The resources to use for Minio pods. See for more information on how to set these. |
@@ -191,12 +202,19 @@ $ helm install --create-namespace --namespace budibase budibase . -f values.yaml
| services.proxy.autoscaling.maxReplicas | int | `10` | |
| services.proxy.autoscaling.minReplicas | int | `1` | |
| services.proxy.autoscaling.targetCPUUtilizationPercentage | int | `80` | Target CPU utilization percentage for the proxy service. Note that for autoscaling to work, you will need to have metrics-server configured, and resources set for the proxy pods. |
+| services.proxy.extraContainers | list | `[]` | Additional containers to be added to the proxy pod. |
+| services.proxy.extraVolumeMounts | list | `[]` | Additional volumeMounts to the main proxy container. |
+| services.proxy.extraVolumes | list | `[]` | Additional volumes to the proxy pod. |
| services.proxy.livenessProbe | object | HTTP health checks. | Liveness probe configuration for proxy pods. You shouldn't need to change this, but if you want to you can find more information here: |
| services.proxy.readinessProbe | object | HTTP health checks. | Readiness probe configuration for proxy pods. You shouldn't need to change this, but if you want to you can find more information here: |
| services.proxy.replicaCount | int | `1` | The number of proxy replicas to run. |
| services.proxy.resources | object | `{}` | The resources to use for proxy pods. See for more information on how to set these. |
| services.proxy.startupProbe | object | HTTP health checks. | Startup probe configuration for proxy pods. You shouldn't need to change this, but if you want to you can find more information here: |
| services.redis.enabled | bool | `true` | Whether or not to deploy a Redis pod into your cluster. |
+| services.redis.extraContainers | list | `[]` | Additional containers to be added to the redis pod. |
+| services.redis.extraVolumeMounts | list | `[]` | Additional volumeMounts to the main redis container. |
+| services.redis.extraVolumes | list | `[]` | Additional volumes to the redis pod. |
+| services.redis.image | string | `"redis"` | The Redis image to use. |
| services.redis.password | string | `"budibase"` | The password to use when connecting to Redis. It's recommended that you change this from the default if you're running Redis in-cluster. |
| services.redis.port | int | `6379` | Port to expose Redis on. |
| services.redis.resources | object | `{}` | The resources to use for Redis pods. See for more information on how to set these. |
@@ -207,6 +225,10 @@ $ helm install --create-namespace --namespace budibase budibase . -f values.yaml
| services.worker.autoscaling.maxReplicas | int | `10` | |
| services.worker.autoscaling.minReplicas | int | `1` | |
| services.worker.autoscaling.targetCPUUtilizationPercentage | int | `80` | Target CPU utilization percentage for the worker service. Note that for autoscaling to work, you will need to have metrics-server configured, and resources set for the worker pods. |
+| services.worker.extraContainers | list | `[]` | Additional containers to be added to the worker pod. |
+| services.worker.extraEnv | list | `[]` | Extra environment variables to set for worker pods. Takes a list of name=value pairs. |
+| services.worker.extraVolumeMounts | list | `[]` | Additional volumeMounts to the main worker container. |
+| services.worker.extraVolumes | list | `[]` | Additional volumes to the worker pod. |
| services.worker.httpLogging | int | `1` | Whether or not to log HTTP requests to the worker service. |
| services.worker.livenessProbe | object | HTTP health checks. | Liveness probe configuration for worker pods. You shouldn't need to change this, but if you want to you can find more information here: |
| services.worker.logLevel | string | `"info"` | The log level for the worker service. |
@@ -225,4 +247,4 @@ $ helm uninstall --namespace budibase budibase
```
----------------------------------------------
-Autogenerated from chart metadata using [helm-docs v1.11.3](https://github.com/norwoodj/helm-docs/releases/v1.11.3)
+Autogenerated from chart metadata using [helm-docs v1.13.1](https://github.com/norwoodj/helm-docs/releases/v1.13.1)
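
For reference, a sketch of how the new `extra*` values fit together, using the placeholder names from the chart's own comments (`my-sidecar`, `my-volume`, `myimage:latest` are examples, not required values):

```yaml
# values.yaml (excerpt)
services:
  apps:
    extraContainers:
      - name: my-sidecar
        image: myimage:latest
    extraVolumes:
      - name: my-volume
        emptyDir: {}
    extraVolumeMounts:
      - name: my-volume
        mountPath: /path/to/mount
```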
diff --git a/charts/budibase/templates/app-service-deployment.yaml b/charts/budibase/templates/app-service-deployment.yaml
index c7c4481122..b380908dd1 100644
--- a/charts/budibase/templates/app-service-deployment.yaml
+++ b/charts/budibase/templates/app-service-deployment.yaml
@@ -235,6 +235,13 @@ spec:
args:
{{- toYaml .Values.services.apps.args | nindent 10 }}
{{ end }}
+ {{ if .Values.services.apps.extraVolumeMounts }}
+ volumeMounts:
+ {{- toYaml .Values.services.apps.extraVolumeMounts | nindent 10 }}
+ {{- end }}
+ {{- if .Values.services.apps.extraContainers }}
+ {{- toYaml .Values.services.apps.extraContainers | nindent 6 }}
+ {{- end }}
{{- with .Values.affinity }}
affinity:
{{- toYaml . | nindent 8 }}
@@ -258,4 +265,8 @@ spec:
- name: ndots
value: {{ .Values.services.apps.ndots | quote }}
{{ end }}
+ {{ if .Values.services.apps.extraVolumes }}
+ volumes:
+ {{- toYaml .Values.services.apps.extraVolumes | nindent 6 }}
+ {{- end }}
status: {}
diff --git a/charts/budibase/templates/automation-worker-service-deployment.yaml b/charts/budibase/templates/automation-worker-service-deployment.yaml
index 36c3a8ffbf..51fa9ee4bb 100644
--- a/charts/budibase/templates/automation-worker-service-deployment.yaml
+++ b/charts/budibase/templates/automation-worker-service-deployment.yaml
@@ -235,6 +235,13 @@ spec:
args:
{{- toYaml .Values.services.automationWorkers.args | nindent 10 }}
{{ end }}
+ {{ if .Values.services.automationWorkers.extraVolumeMounts }}
+ volumeMounts:
+ {{- toYaml .Values.services.automationWorkers.extraVolumeMounts | nindent 10 }}
+ {{ end }}
+ {{- if .Values.services.automationWorkers.extraContainers }}
+ {{- toYaml .Values.services.automationWorkers.extraContainers | nindent 6 }}
+ {{- end }}
{{- with .Values.affinity }}
affinity:
{{- toYaml . | nindent 8 }}
@@ -258,5 +265,9 @@ spec:
- name: ndots
value: {{ .Values.services.automationWorkers.ndots | quote }}
{{ end }}
+ {{ if .Values.services.automationWorkers.extraVolumes }}
+ volumes:
+      {{- toYaml .Values.services.automationWorkers.extraVolumes | nindent 6 }}
+ {{ end }}
status: {}
{{- end }}
\ No newline at end of file
diff --git a/charts/budibase/templates/minio-service-deployment.yaml b/charts/budibase/templates/minio-service-deployment.yaml
index 28e8eb9991..901ead2b46 100644
--- a/charts/budibase/templates/minio-service-deployment.yaml
+++ b/charts/budibase/templates/minio-service-deployment.yaml
@@ -54,6 +54,12 @@ spec:
volumeMounts:
- mountPath: /data
name: minio-data
+ {{ if .Values.services.objectStore.extraVolumeMounts }}
+ {{- toYaml .Values.services.objectStore.extraVolumeMounts | nindent 8 }}
+ {{- end }}
+ {{- if .Values.services.objectStore.extraContainers }}
+ {{- toYaml .Values.services.objectStore.extraContainers | nindent 6 }}
+ {{- end }}
{{- with .Values.affinity }}
affinity:
{{- toYaml . | nindent 8 }}
@@ -75,5 +81,8 @@ spec:
- name: minio-data
persistentVolumeClaim:
claimName: minio-data
+ {{ if .Values.services.objectStore.extraVolumes }}
+ {{- toYaml .Values.services.objectStore.extraVolumes | nindent 6 }}
+ {{- end }}
status: {}
{{- end }}
diff --git a/charts/budibase/templates/proxy-service-deployment.yaml b/charts/budibase/templates/proxy-service-deployment.yaml
index 233028cafe..d5ea696431 100644
--- a/charts/budibase/templates/proxy-service-deployment.yaml
+++ b/charts/budibase/templates/proxy-service-deployment.yaml
@@ -82,7 +82,13 @@ spec:
resources:
{{- toYaml . | nindent 10 }}
{{ end }}
+ {{ if .Values.services.proxy.extraVolumeMounts }}
volumeMounts:
+ {{- toYaml .Values.services.proxy.extraVolumeMounts | nindent 8 }}
+ {{- end }}
+ {{- if .Values.services.proxy.extraContainers }}
+ {{- toYaml .Values.services.proxy.extraContainers | nindent 6 }}
+ {{- end }}
{{- with .Values.affinity }}
affinity:
{{- toYaml . | nindent 8 }}
@@ -108,7 +114,10 @@ spec:
args:
{{- toYaml .Values.services.proxy.args | nindent 8 }}
{{ end }}
+ {{ if .Values.services.proxy.extraVolumes }}
volumes:
+ {{- toYaml .Values.services.proxy.extraVolumes | nindent 6 }}
+ {{ end }}
{{ if .Values.services.proxy.ndots }}
dnsConfig:
options:
diff --git a/charts/budibase/templates/redis-service-deployment.yaml b/charts/budibase/templates/redis-service-deployment.yaml
index bca40d2237..9ad12e0167 100644
--- a/charts/budibase/templates/redis-service-deployment.yaml
+++ b/charts/budibase/templates/redis-service-deployment.yaml
@@ -22,7 +22,7 @@ spec:
- redis-server
- --requirepass
- {{ .Values.services.redis.password }}
- image: redis
+ image: {{ .Values.services.redis.image }}
imagePullPolicy: ""
name: redis-service
ports:
@@ -34,6 +34,12 @@ spec:
volumeMounts:
- mountPath: /data
name: redis-data
+ {{ if .Values.services.redis.extraVolumeMounts }}
+ {{- toYaml .Values.services.redis.extraVolumeMounts | nindent 8 }}
+ {{- end }}
+ {{- if .Values.services.redis.extraContainers }}
+ {{- toYaml .Values.services.redis.extraContainers | nindent 6 }}
+ {{- end }}
{{- with .Values.affinity }}
affinity:
{{- toYaml . | nindent 8 }}
@@ -55,6 +61,9 @@ spec:
- name: redis-data
persistentVolumeClaim:
claimName: redis-data
+ {{ if .Values.services.redis.extraVolumes }}
+ {{- toYaml .Values.services.redis.extraVolumes | nindent 6 }}
+ {{- end }}
status: {}
{{- end }}
diff --git a/charts/budibase/templates/worker-service-deployment.yaml b/charts/budibase/templates/worker-service-deployment.yaml
index 2f97508ae3..e37b2bc0e4 100644
--- a/charts/budibase/templates/worker-service-deployment.yaml
+++ b/charts/budibase/templates/worker-service-deployment.yaml
@@ -221,6 +221,13 @@ spec:
args:
{{- toYaml .Values.services.worker.args | nindent 10 }}
{{ end }}
+ {{ if .Values.services.worker.extraVolumeMounts }}
+ volumeMounts:
+ {{- toYaml .Values.services.worker.extraVolumeMounts | nindent 10 }}
+ {{- end }}
+ {{- if .Values.services.worker.extraContainers }}
+ {{- toYaml .Values.services.worker.extraContainers | nindent 6 }}
+ {{- end }}
{{- with .Values.affinity }}
affinity:
{{- toYaml . | nindent 8 }}
@@ -244,4 +251,8 @@ spec:
- name: ndots
value: {{ .Values.services.worker.ndots | quote }}
{{ end }}
+ {{ if .Values.services.worker.extraVolumes }}
+ volumes:
+ {{- toYaml .Values.services.worker.extraVolumes | nindent 6 }}
+ {{- end }}
status: {}
diff --git a/charts/budibase/values.yaml b/charts/budibase/values.yaml
index 19b6c22d6c..9ace768625 100644
--- a/charts/budibase/values.yaml
+++ b/charts/budibase/values.yaml
@@ -206,6 +206,20 @@ services:
# for autoscaling to work, you will need to have metrics-server
# configured, and resources set for the proxy pods.
targetCPUUtilizationPercentage: 80
+ # -- Additional containers to be added to the proxy pod.
+ extraContainers: []
+ # - name: my-sidecar
+ # image: myimage:latest
+
+ # -- Additional volumeMounts to the main proxy container.
+ extraVolumeMounts: []
+ # - name: my-volume
+ # mountPath: /path/to/mount
+
+ # -- Additional volumes to the proxy pod.
+ extraVolumes: []
+ # - name: my-volume
+ # emptyDir: {}
apps:
# @ignore (you shouldn't need to change this)
@@ -274,6 +288,20 @@ services:
# autoscaling to work, you will need to have metrics-server configured,
# and resources set for the apps pods.
targetCPUUtilizationPercentage: 80
+ # -- Additional containers to be added to the apps pod.
+ extraContainers: []
+ # - name: my-sidecar
+ # image: myimage:latest
+
+ # -- Additional volumeMounts to the main apps container.
+ extraVolumeMounts: []
+ # - name: my-volume
+ # mountPath: /path/to/mount
+
+ # -- Additional volumes to the apps pod.
+ extraVolumes: []
+ # - name: my-volume
+ # emptyDir: {}
automationWorkers:
# -- Whether or not to enable the automation worker service. If you disable this,
@@ -346,6 +374,20 @@ services:
# Note that for autoscaling to work, you will need to have metrics-server
# configured, and resources set for the automation worker pods.
targetCPUUtilizationPercentage: 80
+ # -- Additional containers to be added to the automationWorkers pod.
+ extraContainers: []
+ # - name: my-sidecar
+ # image: myimage:latest
+
+ # -- Additional volumeMounts to the main automationWorkers container.
+ extraVolumeMounts: []
+ # - name: my-volume
+ # mountPath: /path/to/mount
+
+ # -- Additional volumes to the automationWorkers pod.
+ extraVolumes: []
+ # - name: my-volume
+ # emptyDir: {}
worker:
# @ignore (you shouldn't need to change this)
@@ -414,6 +456,20 @@ services:
# for autoscaling to work, you will need to have metrics-server
# configured, and resources set for the worker pods.
targetCPUUtilizationPercentage: 80
+ # -- Additional containers to be added to the worker pod.
+ extraContainers: []
+ # - name: my-sidecar
+ # image: myimage:latest
+
+ # -- Additional volumeMounts to the main worker container.
+ extraVolumeMounts: []
+ # - name: my-volume
+ # mountPath: /path/to/mount
+
+ # -- Additional volumes to the worker pod.
+ extraVolumes: []
+ # - name: my-volume
+ # emptyDir: {}
couchdb:
# -- Whether or not to spin up a CouchDB instance in your cluster. True by
@@ -440,6 +496,8 @@ services:
resources: {}
redis:
+ # -- The Redis image to use.
+ image: redis
# -- Whether or not to deploy a Redis pod into your cluster.
enabled: true
# -- Port to expose Redis on.
@@ -463,6 +521,20 @@ services:
#
# for more information on how to set these.
resources: {}
+ # -- Additional containers to be added to the redis pod.
+ extraContainers: []
+ # - name: my-sidecar
+ # image: myimage:latest
+
+ # -- Additional volumeMounts to the main redis container.
+ extraVolumeMounts: []
+ # - name: my-volume
+ # mountPath: /path/to/mount
+
+ # -- Additional volumes to the redis pod.
+ extraVolumes: []
+ # - name: my-volume
+ # emptyDir: {}
objectStore:
# -- Set to false if using another object store, such as S3. You will need
@@ -488,7 +560,7 @@ services:
# do this.
url: "http://minio-service:9000"
# -- How much storage to give Minio in its PersistentVolumeClaim.
- storage: 100Mi
+ storage: 2Gi
# -- If defined, storageClassName: If set to "-",
# storageClassName: "", which disables dynamic provisioning If undefined
# (the default) or set to null, no storageClassName spec is set, choosing
@@ -505,6 +577,20 @@ services:
publicKeyId: ""
# -- Base64 encoded private key for the above public key.
privateKey64: ""
+ # -- Additional containers to be added to the objectStore pod.
+ extraContainers: []
+ # - name: my-sidecar
+ # image: myimage:latest
+
+ # -- Additional volumeMounts to the main objectStore container.
+ extraVolumeMounts: []
+ # - name: my-volume
+ # mountPath: /path/to/mount
+
+ # -- Additional volumes to the objectStore pod.
+ extraVolumes: []
+ # - name: my-volume
+ # emptyDir: {}
# Override values in couchDB subchart. We're only specifying the values we're changing.
# If you want to see all of the available values, see:
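
A hedged example of overriding the newly configurable Redis image alongside the larger Minio default (the tag shown is an assumption; use whatever image your cluster can pull):

```yaml
# values.yaml (excerpt)
services:
  redis:
    image: redis:7.2-alpine  # any reachable Redis image/tag
  objectStore:
    storage: 5Gi             # Minio PersistentVolumeClaim size (chart default is now 2Gi)
```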
diff --git a/lerna.json b/lerna.json
index 385d86209a..9839b8b166 100644
--- a/lerna.json
+++ b/lerna.json
@@ -1,5 +1,5 @@
{
- "version": "2.23.3",
+ "version": "2.23.5",
"npmClient": "yarn",
"packages": [
"packages/*",
diff --git a/package.json b/package.json
index 2816247939..e520b7c2cf 100644
--- a/package.json
+++ b/package.json
@@ -56,6 +56,7 @@
"dev:noserver": "yarn run kill-builder && lerna run --stream dev:stack:up --ignore @budibase/account-portal-server && lerna run --stream dev --ignore @budibase/backend-core --ignore @budibase/server --ignore @budibase/worker --ignore=@budibase/account-portal-ui --ignore @budibase/account-portal-server",
"dev:server": "yarn run kill-server && lerna run --stream dev --scope @budibase/worker --scope @budibase/server",
"dev:accountportal": "yarn kill-accountportal && lerna run dev --stream --scope @budibase/account-portal-ui --scope @budibase/account-portal-server",
+ "dev:camunda": "./scripts/deploy-camunda.sh",
"dev:all": "yarn run kill-all && lerna run --stream dev",
"dev:built": "yarn run kill-all && cd packages/server && yarn dev:stack:up && cd ../../ && lerna run --stream dev:built",
"dev:docker": "yarn build --scope @budibase/server --scope @budibase/worker && docker-compose -f hosting/docker-compose.build.yaml -f hosting/docker-compose.dev.yaml --env-file hosting/.env up --build --scale proxy-service=0",
diff --git a/packages/account-portal b/packages/account-portal
index a0ee9cad8c..bd0e01d639 160000
--- a/packages/account-portal
+++ b/packages/account-portal
@@ -1 +1 @@
-Subproject commit a0ee9cad8cefb8f9f40228705711be174f018fa9
+Subproject commit bd0e01d639ec3b2547e7c859a1c43b622dce8344
diff --git a/packages/builder/src/components/design/settings/controls/ButtonActionEditor/actions/DownloadFile.svelte b/packages/builder/src/components/design/settings/controls/ButtonActionEditor/actions/DownloadFile.svelte
new file mode 100644
index 0000000000..babaf2815b
--- /dev/null
+++ b/packages/builder/src/components/design/settings/controls/ButtonActionEditor/actions/DownloadFile.svelte
@@ -0,0 +1,110 @@
+[110 lines of Svelte markup elided in this excerpt: the parameter editor for the "Download File" button action, including a {#if parameters.type === "attachment"} branch for the attachment-specific fields]
diff --git a/packages/builder/src/components/design/settings/controls/ButtonActionEditor/actions/index.js b/packages/builder/src/components/design/settings/controls/ButtonActionEditor/actions/index.js
index eb354d6557..587993377d 100644
--- a/packages/builder/src/components/design/settings/controls/ButtonActionEditor/actions/index.js
+++ b/packages/builder/src/components/design/settings/controls/ButtonActionEditor/actions/index.js
@@ -22,3 +22,4 @@ export { default as PromptUser } from "./PromptUser.svelte"
export { default as OpenSidePanel } from "./OpenSidePanel.svelte"
export { default as CloseSidePanel } from "./CloseSidePanel.svelte"
export { default as ClearRowSelection } from "./ClearRowSelection.svelte"
+export { default as DownloadFile } from "./DownloadFile.svelte"
diff --git a/packages/builder/src/components/design/settings/controls/ButtonActionEditor/manifest.json b/packages/builder/src/components/design/settings/controls/ButtonActionEditor/manifest.json
index 9391baf3dc..2840a0d662 100644
--- a/packages/builder/src/components/design/settings/controls/ButtonActionEditor/manifest.json
+++ b/packages/builder/src/components/design/settings/controls/ButtonActionEditor/manifest.json
@@ -161,6 +161,11 @@
"name": "Clear Row Selection",
"type": "data",
"component": "ClearRowSelection"
+ },
+ {
+ "name": "Download File",
+ "type": "data",
+ "component": "DownloadFile"
}
]
}
diff --git a/packages/builder/src/components/start/ExportAppModal.svelte b/packages/builder/src/components/start/ExportAppModal.svelte
index ec0cf42fe0..3a995be7ae 100644
--- a/packages/builder/src/components/start/ExportAppModal.svelte
+++ b/packages/builder/src/components/start/ExportAppModal.svelte
@@ -8,6 +8,7 @@
Input,
notifications,
} from "@budibase/bbui"
+ import { downloadFile } from "@budibase/frontend-core"
import { createValidationStore } from "helpers/validation/yup"
export let app
@@ -55,40 +56,13 @@
const exportApp = async () => {
const id = published ? app.prodId : app.devId
const url = `/api/backups/export?appId=${id}`
- await downloadFile(url, {
- excludeRows: !includeInternalTablesRows,
- encryptPassword: password,
- })
- }
- async function downloadFile(url, body) {
try {
- const response = await fetch(url, {
- method: "POST",
- headers: {
- "Content-Type": "application/json",
- },
- body: JSON.stringify(body),
+ const downloaded = await downloadFile(url, {
+ excludeRows: !includeInternalTablesRows,
+ encryptPassword: password,
})
-
- if (response.ok) {
- const contentDisposition = response.headers.get("Content-Disposition")
-
- const matches = /filename[^;=\n]*=((['"]).*?\2|[^;\n]*)/.exec(
- contentDisposition
- )
-
- const filename = matches[1].replace(/['"]/g, "")
-
- const url = URL.createObjectURL(await response.blob())
-
- const link = document.createElement("a")
- link.href = url
- link.download = filename
- link.click()
-
- URL.revokeObjectURL(url)
- } else {
+ if (!downloaded) {
notifications.error("Error exporting the app.")
}
} catch (error) {
diff --git a/packages/client/src/utils/buttonActions.js b/packages/client/src/utils/buttonActions.js
index 68478b76ac..d883ee1b55 100644
--- a/packages/client/src/utils/buttonActions.js
+++ b/packages/client/src/utils/buttonActions.js
@@ -1,5 +1,6 @@
import { get } from "svelte/store"
import download from "downloadjs"
+import { downloadStream } from "@budibase/frontend-core"
import {
routeStore,
builderStore,
@@ -400,6 +401,51 @@ const closeSidePanelHandler = () => {
sidePanelStore.actions.close()
}
+const downloadFileHandler = async action => {
+ const { url, fileName } = action.parameters
+ try {
+ const { type } = action.parameters
+ if (type === "attachment") {
+ const { tableId, rowId, attachmentColumn } = action.parameters
+ const res = await API.downloadAttachment(
+ tableId,
+ rowId,
+ attachmentColumn,
+ { suppressErrors: true }
+ )
+ await downloadStream(res)
+ return
+ }
+
+ const response = await fetch(url)
+
+ if (!response.ok) {
+ notificationStore.actions.error(
+ `Failed to download from '${url}'. Server returned status code: ${response.status}`
+ )
+ return
+ }
+
+ const objectUrl = URL.createObjectURL(await response.blob())
+
+ const link = document.createElement("a")
+ link.href = objectUrl
+ link.download = fileName
+ link.click()
+
+ URL.revokeObjectURL(objectUrl)
+ } catch (e) {
+ console.error(e)
+ if (e.status) {
+ notificationStore.actions.error(
+ `Failed to download from '${url}'. Server returned status code: ${e.status}`
+ )
+ } else {
+ notificationStore.actions.error(`Failed to download from '${url}'.`)
+ }
+ }
+}
+
const handlerMap = {
["Fetch Row"]: fetchRowHandler,
["Save Row"]: saveRowHandler,
@@ -418,6 +464,7 @@ const handlerMap = {
["Prompt User"]: promptUserHandler,
["Open Side Panel"]: openSidePanelHandler,
["Close Side Panel"]: closeSidePanelHandler,
+ ["Download File"]: downloadFileHandler,
}
const confirmTextMap = {
diff --git a/packages/client/tsconfig.json b/packages/client/tsconfig.json
new file mode 100644
index 0000000000..81f1657f48
--- /dev/null
+++ b/packages/client/tsconfig.json
@@ -0,0 +1,17 @@
+{
+ "compilerOptions": {
+ "allowJs": true,
+ "strict": true,
+ "outDir": "dist",
+ "paths": {
+ "@budibase/*": [
+ "../*/src/index.ts",
+ "../*/src/index.js",
+ "../*",
+ "../../node_modules/@budibase/*"
+ ],
+ "*": ["./src/*"]
+ }
+ },
+ "include": ["src/**/*"]
+}
diff --git a/packages/frontend-core/src/api/attachments.js b/packages/frontend-core/src/api/attachments.js
index f79b461574..cff466c86f 100644
--- a/packages/frontend-core/src/api/attachments.js
+++ b/packages/frontend-core/src/api/attachments.js
@@ -88,5 +88,19 @@ export const buildAttachmentEndpoints = API => {
},
})
},
+
+ /**
+ * Download an attachment from a row given its column name.
+ * @param datasourceId the ID of the datasource to download from
+ * @param rowId the ID of the row to download from
+ * @param columnName the column name to download
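+   * @param options optional request options (e.g. suppressErrors)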
+ */
+ downloadAttachment: async (datasourceId, rowId, columnName, options) => {
+ return await API.get({
+ url: `/api/${datasourceId}/rows/${rowId}/attachment/${columnName}`,
+ parseResponse: response => response,
+ suppressErrors: options?.suppressErrors,
+ })
+ },
}
}
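
For context, a minimal usage sketch of the new client method, mirroring the "Download File" handler in buttonActions.js above (the table and row IDs are placeholders):

```js
// Fetch the raw response for a row's attachment column, then hand it to
// downloadStream (frontend-core/src/utils/download.js) to save it in the
// browser. suppressErrors lets the caller surface its own notification.
const res = await API.downloadAttachment(
  "ta_users",      // placeholder table/datasource ID
  "ro_1a2b3c",     // placeholder row ID
  "attachments",   // attachment column name
  { suppressErrors: true }
)
await downloadStream(res)
```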
diff --git a/packages/frontend-core/src/utils/download.js b/packages/frontend-core/src/utils/download.js
index 89c8572253..fb9990ff30 100644
--- a/packages/frontend-core/src/utils/download.js
+++ b/packages/frontend-core/src/utils/download.js
@@ -1,3 +1,5 @@
+const extractFileNameRegex = /filename[^;=\n]*=((['"]).*?\2|[^;\n]*)/
+
export function downloadText(filename, text) {
if (typeof text === "object") {
text = JSON.stringify(text)
@@ -17,9 +19,7 @@ export async function downloadStream(streamResponse) {
const contentDisposition = streamResponse.headers.get("Content-Disposition")
- const matches = /filename[^;=\n]*=((['"]).*?\2|[^;\n]*)/.exec(
- contentDisposition
- )
+ const matches = extractFileNameRegex.exec(contentDisposition)
const filename = matches[1].replace(/['"]/g, "")
@@ -34,3 +34,33 @@ export async function downloadStream(streamResponse) {
URL.revokeObjectURL(blobUrl)
}
+
+export async function downloadFile(url, body) {
+ const response = await fetch(url, {
+ method: "POST",
+ headers: {
+ "Content-Type": "application/json",
+ },
+ body: JSON.stringify(body),
+ })
+
+ if (!response.ok) {
+ return false
+ } else {
+ const contentDisposition = response.headers.get("Content-Disposition")
+
+ const matches = extractFileNameRegex.exec(contentDisposition)
+
+ const filename = matches[1].replace(/['"]/g, "")
+
+ const url = URL.createObjectURL(await response.blob())
+
+ const link = document.createElement("a")
+ link.href = url
+ link.download = filename
+ link.click()
+
+ URL.revokeObjectURL(url)
+ return true
+ }
+}
diff --git a/packages/server/package.json b/packages/server/package.json
index 59f06ea399..ad03033e67 100644
--- a/packages/server/package.json
+++ b/packages/server/package.json
@@ -50,10 +50,10 @@
"@apidevtools/swagger-parser": "10.0.3",
"@budibase/backend-core": "0.0.0",
"@budibase/client": "0.0.0",
+ "@budibase/frontend-core": "0.0.0",
"@budibase/pro": "0.0.0",
"@budibase/shared-core": "0.0.0",
"@budibase/string-templates": "0.0.0",
- "@budibase/frontend-core": "0.0.0",
"@budibase/types": "0.0.0",
"@bull-board/api": "5.10.2",
"@bull-board/koa": "5.10.2",
@@ -63,6 +63,7 @@
"@socket.io/redis-adapter": "^8.2.1",
"airtable": "0.10.1",
"arangojs": "7.2.0",
+ "archiver": "7.0.1",
"aws-sdk": "2.1030.0",
"bcrypt": "5.1.0",
"bcryptjs": "2.4.3",
@@ -124,6 +125,7 @@
"@babel/preset-env": "7.16.11",
"@swc/core": "1.3.71",
"@swc/jest": "0.2.27",
+ "@types/archiver": "^6.0.2",
"@types/global-agent": "2.1.1",
"@types/google-spreadsheet": "3.1.5",
"@types/jest": "29.5.5",
diff --git a/packages/server/src/api/controllers/application.ts b/packages/server/src/api/controllers/application.ts
index ceef421fab..6acdfcd465 100644
--- a/packages/server/src/api/controllers/application.ts
+++ b/packages/server/src/api/controllers/application.ts
@@ -320,6 +320,7 @@ async function performAppCreate(ctx: UserCtx) {
"theme",
"customTheme",
"icon",
+ "snippets",
]
keys.forEach(key => {
if (existing[key]) {
diff --git a/packages/server/src/api/controllers/row/index.ts b/packages/server/src/api/controllers/row/index.ts
index 7dc70a3009..c3d1f2cb47 100644
--- a/packages/server/src/api/controllers/row/index.ts
+++ b/packages/server/src/api/controllers/row/index.ts
@@ -1,4 +1,8 @@
+import stream from "stream"
+import archiver from "archiver"
+
import { quotas } from "@budibase/pro"
+import { objectStore } from "@budibase/backend-core"
import * as internal from "./internal"
import * as external from "./external"
import { isExternalTableID } from "../../../integrations/utils"
@@ -9,10 +13,12 @@ import {
DeleteRows,
ExportRowsRequest,
ExportRowsResponse,
+ FieldType,
GetRowResponse,
PatchRowRequest,
PatchRowResponse,
Row,
+ RowAttachment,
RowSearchParams,
SearchRowRequest,
SearchRowResponse,
@@ -251,3 +257,59 @@ export const exportRows = async (
ctx.attachment(fileName)
ctx.body = apiFileReturn(content)
}
+
+export async function downloadAttachment(ctx: UserCtx) {
+ const { columnName } = ctx.params
+
+ const tableId = utils.getTableId(ctx)
+ const row = await pickApi(tableId).find(ctx)
+
+ const table = await sdk.tables.getTable(tableId)
+ const columnSchema = table.schema[columnName]
+ if (!columnSchema) {
+ ctx.throw(400, `'${columnName}' is not valid`)
+ }
+
+ const columnType = columnSchema.type
+
+ if (
+ columnType !== FieldType.ATTACHMENTS &&
+ columnType !== FieldType.ATTACHMENT_SINGLE
+ ) {
+    ctx.throw(404, `'${columnName}' is not a valid attachment column`)
+ }
+
+ const attachments: RowAttachment[] =
+ columnType === FieldType.ATTACHMENTS ? row[columnName] : [row[columnName]]
+
+ if (!attachments?.length) {
+ ctx.throw(404)
+ }
+
+ if (attachments.length === 1) {
+ const attachment = attachments[0]
+ ctx.attachment(attachment.name)
+ ctx.body = await objectStore.getReadStream(
+ objectStore.ObjectStoreBuckets.APPS,
+ attachment.key
+ )
+ } else {
+ const passThrough = new stream.PassThrough()
+ const archive = archiver.create("zip")
+ archive.pipe(passThrough)
+
+ for (const attachment of attachments) {
+ const attachmentStream = await objectStore.getReadStream(
+ objectStore.ObjectStoreBuckets.APPS,
+ attachment.key
+ )
+ archive.append(attachmentStream, { name: attachment.name })
+ }
+
+ const displayName = row[table.primaryDisplay || "_id"]
+ ctx.attachment(`${displayName}_${columnName}.zip`)
+ archive.finalize()
+ ctx.body = passThrough
+ ctx.type = "zip"
+ }
+}
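
The multi-attachment branch above streams the zip rather than buffering it: archiver writes into a PassThrough stream that Koa serves as the response body. The same archiver pattern in isolation, writing to a file instead of a response (file names and contents are made up):

```ts
import fs from "fs"
import archiver from "archiver"

// Stream a zip to disk without holding the whole archive in memory.
const archive = archiver("zip")
archive.pipe(fs.createWriteStream("attachments.zip"))
archive.append(Buffer.from("first attachment"), { name: "a.txt" })
archive.append(Buffer.from("second attachment"), { name: "b.txt" })
archive.finalize()
```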
diff --git a/packages/server/src/api/routes/row.ts b/packages/server/src/api/routes/row.ts
index 516bfd20c6..f1aa39a461 100644
--- a/packages/server/src/api/routes/row.ts
+++ b/packages/server/src/api/routes/row.ts
@@ -77,6 +77,12 @@ router
authorized(PermissionType.TABLE, PermissionLevel.WRITE),
rowController.exportRows
)
+ .get(
+ "/api/:sourceId/rows/:rowId/attachment/:columnName",
+ paramSubResource("sourceId", "rowId"),
+ authorized(PermissionType.TABLE, PermissionLevel.READ),
+ rowController.downloadAttachment
+ )
router.post(
"/api/v2/views/:viewId/search",
diff --git a/packages/server/src/api/routes/tests/search.spec.ts b/packages/server/src/api/routes/tests/search.spec.ts
index 3fabbfbef9..5b71ec9044 100644
--- a/packages/server/src/api/routes/tests/search.spec.ts
+++ b/packages/server/src/api/routes/tests/search.spec.ts
@@ -6,10 +6,14 @@ import {
Datasource,
EmptyFilterOption,
FieldType,
- Row,
+ RowSearchParams,
SearchFilters,
+ SortOrder,
+ SortType,
Table,
+ TableSchema,
} from "@budibase/types"
+import _ from "lodash"
jest.unmock("mssql")
@@ -25,8 +29,8 @@ describe.each([
const config = setup.getConfig()
let envCleanup: (() => void) | undefined
- let table: Table
let datasource: Datasource | undefined
+ let table: Table
beforeAll(async () => {
if (isSqs) {
@@ -47,231 +51,503 @@ describe.each([
}
})
+ async function createTable(schema: TableSchema) {
+ table = await config.api.table.save(
+ tableForDatasource(datasource, { schema })
+ )
+ }
+
+  async function createRows(rows: Record<string, any>[]) {
+ await Promise.all(rows.map(r => config.api.row.save(table._id!, r)))
+ }
+
+ class SearchAssertion {
+ constructor(private readonly query: RowSearchParams) {}
+
+ // Asserts that the query returns rows matching exactly the set of rows
+ // passed in. The order of the rows matters. Rows returned in an order
+ // different to the one passed in will cause the assertion to fail. Extra
+ // rows returned by the query will also cause the assertion to fail.
+ async toMatchExactly(expectedRows: any[]) {
+ const { rows: foundRows } = await config.api.row.search(table._id!, {
+ ...this.query,
+ tableId: table._id!,
+ })
+
+ // eslint-disable-next-line jest/no-standalone-expect
+ expect(foundRows).toHaveLength(expectedRows.length)
+ // eslint-disable-next-line jest/no-standalone-expect
+ expect(foundRows).toEqual(
+ expectedRows.map((expectedRow: any) =>
+ expect.objectContaining(
+ foundRows.find(foundRow => _.isMatch(foundRow, expectedRow))
+ )
+ )
+ )
+ }
+
+ // Asserts that the query returns rows matching exactly the set of rows
+ // passed in. The order of the rows is not important, but extra rows will
+ // cause the assertion to fail.
+ async toContainExactly(expectedRows: any[]) {
+ const { rows: foundRows } = await config.api.row.search(table._id!, {
+ ...this.query,
+ tableId: table._id!,
+ })
+
+ // eslint-disable-next-line jest/no-standalone-expect
+ expect(foundRows).toHaveLength(expectedRows.length)
+ // eslint-disable-next-line jest/no-standalone-expect
+ expect(foundRows).toEqual(
+ expect.arrayContaining(
+ expectedRows.map((expectedRow: any) =>
+ expect.objectContaining(
+ foundRows.find(foundRow => _.isMatch(foundRow, expectedRow))
+ )
+ )
+ )
+ )
+ }
+
+ // Asserts that the query returns rows matching the set of rows passed in.
+ // The order of the rows is not important. Extra rows will not cause the
+ // assertion to fail.
+ async toContain(expectedRows: any[]) {
+ const { rows: foundRows } = await config.api.row.search(table._id!, {
+ ...this.query,
+ tableId: table._id!,
+ })
+
+ // eslint-disable-next-line jest/no-standalone-expect
+ expect(foundRows).toEqual(
+ expect.arrayContaining(
+ expectedRows.map((expectedRow: any) =>
+ expect.objectContaining(
+ foundRows.find(foundRow => _.isMatch(foundRow, expectedRow))
+ )
+ )
+ )
+ )
+ }
+
+ async toFindNothing() {
+ await this.toContainExactly([])
+ }
+
+ async toHaveLength(length: number) {
+ const { rows: foundRows } = await config.api.row.search(table._id!, {
+ ...this.query,
+ tableId: table._id!,
+ })
+
+ // eslint-disable-next-line jest/no-standalone-expect
+ expect(foundRows).toHaveLength(length)
+ }
+ }
+
+  function expectSearch(query: Omit<RowSearchParams, "tableId">) {
+ return new SearchAssertion({ ...query, tableId: table._id! })
+ }
+
+ function expectQuery(query: SearchFilters) {
+ return expectSearch({ query })
+ }
+
describe("strings", () => {
beforeAll(async () => {
- table = await config.api.table.save(
- tableForDatasource(datasource, {
- schema: {
- name: {
- name: "name",
- type: FieldType.STRING,
- },
- },
- })
- )
+ await createTable({
+ name: { name: "name", type: FieldType.STRING },
+ })
+ await createRows([{ name: "foo" }, { name: "bar" }])
})
- const rows = [{ name: "foo" }, { name: "bar" }]
- let savedRows: Row[]
+ describe("misc", () => {
+ it("should return all if no query is passed", () =>
+ expectSearch({} as RowSearchParams).toContainExactly([
+ { name: "foo" },
+ { name: "bar" },
+ ]))
- beforeAll(async () => {
- savedRows = await Promise.all(
- rows.map(r => config.api.row.save(table._id!, r))
- )
+ it("should return all if empty query is passed", () =>
+ expectQuery({}).toContainExactly([{ name: "foo" }, { name: "bar" }]))
+
+ it("should return all if onEmptyFilter is RETURN_ALL", () =>
+ expectQuery({
+ onEmptyFilter: EmptyFilterOption.RETURN_ALL,
+ }).toContainExactly([{ name: "foo" }, { name: "bar" }]))
+
+ it("should return nothing if onEmptyFilter is RETURN_NONE", () =>
+ expectQuery({
+ onEmptyFilter: EmptyFilterOption.RETURN_NONE,
+ }).toFindNothing())
+
+ it("should respect limit", () =>
+ expectSearch({ limit: 1, paginate: true, query: {} }).toHaveLength(1))
})
- interface StringSearchTest {
- query: SearchFilters
- expected: (typeof rows)[number][]
- }
+ describe("equal", () => {
+ it("successfully finds a row", () =>
+ expectQuery({ equal: { name: "foo" } }).toContainExactly([
+ { name: "foo" },
+ ]))
- const stringSearchTests: StringSearchTest[] = [
- // These three test cases are generic and don't really need
- // to be repeated for all data types, so we just do them here.
- { query: {}, expected: rows },
- {
- query: { onEmptyFilter: EmptyFilterOption.RETURN_ALL },
- expected: rows,
- },
- {
- query: { onEmptyFilter: EmptyFilterOption.RETURN_NONE },
- expected: [],
- },
- // The rest of these tests are specific to strings.
- { query: { string: { name: "foo" } }, expected: [rows[0]] },
- { query: { string: { name: "none" } }, expected: [] },
- { query: { fuzzy: { name: "oo" } }, expected: [rows[0]] },
- { query: { equal: { name: "foo" } }, expected: [rows[0]] },
- { query: { notEqual: { name: "foo" } }, expected: [rows[1]] },
- { query: { oneOf: { name: ["foo"] } }, expected: [rows[0]] },
- ]
+ it("fails to find nonexistent row", () =>
+ expectQuery({ equal: { name: "none" } }).toFindNothing())
+ })
- it.each(stringSearchTests)(
- `should be able to run query: $query`,
- async ({ query, expected }) => {
- const { rows: foundRows } = await config.api.row.search(table._id!, {
- tableId: table._id!,
- query,
- })
- expect(foundRows).toHaveLength(expected.length)
- expect(foundRows).toEqual(
- expect.arrayContaining(
- expected.map(r =>
- expect.objectContaining(savedRows.find(sr => sr.name === r.name)!)
- )
- )
- )
- }
- )
+ describe("notEqual", () => {
+ it("successfully finds a row", () =>
+ expectQuery({ notEqual: { name: "foo" } }).toContainExactly([
+ { name: "bar" },
+ ]))
+
+ it("fails to find nonexistent row", () =>
+ expectQuery({ notEqual: { name: "bar" } }).toContainExactly([
+ { name: "foo" },
+ ]))
+ })
+
+ describe("oneOf", () => {
+ it("successfully finds a row", () =>
+ expectQuery({ oneOf: { name: ["foo"] } }).toContainExactly([
+ { name: "foo" },
+ ]))
+
+ it("fails to find nonexistent row", () =>
+ expectQuery({ oneOf: { name: ["none"] } }).toFindNothing())
+ })
+
+ describe("fuzzy", () => {
+ it("successfully finds a row", () =>
+ expectQuery({ fuzzy: { name: "oo" } }).toContainExactly([
+ { name: "foo" },
+ ]))
+
+ it("fails to find nonexistent row", () =>
+ expectQuery({ fuzzy: { name: "none" } }).toFindNothing())
+ })
+
+ describe("range", () => {
+ it("successfully finds multiple rows", () =>
+ expectQuery({
+ range: { name: { low: "a", high: "z" } },
+ }).toContainExactly([{ name: "bar" }, { name: "foo" }]))
+
+ it("successfully finds a row with a high bound", () =>
+ expectQuery({
+ range: { name: { low: "a", high: "c" } },
+ }).toContainExactly([{ name: "bar" }]))
+
+ it("successfully finds a row with a low bound", () =>
+ expectQuery({
+ range: { name: { low: "f", high: "z" } },
+ }).toContainExactly([{ name: "foo" }]))
+
+ it("successfully finds no rows", () =>
+ expectQuery({
+ range: { name: { low: "g", high: "h" } },
+ }).toFindNothing())
+ })
+
+ describe("sort", () => {
+ it("sorts ascending", () =>
+ expectSearch({
+ query: {},
+ sort: "name",
+ sortOrder: SortOrder.ASCENDING,
+ }).toMatchExactly([{ name: "bar" }, { name: "foo" }]))
+
+ it("sorts descending", () =>
+ expectSearch({
+ query: {},
+ sort: "name",
+ sortOrder: SortOrder.DESCENDING,
+ }).toMatchExactly([{ name: "foo" }, { name: "bar" }]))
+
+ describe("sortType STRING", () => {
+ it("sorts ascending", () =>
+ expectSearch({
+ query: {},
+ sort: "name",
+ sortType: SortType.STRING,
+ sortOrder: SortOrder.ASCENDING,
+ }).toMatchExactly([{ name: "bar" }, { name: "foo" }]))
+
+ it("sorts descending", () =>
+ expectSearch({
+ query: {},
+ sort: "name",
+ sortType: SortType.STRING,
+ sortOrder: SortOrder.DESCENDING,
+ }).toMatchExactly([{ name: "foo" }, { name: "bar" }]))
+ })
+ })
})
- describe("number", () => {
+ describe("numbers", () => {
beforeAll(async () => {
- table = await config.api.table.save(
- tableForDatasource(datasource, {
- schema: {
- age: {
- name: "age",
- type: FieldType.NUMBER,
- },
- },
- })
- )
+ await createTable({
+ age: { name: "age", type: FieldType.NUMBER },
+ })
+ await createRows([{ age: 1 }, { age: 10 }])
})
- const rows = [{ age: 1 }, { age: 10 }]
- let savedRows: Row[]
+ describe("equal", () => {
+ it("successfully finds a row", () =>
+ expectQuery({ equal: { age: 1 } }).toContainExactly([{ age: 1 }]))
- beforeAll(async () => {
- savedRows = await Promise.all(
- rows.map(r => config.api.row.save(table._id!, r))
- )
+ it("fails to find nonexistent row", () =>
+ expectQuery({ equal: { age: 2 } }).toFindNothing())
})
- interface NumberSearchTest {
- query: SearchFilters
- expected: (typeof rows)[number][]
- }
+ describe("notEqual", () => {
+ it("successfully finds a row", () =>
+ expectQuery({ notEqual: { age: 1 } }).toContainExactly([{ age: 10 }]))
- const numberSearchTests: NumberSearchTest[] = [
- { query: { equal: { age: 1 } }, expected: [rows[0]] },
- { query: { equal: { age: 2 } }, expected: [] },
- { query: { notEqual: { age: 1 } }, expected: [rows[1]] },
- { query: { oneOf: { age: [1] } }, expected: [rows[0]] },
- { query: { range: { age: { low: 1, high: 5 } } }, expected: [rows[0]] },
- { query: { range: { age: { low: 0, high: 1 } } }, expected: [rows[0]] },
- { query: { range: { age: { low: 3, high: 4 } } }, expected: [] },
- { query: { range: { age: { low: 0, high: 11 } } }, expected: rows },
- ]
+ it("fails to find nonexistent row", () =>
+ expectQuery({ notEqual: { age: 10 } }).toContainExactly([{ age: 1 }]))
+ })
- it.each(numberSearchTests)(
- `should be able to run query: $query`,
- async ({ query, expected }) => {
- const { rows: foundRows } = await config.api.row.search(table._id!, {
- tableId: table._id!,
- query,
- })
- expect(foundRows).toHaveLength(expected.length)
- expect(foundRows).toEqual(
- expect.arrayContaining(
- expected.map(r =>
- expect.objectContaining(savedRows.find(sr => sr.age === r.age)!)
- )
- )
- )
- }
- )
+ describe("oneOf", () => {
+ it("successfully finds a row", () =>
+ expectQuery({ oneOf: { age: [1] } }).toContainExactly([{ age: 1 }]))
+
+ it("fails to find nonexistent row", () =>
+ expectQuery({ oneOf: { age: [2] } }).toFindNothing())
+ })
+
+ describe("range", () => {
+ it("successfully finds a row", () =>
+ expectQuery({
+ range: { age: { low: 1, high: 5 } },
+ }).toContainExactly([{ age: 1 }]))
+
+ it("successfully finds multiple rows", () =>
+ expectQuery({
+ range: { age: { low: 1, high: 10 } },
+ }).toContainExactly([{ age: 1 }, { age: 10 }]))
+
+ it("successfully finds a row with a high bound", () =>
+ expectQuery({
+ range: { age: { low: 5, high: 10 } },
+ }).toContainExactly([{ age: 10 }]))
+
+ it("successfully finds no rows", () =>
+ expectQuery({
+ range: { age: { low: 5, high: 9 } },
+ }).toFindNothing())
+ })
+
+ describe("sort", () => {
+ it("sorts ascending", () =>
+ expectSearch({
+ query: {},
+ sort: "age",
+ sortOrder: SortOrder.ASCENDING,
+ }).toMatchExactly([{ age: 1 }, { age: 10 }]))
+
+ it("sorts descending", () =>
+ expectSearch({
+ query: {},
+ sort: "age",
+ sortOrder: SortOrder.DESCENDING,
+ }).toMatchExactly([{ age: 10 }, { age: 1 }]))
+ })
+
+ describe("sortType NUMBER", () => {
+ it("sorts ascending", () =>
+ expectSearch({
+ query: {},
+ sort: "age",
+ sortType: SortType.NUMBER,
+ sortOrder: SortOrder.ASCENDING,
+ }).toMatchExactly([{ age: 1 }, { age: 10 }]))
+
+ it("sorts descending", () =>
+ expectSearch({
+ query: {},
+ sort: "age",
+ sortType: SortType.NUMBER,
+ sortOrder: SortOrder.DESCENDING,
+ }).toMatchExactly([{ age: 10 }, { age: 1 }]))
+ })
})
describe("dates", () => {
- beforeEach(async () => {
- table = await config.api.table.save(
- tableForDatasource(datasource, {
- schema: {
- dob: {
- name: "dob",
- type: FieldType.DATETIME,
- },
- },
- })
- )
+ const JAN_1ST = "2020-01-01T00:00:00.000Z"
+ const JAN_2ND = "2020-01-02T00:00:00.000Z"
+ const JAN_5TH = "2020-01-05T00:00:00.000Z"
+ const JAN_9TH = "2020-01-09T00:00:00.000Z"
+ const JAN_10TH = "2020-01-10T00:00:00.000Z"
+
+ beforeAll(async () => {
+ await createTable({
+ dob: { name: "dob", type: FieldType.DATETIME },
+ })
+
+ await createRows([{ dob: JAN_1ST }, { dob: JAN_10TH }])
})
- const rows = [
- { dob: new Date("2020-01-01").toISOString() },
- { dob: new Date("2020-01-10").toISOString() },
- ]
- let savedRows: Row[]
+ describe("equal", () => {
+ it("successfully finds a row", () =>
+ expectQuery({ equal: { dob: JAN_1ST } }).toContainExactly([
+ { dob: JAN_1ST },
+ ]))
- beforeEach(async () => {
- savedRows = await Promise.all(
- rows.map(r => config.api.row.save(table._id!, r))
- )
+ it("fails to find nonexistent row", () =>
+ expectQuery({ equal: { dob: JAN_2ND } }).toFindNothing())
})
- interface DateSearchTest {
- query: SearchFilters
- expected: (typeof rows)[number][]
- }
+ describe("notEqual", () => {
+ it("successfully finds a row", () =>
+ expectQuery({ notEqual: { dob: JAN_1ST } }).toContainExactly([
+ { dob: JAN_10TH },
+ ]))
- const dateSearchTests: DateSearchTest[] = [
- {
- query: { equal: { dob: new Date("2020-01-01").toISOString() } },
- expected: [rows[0]],
- },
- {
- query: { equal: { dob: new Date("2020-01-02").toISOString() } },
- expected: [],
- },
- {
- query: { notEqual: { dob: new Date("2020-01-01").toISOString() } },
- expected: [rows[1]],
- },
- {
- query: { oneOf: { dob: [new Date("2020-01-01").toISOString()] } },
- expected: [rows[0]],
- },
- {
- query: {
- range: {
- dob: {
- low: new Date("2020-01-01").toISOString(),
- high: new Date("2020-01-05").toISOString(),
- },
- },
- },
- expected: [rows[0]],
- },
- {
- query: {
- range: {
- dob: {
- low: new Date("2020-01-01").toISOString(),
- high: new Date("2020-01-10").toISOString(),
- },
- },
- },
- expected: rows,
- },
- {
- query: {
- range: {
- dob: {
- low: new Date("2020-01-05").toISOString(),
- high: new Date("2020-01-10").toISOString(),
- },
- },
- },
- expected: [rows[1]],
- },
- ]
+ it("fails to find nonexistent row", () =>
+ expectQuery({ notEqual: { dob: JAN_10TH } }).toContainExactly([
+ { dob: JAN_1ST },
+ ]))
+ })
- it.each(dateSearchTests)(
- `should be able to run query: $query`,
- async ({ query, expected }) => {
- const { rows: foundRows } = await config.api.row.search(table._id!, {
- tableId: table._id!,
- query,
- })
- expect(foundRows).toHaveLength(expected.length)
- expect(foundRows).toEqual(
- expect.arrayContaining(
- expected.map(r =>
- expect.objectContaining(savedRows.find(sr => sr.dob === r.dob)!)
- )
- )
- )
- }
- )
+ describe("oneOf", () => {
+ it("successfully finds a row", () =>
+ expectQuery({ oneOf: { dob: [JAN_1ST] } }).toContainExactly([
+ { dob: JAN_1ST },
+ ]))
+
+ it("fails to find nonexistent row", () =>
+ expectQuery({ oneOf: { dob: [JAN_2ND] } }).toFindNothing())
+ })
+
+ describe("range", () => {
+ it("successfully finds a row", () =>
+ expectQuery({
+ range: { dob: { low: JAN_1ST, high: JAN_5TH } },
+ }).toContainExactly([{ dob: JAN_1ST }]))
+
+ it("successfully finds multiple rows", () =>
+ expectQuery({
+ range: { dob: { low: JAN_1ST, high: JAN_10TH } },
+ }).toContainExactly([{ dob: JAN_1ST }, { dob: JAN_10TH }]))
+
+ it("successfully finds a row with a high bound", () =>
+ expectQuery({
+ range: { dob: { low: JAN_5TH, high: JAN_10TH } },
+ }).toContainExactly([{ dob: JAN_10TH }]))
+
+ it("successfully finds no rows", () =>
+ expectQuery({
+ range: { dob: { low: JAN_5TH, high: JAN_9TH } },
+ }).toFindNothing())
+ })
+
+ describe("sort", () => {
+ it("sorts ascending", () =>
+ expectSearch({
+ query: {},
+ sort: "dob",
+ sortOrder: SortOrder.ASCENDING,
+ }).toMatchExactly([{ dob: JAN_1ST }, { dob: JAN_10TH }]))
+
+ it("sorts descending", () =>
+ expectSearch({
+ query: {},
+ sort: "dob",
+ sortOrder: SortOrder.DESCENDING,
+ }).toMatchExactly([{ dob: JAN_10TH }, { dob: JAN_1ST }]))
+
+ describe("sortType STRING", () => {
+ it("sorts ascending", () =>
+ expectSearch({
+ query: {},
+ sort: "dob",
+ sortType: SortType.STRING,
+ sortOrder: SortOrder.ASCENDING,
+ }).toMatchExactly([{ dob: JAN_1ST }, { dob: JAN_10TH }]))
+
+ it("sorts descending", () =>
+ expectSearch({
+ query: {},
+ sort: "dob",
+ sortType: SortType.STRING,
+ sortOrder: SortOrder.DESCENDING,
+ }).toMatchExactly([{ dob: JAN_10TH }, { dob: JAN_1ST }]))
+ })
+ })
+ })
+
+ describe("array of strings", () => {
+ beforeAll(async () => {
+ await createTable({
+ numbers: {
+ name: "numbers",
+ type: FieldType.ARRAY,
+ constraints: { inclusion: ["one", "two", "three"] },
+ },
+ })
+ await createRows([{ numbers: ["one", "two"] }, { numbers: ["three"] }])
+ })
+
+ describe("contains", () => {
+ it("successfully finds a row", () =>
+ expectQuery({ contains: { numbers: ["one"] } }).toContainExactly([
+ { numbers: ["one", "two"] },
+ ]))
+
+ it("fails to find nonexistent row", () =>
+ expectQuery({ contains: { numbers: ["none"] } }).toFindNothing())
+
+ it("fails to find row containing all", () =>
+ expectQuery({
+ contains: { numbers: ["one", "two", "three"] },
+ }).toFindNothing())
+
+ it("finds all with empty list", () =>
+ expectQuery({ contains: { numbers: [] } }).toContainExactly([
+ { numbers: ["one", "two"] },
+ { numbers: ["three"] },
+ ]))
+ })
+
+ describe("notContains", () => {
+ it("successfully finds a row", () =>
+ expectQuery({ notContains: { numbers: ["one"] } }).toContainExactly([
+ { numbers: ["three"] },
+ ]))
+
+ it("fails to find nonexistent row", () =>
+ expectQuery({
+ notContains: { numbers: ["one", "two", "three"] },
+ }).toContainExactly([
+ { numbers: ["one", "two"] },
+ { numbers: ["three"] },
+ ]))
+
+ it("finds all with empty list", () =>
+ expectQuery({ notContains: { numbers: [] } }).toContainExactly([
+ { numbers: ["one", "two"] },
+ { numbers: ["three"] },
+ ]))
+ })
+
+ describe("containsAny", () => {
+ it("successfully finds rows", () =>
+ expectQuery({
+ containsAny: { numbers: ["one", "two", "three"] },
+ }).toContainExactly([
+ { numbers: ["one", "two"] },
+ { numbers: ["three"] },
+ ]))
+
+ it("fails to find nonexistent row", () =>
+ expectQuery({ containsAny: { numbers: ["none"] } }).toFindNothing())
+
+ it("finds all with empty list", () =>
+ expectQuery({ containsAny: { numbers: [] } }).toContainExactly([
+ { numbers: ["one", "two"] },
+ { numbers: ["three"] },
+ ]))
+ })
})
})
diff --git a/packages/server/src/constants/index.ts b/packages/server/src/constants/index.ts
index 42a1b53224..37c275c8a3 100644
--- a/packages/server/src/constants/index.ts
+++ b/packages/server/src/constants/index.ts
@@ -20,6 +20,7 @@ export enum FilterTypes {
NOT_EMPTY = "notEmpty",
CONTAINS = "contains",
NOT_CONTAINS = "notContains",
+ CONTAINS_ANY = "containsAny",
ONE_OF = "oneOf",
}
@@ -30,6 +31,7 @@ export const NoEmptyFilterStrings = [
FilterTypes.NOT_EQUAL,
FilterTypes.CONTAINS,
FilterTypes.NOT_CONTAINS,
+ FilterTypes.CONTAINS_ANY,
]
export const CanSwitchTypes = [
diff --git a/packages/server/src/integrations/base/sql.ts b/packages/server/src/integrations/base/sql.ts
index f5828f9419..259abec106 100644
--- a/packages/server/src/integrations/base/sql.ts
+++ b/packages/server/src/integrations/base/sql.ts
@@ -233,6 +233,11 @@ class InternalBuilder {
(statement ? andOr : "") +
`LOWER(${likeKey(this.client, key)}) LIKE ?`
}
+
+ if (statement === "") {
+ return
+ }
+
// @ts-ignore
query = query[rawFnc](`${not}(${statement})`, value)
})
diff --git a/packages/server/src/sdk/app/rows/search.ts b/packages/server/src/sdk/app/rows/search.ts
index f681bfeb90..5a016c821f 100644
--- a/packages/server/src/sdk/app/rows/search.ts
+++ b/packages/server/src/sdk/app/rows/search.ts
@@ -29,6 +29,10 @@ function pickApi(tableId: any) {
return internal
}
+function isEmptyArray(value: any) {
+ return Array.isArray(value) && value.length === 0
+}
+
// don't do a pure falsy check, as 0 is included
// https://github.com/Budibase/budibase/issues/10118
export function removeEmptyFilters(filters: SearchFilters) {
@@ -47,7 +51,7 @@ export function removeEmptyFilters(filters: SearchFilters) {
for (let [key, value] of Object.entries(
filters[filterType] as object
)) {
- if (value == null || value === "") {
+ if (value == null || value === "" || isEmptyArray(value)) {
// @ts-ignore
delete filters[filterField][key]
}
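
With this change an empty array behaves like an empty string and is stripped before querying, which is what makes `contains: { numbers: [] }` in the new tests match every row. A sketch (the import path is assumed relative to the server package):

```ts
import { SearchFilters } from "@budibase/types"
import { removeEmptyFilters } from "./search"

const filters: SearchFilters = {
  containsAny: { numbers: [] }, // now treated as empty and removed
  equal: { name: "foo" },       // still applied
}
removeEmptyFilters(filters)
// filters.containsAny is now an empty object; only `equal` filters rows.
```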
diff --git a/packages/server/src/sdk/app/rows/search/sqs.ts b/packages/server/src/sdk/app/rows/search/sqs.ts
index 5b0b6e3bc7..7abd7d9e72 100644
--- a/packages/server/src/sdk/app/rows/search/sqs.ts
+++ b/packages/server/src/sdk/app/rows/search/sqs.ts
@@ -132,7 +132,7 @@ export async function search(
type: "row",
}
- if (params.sort && !params.sortType) {
+ if (params.sort) {
const sortField = table.schema[params.sort]
const sortType =
sortField.type === FieldType.NUMBER ? SortType.NUMBER : SortType.STRING
diff --git a/packages/string-templates/src/index.ts b/packages/string-templates/src/index.ts
index 1ac91edd28..847567cb5a 100644
--- a/packages/string-templates/src/index.ts
+++ b/packages/string-templates/src/index.ts
@@ -12,11 +12,12 @@ import {
} from "./utilities"
import { convertHBSBlock } from "./conversion"
import { setJSRunner, removeJSRunner } from "./helpers/javascript"
-import { helpersToRemoveForJs } from "./helpers/list"
import manifest from "./manifest.json"
import { ProcessOptions } from "./types"
+export { helpersToRemoveForJs } from "./helpers/list"
+export { FIND_ANY_HBS_REGEX } from "./utilities"
export { setJSRunner, setOnErrorLog } from "./helpers/javascript"
export { iifeWrapper } from "./iife"
@@ -412,15 +413,9 @@ export function convertToJS(hbs: string) {
return `${varBlock}${js}`
}
-const _FIND_ANY_HBS_REGEX = FIND_ANY_HBS_REGEX
-export { _FIND_ANY_HBS_REGEX as FIND_ANY_HBS_REGEX }
-
export { JsErrorTimeout } from "./errors"
-const _helpersToRemoveForJs = helpersToRemoveForJs
-export { _helpersToRemoveForJs as helpersToRemoveForJs }
-
-function defaultJSSetup() {
+export function defaultJSSetup() {
if (!isBackendService()) {
/**
* Use polyfilled vm to run JS scripts in a browser Env
@@ -440,6 +435,3 @@ function defaultJSSetup() {
}
}
defaultJSSetup()
-
-const _defaultJSSetup = defaultJSSetup
-export { _defaultJSSetup as defaultJSSetup }
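With the rename-and-re-export indirection removed, consumers can import these names directly from the package entry point. A minimal consumer-side sketch:

```ts
import {
  defaultJSSetup,
  helpersToRemoveForJs,
  FIND_ANY_HBS_REGEX,
} from "@budibase/string-templates"

// the module still calls defaultJSSetup() on load; exporting it additionally
// lets callers re-initialise the JS runner explicitly, e.g. in tests
defaultJSSetup()
```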
diff --git a/packages/string-templates/test/basic.spec.ts b/packages/string-templates/test/basic.spec.ts
index 00058d4ecd..ddea54c2bf 100644
--- a/packages/string-templates/test/basic.spec.ts
+++ b/packages/string-templates/test/basic.spec.ts
@@ -126,6 +126,63 @@ describe("Test that the object processing works correctly", () => {
})
})
+describe("check arrays", () => {
+ describe("index with square brackets", () => {
+ it.each([
+ [0, "1"],
+ [1, "2"],
+ ])("should handle an array of primitive types", async (index, expected) => {
+ const json = [1, 2, 3]
+ const output = await processString(`{{ testing.[${index}] }}`, {
+ testing: json,
+ })
+ expect(output).toEqual(expected)
+ })
+
+ it("should handle an array of objects", async () => {
+ const json = [{ value: 1 }, { value: 2 }, { value: 3 }]
+ const output = await processString("{{ testing.[1] }}", {
+ testing: json,
+ })
+ expect(output).toEqual('{"value":2}')
+ })
+
+ it("should handle nesting properties in an array of objects", async () => {
+ const json = [{ value: 1 }, { value: 2 }, { value: 3 }]
+ const output = await processString("{{ testing.[1].value }}", {
+ testing: json,
+ })
+ expect(output).toEqual("2")
+ })
+ })
+
+ describe("index without square brackets", () => {
+ it("should not handle an array of primitive types", async () => {
+ const json = [1, 2, 3]
+ const output = await processString(`{{ testing.1 }}`, {
+ testing: json,
+ })
+ expect(output).toEqual("{{ testing.1 }}")
+ })
+
+ it("should not handle an array of objects", async () => {
+ const json = [{ value: 1 }, { value: 2 }, { value: 3 }]
+ const output = await processString("{{ testing.1 }}", {
+ testing: json,
+ })
+ expect(output).toEqual("{{ testing.1 }}")
+ })
+
+ it("should handle nesting properties in an array of object types", async () => {
+ const json = [{ value: 1 }, { value: 2 }, { value: 3 }]
+ const output = await processString("{{ testing.1.value }}", {
+ testing: json,
+ })
+ expect(output).toEqual("2")
+ })
+ })
+})
+
describe("check returning objects", () => {
it("should handle an array of objects", async () => {
const json = [{ a: 1 }, { a: 2 }]
diff --git a/packages/types/src/documents/account/account.ts b/packages/types/src/documents/account/account.ts
index 2f74b9e7b3..239d845722 100644
--- a/packages/types/src/documents/account/account.ts
+++ b/packages/types/src/documents/account/account.ts
@@ -102,6 +102,7 @@ export function isVerifiableSSOProvider(provider: AccountSSOProvider): boolean {
}
export interface AccountSSO {
+ ssoId?: string
provider: AccountSSOProvider
providerType: AccountSSOProviderType
oauth2?: OAuthTokens
diff --git a/packages/types/src/documents/app/row.ts b/packages/types/src/documents/app/row.ts
index 222c346591..865ab4ba64 100644
--- a/packages/types/src/documents/app/row.ts
+++ b/packages/types/src/documents/app/row.ts
@@ -1,22 +1,111 @@
import { Document } from "../document"
export enum FieldType {
+ /**
+ * a primitive type that stores a string, called Text within Budibase. This is one of the default
+ * types of Budibase; if an external type is not fully understood, we will treat it as text.
+ */
STRING = "string",
+ /**
+ * similar to the string type, called Long Form Text within Budibase. This is mainly a
+ * frontend-oriented type which enables a larger text input area. It can also be used in
+ * conjunction with the 'useRichText' option to support a markdown editor/viewer.
+ */
LONGFORM = "longform",
+ /**
+ * similar to the string type, called Options within Budibase. This works much like the string type
+ * within the backend, but values are validated against a list of options. This will display a
+ * <select> input within the builder/client.
+ */
OPTIONS = "options",
+ /**
+ * a primitive type that stores a number as floating point, called Number within Budibase.
+ * This type always represents numbers as reals/floating point - there is no integer-only
+ * type within Budibase.
+ */
NUMBER = "number",
+ /**
+ * a primitive type that stores a boolean, called Boolean within Budibase. This is often
+ * represented as a toggle or checkbox within forms/grids.
+ */
BOOLEAN = "boolean",
+ /**
+ * a JSON type; this type is always an array of strings, called Multi-select within Budibase.
+ * This type can be compared to the options type, as it functions similarly, but allows picking
+ * multiple options rather than a single option.
+ */
ARRAY = "array",
+ /**
+ * a string type; values are always strings when input/returned from the API, called Date/Time within
+ * Budibase. ISO date strings are utilised to represent dates, and this type has a range of subtypes
+ * to restrict it to date only or time only, and to ignore timezone capabilities.
+ */
DATETIME = "datetime",
+ /**
+ * a JSON type, an array of metadata about files held in object storage, called Attachment List within
+ * Budibase. To utilise this type there is an API for uploading files to Budibase, which returns metadata
+ * that can be stored against columns of this type. Currently this is not supported on external databases.
+ */
ATTACHMENTS = "attachment",
+ /**
+ * a JSON type, similar to the attachments type, called Attachment within Budibase. This type functions
+ * much the same as the attachment list, but holds only a single attachment's metadata as an object.
+ * This simplifies the binding experience of using this column type.
+ */
ATTACHMENT_SINGLE = "attachment_single",
+ /**
+ * a complex type, called Relationships within Budibase. This is the most complex Budibase type;
+ * nothing should be stored against rows under link columns, as this type simply represents the
+ * relationship between tables as part of the table schema. When rows are input to the Budibase API,
+ * relationships to be made are represented as a list of row IDs to link. When rows are returned
+ * from the Budibase API, each row will contain a list of row IDs and display column values of the related rows.
+ */
LINK = "link",
+ /**
+ * a complex type, called Formulas within Budibase. This type has two variants, static and dynamic, with
+ * static only being supported against internal tables. Dynamic formulas evaluate a provided HBS/JS binding
+ * based on the row context and enrich rows when they are returned from the API. Static formulas evaluate
+ * the binding when rows are stored, so that the formula output can be searched upon within the DB. There is
+ * one case where formulas are executed in the browser: as part of the initial formula definition, the formula will be live evaluated in the browser.
+ */
FORMULA = "formula",
+ /**
+ * a complex type, called Auto Column within Budibase. This type has a few variants, with options such as a
+ * date for created at/updated at, an auto ID column which auto-increments as rows are saved, and a user
+ * relationship type which stores the created by/updated by user details. These subtypes depend on the
+ * date, number and link types respectively.
+ */
AUTO = "auto",
+ /**
+ * a JSON type, called JSON within Budibase. This type allows arbitrary JSON to be input to the column,
+ * which will be represented as a JSON object in the row. This type depends on a schema being
+ * provided to make the JSON searchable/bindable; the JSON cannot be fully dynamic.
+ */
JSON = "json",
+ /**
+ * @deprecated an internal type; this old type is no longer used, but is still represented here to
+ * note that it could appear in very old tables.
+ */
INTERNAL = "internal",
+ /**
+ * a string type, called Barcode/QR within Budibase. This type is used to denote to forms that this
+ * column should be filled in using a camera to read a barcode; there is a form component which will
+ * be used when this type is found. The column will contain the contents of any barcode scanned.
+ */
BARCODEQR = "barcodeqr",
+ /**
+ * a string type; this allows representing very large integers, but they are held/managed within
+ * Budibase as strings. When stored in external databases Budibase will attempt to use a real big
+ * integer type, and depends on the database parsing the string into this type as part of saving.
+ */
BIGINT = "bigint",
+ /**
+ * a JSON type, called User within Budibase. This type is used to represent a link to an internal
+ * Budibase resource, like a user or group; today only users are supported. It will be represented as
+ * an array of internal resource IDs (e.g. user IDs) within the row - this ID list will be enriched
+ * with the full resources when rows are returned from the API. Either the full resources or an array
+ * of resource IDs can be input to the API; the API will squash these down and validate them before saving the row.
+ */
BB_REFERENCE = "bb_reference",
}
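As an illustration of how several of the documented types appear on a row input to the API (a hypothetical table and columns - the IDs are placeholders, not real formats):

```ts
const row = {
  name: "Fix the build",           // STRING - plain text
  estimate: 4.5,                   // NUMBER - always floating point
  done: false,                     // BOOLEAN
  tags: ["ci", "urgent"],          // ARRAY - multi-select, array of strings
  due: "2024-03-29T12:00:00.000Z", // DATETIME - ISO date string
  total: "9007199254740993",       // BIGINT - held as a string
  blockedBy: ["row-id-1"],         // LINK - row IDs of relationships to make
  assignee: ["user-id-1"],         // BB_REFERENCE - user IDs, enriched on read
}
```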
diff --git a/scripts/deploy-camunda.sh b/scripts/deploy-camunda.sh
new file mode 100755
index 0000000000..d01ed64b5a
--- /dev/null
+++ b/scripts/deploy-camunda.sh
@@ -0,0 +1,26 @@
+#!/bin/bash
+yarn global add zbctl
+export ZEEBE_ADDRESS='localhost:26500'
+
+cd ../budibase-bpm
+
+# succeeds (exit 0) once the Zeebe gateway reports a healthy status
+is_camunda_ready() {
+  zbctl --insecure status 2>/dev/null | grep -q 'Healthy'
+}
+
+docker-compose up -d
+echo "waiting for Camunda to be ready..."
+
+until is_camunda_ready; do sleep 1; done
+
+cd src/main/resources/models
+
+echo "deploy processes..."
+zbctl deploy resource offboarding.bpmn --insecure
+zbctl deploy resource onboarding.bpmn --insecure
+
+cd ../../../../../budibase/packages/account-portal/packages/server
+
+# run the account portal worker in the background, then start the dev server from the repo root
+yarn worker:run & cd ../../../.. && yarn dev:accountportal
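Note that the relative `cd`s above assume `budibase` and `budibase-bpm` are sibling checkouts and that the script is invoked from the budibase repository root, i.e. as `./scripts/deploy-camunda.sh`.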
diff --git a/yarn.lock b/yarn.lock
index 3ac0351e64..a36b54d3be 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -5174,6 +5174,13 @@
dependencies:
"@types/node" "*"
+"@types/archiver@^6.0.2":
+ version "6.0.2"
+ resolved "https://registry.yarnpkg.com/@types/archiver/-/archiver-6.0.2.tgz#0daf8c83359cbde69de1e4b33dcade6a48a929e2"
+ integrity sha512-KmROQqbQzKGuaAbmK+ZcytkJ51+YqDa7NmbXjmtC5YBLSyQYo21YaUnQ3HbaPFKL1ooo6RQ6OPYPIDyxfpDDXw==
+ dependencies:
+ "@types/readdir-glob" "*"
+
"@types/aria-query@^5.0.1":
version "5.0.1"
resolved "https://registry.yarnpkg.com/@types/aria-query/-/aria-query-5.0.1.tgz#3286741fb8f1e1580ac28784add4c7a1d49bdfbc"
@@ -5923,6 +5930,13 @@
resolved "https://registry.yarnpkg.com/@types/range-parser/-/range-parser-1.2.4.tgz#cd667bcfdd025213aafb7ca5915a932590acdcdc"
integrity sha512-EEhsLsD6UsDM1yFhAvy0Cjr6VwmpMWqFBCb9w07wVugF7w9nfajxLuVmngTIpgS6svCnm6Vaw+MZhoDCKnOfsw==
+"@types/readdir-glob@*":
+ version "1.1.5"
+ resolved "https://registry.yarnpkg.com/@types/readdir-glob/-/readdir-glob-1.1.5.tgz#21a4a98898fc606cb568ad815f2a0eedc24d412a"
+ integrity sha512-raiuEPUYqXu+nvtY2Pe8s8FEmZ3x5yAH4VkLdihcPdalvsHltomrRC9BzuStrJ9yk06470hS0Crw0f1pXqD+Hg==
+ dependencies:
+ "@types/node" "*"
+
"@types/redis@^2.8.0":
version "2.8.32"
resolved "https://registry.yarnpkg.com/@types/redis/-/redis-2.8.32.tgz#1d3430219afbee10f8cfa389dad2571a05ecfb11"
@@ -6993,6 +7007,32 @@ archiver-utils@^3.0.4:
normalize-path "^3.0.0"
readable-stream "^3.6.0"
+archiver-utils@^5.0.0, archiver-utils@^5.0.2:
+ version "5.0.2"
+ resolved "https://registry.yarnpkg.com/archiver-utils/-/archiver-utils-5.0.2.tgz#63bc719d951803efc72cf961a56ef810760dd14d"
+ integrity sha512-wuLJMmIBQYCsGZgYLTy5FIB2pF6Lfb6cXMSF8Qywwk3t20zWnAi7zLcQFdKQmIB8wyZpY5ER38x08GbwtR2cLA==
+ dependencies:
+ glob "^10.0.0"
+ graceful-fs "^4.2.0"
+ is-stream "^2.0.1"
+ lazystream "^1.0.0"
+ lodash "^4.17.15"
+ normalize-path "^3.0.0"
+ readable-stream "^4.0.0"
+
+archiver@7.0.1:
+ version "7.0.1"
+ resolved "https://registry.yarnpkg.com/archiver/-/archiver-7.0.1.tgz#c9d91c350362040b8927379c7aa69c0655122f61"
+ integrity sha512-ZcbTaIqJOfCc03QwD468Unz/5Ir8ATtvAHsK+FdXbDIbGfihqh9mrvdcYunQzqn4HrvWWaFyaxJhGZagaJJpPQ==
+ dependencies:
+ archiver-utils "^5.0.2"
+ async "^3.2.4"
+ buffer-crc32 "^1.0.0"
+ readable-stream "^4.0.0"
+ readdir-glob "^1.1.2"
+ tar-stream "^3.0.0"
+ zip-stream "^6.0.1"
+
archiver@^5.3.2:
version "5.3.2"
resolved "https://registry.yarnpkg.com/archiver/-/archiver-5.3.2.tgz#99991d5957e53bd0303a392979276ac4ddccf3b0"
@@ -7794,6 +7834,11 @@ buffer-crc32@^0.2.1, buffer-crc32@^0.2.13, buffer-crc32@~0.2.3:
resolved "https://registry.yarnpkg.com/buffer-crc32/-/buffer-crc32-0.2.13.tgz#0d333e3f00eac50aa1454abd30ef8c2a5d9a7242"
integrity sha512-VO9Ht/+p3SN7SKWqcrgEzjGbRSJYTx+Q1pTQC0wrWqHx0vpJraQ6GtHx8tvcg1rlK1byhU5gccxgOgj7B0TDkQ==
+buffer-crc32@^1.0.0:
+ version "1.0.0"
+ resolved "https://registry.yarnpkg.com/buffer-crc32/-/buffer-crc32-1.0.0.tgz#a10993b9055081d55304bd9feb4a072de179f405"
+ integrity sha512-Db1SbgBS/fg/392AblrMJk97KggmvYhr4pB5ZIMTWtaivCPMWLkmb7m21cJvpvgK+J3nsU2CmmixNBZx4vFj/w==
+
buffer-equal-constant-time@1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz#f8e71132f7ffe6e01a5c9697a4c6f3e48d5cc819"
@@ -8630,6 +8675,17 @@ compress-commons@^4.1.2:
normalize-path "^3.0.0"
readable-stream "^3.6.0"
+compress-commons@^6.0.2:
+ version "6.0.2"
+ resolved "https://registry.yarnpkg.com/compress-commons/-/compress-commons-6.0.2.tgz#26d31251a66b9d6ba23a84064ecd3a6a71d2609e"
+ integrity sha512-6FqVXeETqWPoGcfzrXb37E50NP0LXT8kAMu5ooZayhWWdgEY4lBEEcbQNXtkuKQsGduxiIcI4gOTsxTmuq/bSg==
+ dependencies:
+ crc-32 "^1.2.0"
+ crc32-stream "^6.0.0"
+ is-stream "^2.0.1"
+ normalize-path "^3.0.0"
+ readable-stream "^4.0.0"
+
compressible@^2.0.0, compressible@^2.0.12:
version "2.0.18"
resolved "https://registry.yarnpkg.com/compressible/-/compressible-2.0.18.tgz#af53cca6b070d4c3c0750fbd77286a6d7cc46fba"
@@ -8922,6 +8978,14 @@ crc32-stream@^4.0.2:
crc-32 "^1.2.0"
readable-stream "^3.4.0"
+crc32-stream@^6.0.0:
+ version "6.0.0"
+ resolved "https://registry.yarnpkg.com/crc32-stream/-/crc32-stream-6.0.0.tgz#8529a3868f8b27abb915f6c3617c0fadedbf9430"
+ integrity sha512-piICUB6ei4IlTv1+653yq5+KoqfBYmj9bw6LqXoOneTMDXk5nM1qt12mFW1caG3LlJXEKW1Bp0WggEmIfQB34g==
+ dependencies:
+ crc-32 "^1.2.0"
+ readable-stream "^4.0.0"
+
crc@^3.4.4:
version "3.8.0"
resolved "https://registry.yarnpkg.com/crc/-/crc-3.8.0.tgz#ad60269c2c856f8c299e2c4cc0de4556914056c6"
@@ -11800,6 +11864,17 @@ glob@7.1.6:
once "^1.3.0"
path-is-absolute "^1.0.0"
+glob@^10.0.0:
+ version "10.3.12"
+ resolved "https://registry.yarnpkg.com/glob/-/glob-10.3.12.tgz#3a65c363c2e9998d220338e88a5f6ac97302960b"
+ integrity sha512-TCNv8vJ+xz4QiqTpfOJA7HvYv+tNIRHKfUWw/q+v2jdgN4ebz+KY9tGx5J4rHP0o84mNP+ApH66HRX8us3Khqg==
+ dependencies:
+ foreground-child "^3.1.0"
+ jackspeak "^2.3.6"
+ minimatch "^9.0.1"
+ minipass "^7.0.4"
+ path-scurry "^1.10.2"
+
glob@^10.2.2:
version "10.2.7"
resolved "https://registry.yarnpkg.com/glob/-/glob-10.2.7.tgz#9dd2828cd5bc7bd861e7738d91e7113dda41d7d8"
@@ -13186,7 +13261,7 @@ is-stream@^1.1.0:
resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-1.1.0.tgz#12d4a3dd4e68e0b79ceb8dbc84173ae80d91ca44"
integrity sha512-uQPm8kcs47jx38atAcWTVxyltQYoPT68y9aWYdV6yWXSyW8mzSat0TL6CiWdZeCdF3KrAvpVtnHbTv4RN+rqdQ==
-is-stream@^2.0.0:
+is-stream@^2.0.0, is-stream@^2.0.1:
version "2.0.1"
resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-2.0.1.tgz#fac1e3d53b97ad5a9d0ae9cef2389f5810a5c077"
integrity sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==
@@ -13432,6 +13507,15 @@ jackspeak@^2.0.3:
optionalDependencies:
"@pkgjs/parseargs" "^0.11.0"
+jackspeak@^2.3.6:
+ version "2.3.6"
+ resolved "https://registry.yarnpkg.com/jackspeak/-/jackspeak-2.3.6.tgz#647ecc472238aee4b06ac0e461acc21a8c505ca8"
+ integrity sha512-N3yCS/NegsOBokc8GAdM8UcmfsKiSS8cipheD/nivzr700H+nsMOxJjQnvwOcRYVuFkdH0wGUvW2WbXGmrZGbQ==
+ dependencies:
+ "@isaacs/cliui" "^8.0.2"
+ optionalDependencies:
+ "@pkgjs/parseargs" "^0.11.0"
+
jake@^10.8.5:
version "10.8.5"
resolved "https://registry.yarnpkg.com/jake/-/jake-10.8.5.tgz#f2183d2c59382cb274226034543b9c03b8164c46"
@@ -15209,6 +15293,11 @@ lowercase-keys@^2.0.0:
resolved "https://registry.yarnpkg.com/lowercase-keys/-/lowercase-keys-2.0.0.tgz#2603e78b7b4b0006cbca2fbcc8a3202558ac9479"
integrity sha512-tqNXrS78oMOE73NMxK4EMLQsQowWf8jKooH9g7xPavRT706R6bkQJ6DY2Te7QukaZsulxa30wQ7bk0pm4XiHmA==
+lru-cache@^10.2.0:
+ version "10.2.0"
+ resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-10.2.0.tgz#0bd445ca57363465900f4d1f9bd8db343a4d95c3"
+ integrity sha512-2bIM8x+VAf6JT4bKAljS1qUWgMsqZRPGJS6FSahIMPVvctcNhyVp7AJu7quxOW9jwkryBReKZY5tY5JYv2n/7Q==
+
lru-cache@^4.1.5:
version "4.1.5"
resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-4.1.5.tgz#8bbe50ea85bed59bc9e33dcab8235ee9bcf443cd"
@@ -15834,6 +15923,11 @@ minipass@^5.0.0:
resolved "https://registry.yarnpkg.com/minipass/-/minipass-6.0.2.tgz#542844b6c4ce95b202c0995b0a471f1229de4c81"
integrity sha512-MzWSV5nYVT7mVyWCwn2o7JH13w2TBRmmSqSRCKzTw+lmft9X4z+3wjvs06Tzijo5z4W/kahUCDpRXTF+ZrmF/w==
+"minipass@^5.0.0 || ^6.0.2 || ^7.0.0", minipass@^7.0.4:
+ version "7.0.4"
+ resolved "https://registry.yarnpkg.com/minipass/-/minipass-7.0.4.tgz#dbce03740f50a4786ba994c1fb908844d27b038c"
+ integrity sha512-jYofLM5Dam9279rdkWzqHozUo4ybjdZmCsDHePy5V/PbBcVMiSZR97gmAy45aqi8CK1lG2ECd356FU86avfwUQ==
+
minizlib@^2.0.0, minizlib@^2.1.1, minizlib@^2.1.2:
version "2.1.2"
resolved "https://registry.yarnpkg.com/minizlib/-/minizlib-2.1.2.tgz#e90d3466ba209b932451508a11ce3d3632145931"
@@ -17392,6 +17486,14 @@ path-parser@^6.1.0:
search-params "3.0.0"
tslib "^1.10.0"
+path-scurry@^1.10.2:
+ version "1.10.2"
+ resolved "https://registry.yarnpkg.com/path-scurry/-/path-scurry-1.10.2.tgz#8f6357eb1239d5fa1da8b9f70e9c080675458ba7"
+ integrity sha512-7xTavNy5RQXnsjANvVvMkEjvloOinkAjv/Z6Ildz9v2RinZ4SBKTWFOVRbaF8p0vpHnyjV/UwNDdKuUv6M5qcA==
+ dependencies:
+ lru-cache "^10.2.0"
+ minipass "^5.0.0 || ^6.0.2 || ^7.0.0"
+
path-scurry@^1.6.1, path-scurry@^1.7.0:
version "1.9.2"
resolved "https://registry.yarnpkg.com/path-scurry/-/path-scurry-1.9.2.tgz#90f9d296ac5e37e608028e28a447b11d385b3f63"
@@ -21037,7 +21139,7 @@ tar-stream@^2.0.0, tar-stream@^2.1.4, tar-stream@^2.2.0, tar-stream@~2.2.0:
inherits "^2.0.3"
readable-stream "^3.1.1"
-tar-stream@^3.1.5:
+tar-stream@^3.0.0, tar-stream@^3.1.5:
version "3.1.7"
resolved "https://registry.yarnpkg.com/tar-stream/-/tar-stream-3.1.7.tgz#24b3fb5eabada19fe7338ed6d26e5f7c482e792b"
integrity sha512-qJj60CXt7IU1Ffyc3NJMjh6EkuCFej46zUqJ4J7pqYlThyd9bO0XBTmcOIhSzZJVWfsLks0+nle/j538YAW9RQ==
@@ -22941,3 +23043,12 @@ zip-stream@^4.1.0:
archiver-utils "^3.0.4"
compress-commons "^4.1.2"
readable-stream "^3.6.0"
+
+zip-stream@^6.0.1:
+ version "6.0.1"
+ resolved "https://registry.yarnpkg.com/zip-stream/-/zip-stream-6.0.1.tgz#e141b930ed60ccaf5d7fa9c8260e0d1748a2bbfb"
+ integrity sha512-zK7YHHz4ZXpW89AHXUPbQVGKI7uvkd3hzusTdotCg1UxyaVtg0zFJSTfW/Dq5f7OBBVnq6cZIaC8Ti4hb6dtCA==
+ dependencies:
+ archiver-utils "^5.0.0"
+ compress-commons "^6.0.2"
+ readable-stream "^4.0.0"