Merge branch 'master' into fix/clear-relationships-automation
commit cd5af7635a
@@ -42,7 +42,15 @@
   },
   "rules": {
     "no-unused-vars": "off",
-    "@typescript-eslint/no-unused-vars": "error",
+    "@typescript-eslint/no-unused-vars": [
+      "error",
+      {
+        "varsIgnorePattern": "^_",
+        "argsIgnorePattern": "^_",
+        "destructuredArrayIgnorePattern": "^_",
+        "ignoreRestSiblings": true
+      }
+    ],
     "local-rules/no-budibase-imports": "error"
   }
 },
@@ -59,7 +67,15 @@
   },
   "rules": {
     "no-unused-vars": "off",
-    "@typescript-eslint/no-unused-vars": "error",
+    "@typescript-eslint/no-unused-vars": [
+      "error",
+      {
+        "varsIgnorePattern": "^_",
+        "argsIgnorePattern": "^_",
+        "destructuredArrayIgnorePattern": "^_",
+        "ignoreRestSiblings": true
+      }
+    ],
     "local-rules/no-test-com": "error",
     "local-rules/email-domain-example-com": "error",
     "no-console": "warn",
@@ -89,7 +105,8 @@
       {
         "varsIgnorePattern": "^_",
         "argsIgnorePattern": "^_",
-        "destructuredArrayIgnorePattern": "^_"
+        "destructuredArrayIgnorePattern": "^_",
+        "ignoreRestSiblings": true
       }
     ],
     "import/no-relative-packages": "error",

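The rule options above permit intentionally unused bindings when they are prefixed with an underscore. A minimal sketch of TypeScript that passes under this configuration (the sample names besides `removeJobs` are illustrative, not from the codebase):

```ts
// Unused parameters are tolerated when they start with "_" (argsIgnorePattern),
// matching the removeJobs(_pattern) rename later in this commit.
export function removeJobs(_pattern: string) {
  // no-op: the parameter is required by the interface but unused here
}

// destructuredArrayIgnorePattern: unused array slots prefixed with "_"
const [_first, second] = ["a", "b"]
console.log(second) // "b"

// ignoreRestSiblings: keys stripped via rest destructuring are not flagged
const { password: _password, ...safeUser } = { id: 1, password: "x" }
console.log(safeUser) // { id: 1 }
```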
@@ -64,10 +64,11 @@ jobs:
       - run: yarn --frozen-lockfile

       # Run build all the projects
-      - name: Build
-        run: |
-          yarn build:oss
-          yarn build:account-portal
+      - name: Build OSS
+        run: yarn build:oss
+      - name: Build account portal
+        run: yarn build:account-portal
+        if: ${{ env.IS_OSS_CONTRIBUTOR == 'false' }}
       # Check the types of the projects built via esbuild
       - name: Check types
         run: |

@@ -150,6 +150,10 @@ $ helm install --create-namespace --namespace budibase budibase . -f values.yaml
 | services.apps.autoscaling.maxReplicas | int | `10` |  |
 | services.apps.autoscaling.minReplicas | int | `1` |  |
 | services.apps.autoscaling.targetCPUUtilizationPercentage | int | `80` | Target CPU utilization percentage for the apps service. Note that for autoscaling to work, you will need to have metrics-server configured, and resources set for the apps pods. |
+| services.apps.extraContainers | list | `[]` | Additional containers to be added to the apps pod. |
 | services.apps.extraEnv | list | `[]` | Extra environment variables to set for apps pods. Takes a list of name=value pairs. |
+| services.apps.extraVolumeMounts | list | `[]` | Additional volumeMounts to the main apps container. |
+| services.apps.extraVolumes | list | `[]` | Additional volumes to the apps pod. |
 | services.apps.httpLogging | int | `1` | Whether or not to log HTTP requests to the apps service. |
 | services.apps.livenessProbe | object | HTTP health checks. | Liveness probe configuration for apps pods. You shouldn't need to change this, but if you want to you can find more information here: <https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-startup-probes/> |
 | services.apps.logLevel | string | `"info"` | The log level for the apps service. |
@@ -162,6 +166,10 @@ $ helm install --create-namespace --namespace budibase budibase . -f values.yaml
 | services.automationWorkers.autoscaling.minReplicas | int | `1` |  |
 | services.automationWorkers.autoscaling.targetCPUUtilizationPercentage | int | `80` | Target CPU utilization percentage for the automation worker service. Note that for autoscaling to work, you will need to have metrics-server configured, and resources set for the automation worker pods. |
 | services.automationWorkers.enabled | bool | `true` | Whether or not to enable the automation worker service. If you disable this, automations will be processed by the apps service. |
+| services.automationWorkers.extraContainers | list | `[]` | Additional containers to be added to the automationWorkers pod. |
 | services.automationWorkers.extraEnv | list | `[]` | Extra environment variables to set for automation worker pods. Takes a list of name=value pairs. |
+| services.automationWorkers.extraVolumeMounts | list | `[]` | Additional volumeMounts to the main automationWorkers container. |
+| services.automationWorkers.extraVolumes | list | `[]` | Additional volumes to the automationWorkers pod. |
 | services.automationWorkers.livenessProbe | object | HTTP health checks. | Liveness probe configuration for automation worker pods. You shouldn't need to change this, but if you want to you can find more information here: <https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-startup-probes/> |
 | services.automationWorkers.logLevel | string | `"info"` | The log level for the automation worker service. |
 | services.automationWorkers.readinessProbe | object | HTTP health checks. | Readiness probe configuration for automation worker pods. You shouldn't need to change this, but if you want to you can find more information here: <https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-startup-probes/> |
@@ -180,6 +188,9 @@ $ helm install --create-namespace --namespace budibase budibase . -f values.yaml
 | services.objectStore.cloudfront.cdn | string | `""` | Set the url of a distribution to enable cloudfront. |
 | services.objectStore.cloudfront.privateKey64 | string | `""` | Base64 encoded private key for the above public key. |
 | services.objectStore.cloudfront.publicKeyId | string | `""` | ID of public key stored in cloudfront. |
+| services.objectStore.extraContainers | list | `[]` | Additional containers to be added to the objectStore pod. |
+| services.objectStore.extraVolumeMounts | list | `[]` | Additional volumeMounts to the main objectStore container. |
+| services.objectStore.extraVolumes | list | `[]` | Additional volumes to the objectStore pod. |
 | services.objectStore.minio | bool | `true` | Set to false if using another object store, such as S3. You will need to set `services.objectStore.url` to point to your bucket if you do this. |
 | services.objectStore.region | string | `""` | AWS_REGION if using S3 |
 | services.objectStore.resources | object | `{}` | The resources to use for Minio pods. See <https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/> for more information on how to set these. |
@@ -191,12 +202,19 @@ $ helm install --create-namespace --namespace budibase budibase . -f values.yaml
 | services.proxy.autoscaling.maxReplicas | int | `10` |  |
 | services.proxy.autoscaling.minReplicas | int | `1` |  |
 | services.proxy.autoscaling.targetCPUUtilizationPercentage | int | `80` | Target CPU utilization percentage for the proxy service. Note that for autoscaling to work, you will need to have metrics-server configured, and resources set for the proxy pods. |
+| services.proxy.extraContainers | list | `[]` |  |
+| services.proxy.extraVolumeMounts | list | `[]` | Additional volumeMounts to the main proxy container. |
+| services.proxy.extraVolumes | list | `[]` | Additional volumes to the proxy pod. |
 | services.proxy.livenessProbe | object | HTTP health checks. | Liveness probe configuration for proxy pods. You shouldn't need to change this, but if you want to you can find more information here: <https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-startup-probes/> |
 | services.proxy.readinessProbe | object | HTTP health checks. | Readiness probe configuration for proxy pods. You shouldn't need to change this, but if you want to you can find more information here: <https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-startup-probes/> |
 | services.proxy.replicaCount | int | `1` | The number of proxy replicas to run. |
 | services.proxy.resources | object | `{}` | The resources to use for proxy pods. See <https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/> for more information on how to set these. |
 | services.proxy.startupProbe | object | HTTP health checks. | Startup probe configuration for proxy pods. You shouldn't need to change this, but if you want to you can find more information here: <https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-startup-probes/> |
 | services.redis.enabled | bool | `true` | Whether or not to deploy a Redis pod into your cluster. |
+| services.redis.extraContainers | list | `[]` | Additional containers to be added to the redis pod. |
+| services.redis.extraVolumeMounts | list | `[]` | Additional volumeMounts to the main redis container. |
+| services.redis.extraVolumes | list | `[]` | Additional volumes to the redis pod. |
+| services.redis.image | string | `"redis"` | The Redis image to use. |
 | services.redis.password | string | `"budibase"` | The password to use when connecting to Redis. It's recommended that you change this from the default if you're running Redis in-cluster. |
 | services.redis.port | int | `6379` | Port to expose Redis on. |
 | services.redis.resources | object | `{}` | The resources to use for Redis pods. See <https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/> for more information on how to set these. |
@@ -207,6 +225,10 @@ $ helm install --create-namespace --namespace budibase budibase . -f values.yaml
 | services.worker.autoscaling.maxReplicas | int | `10` |  |
 | services.worker.autoscaling.minReplicas | int | `1` |  |
 | services.worker.autoscaling.targetCPUUtilizationPercentage | int | `80` | Target CPU utilization percentage for the worker service. Note that for autoscaling to work, you will need to have metrics-server configured, and resources set for the worker pods. |
+| services.worker.extraContainers | list | `[]` | Additional containers to be added to the worker pod. |
 | services.worker.extraEnv | list | `[]` | Extra environment variables to set for worker pods. Takes a list of name=value pairs. |
+| services.worker.extraVolumeMounts | list | `[]` | Additional volumeMounts to the main worker container. |
+| services.worker.extraVolumes | list | `[]` | Additional volumes to the worker pod. |
 | services.worker.httpLogging | int | `1` | Whether or not to log HTTP requests to the worker service. |
 | services.worker.livenessProbe | object | HTTP health checks. | Liveness probe configuration for worker pods. You shouldn't need to change this, but if you want to you can find more information here: <https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-startup-probes/> |
 | services.worker.logLevel | string | `"info"` | The log level for the worker service. |
@@ -225,4 +247,4 @@ $ helm uninstall --namespace budibase budibase
 ```

 ----------------------------------------------
-Autogenerated from chart metadata using [helm-docs v1.11.3](https://github.com/norwoodj/helm-docs/releases/v1.11.3)
+Autogenerated from chart metadata using [helm-docs v1.13.1](https://github.com/norwoodj/helm-docs/releases/v1.13.1)

@@ -235,6 +235,13 @@ spec:
           args:
             {{- toYaml .Values.services.apps.args | nindent 10 }}
           {{ end }}
+          {{ if .Values.services.apps.extraVolumeMounts }}
+          volumeMounts:
+            {{- toYaml .Values.services.apps.extraVolumeMounts | nindent 10 }}
+          {{- end }}
+      {{- if .Values.services.apps.extraContainers }}
+        {{- toYaml .Values.services.apps.extraContainers | nindent 6 }}
+      {{- end }}
       {{- with .Values.affinity }}
       affinity:
         {{- toYaml . | nindent 8 }}
@@ -258,4 +265,8 @@ spec:
           - name: ndots
             value: {{ .Values.services.apps.ndots | quote }}
       {{ end }}
+      {{ if .Values.services.apps.extraVolumes }}
+      volumes:
+        {{- toYaml .Values.services.apps.extraVolumes | nindent 6 }}
+      {{- end }}
 status: {}

@@ -235,6 +235,13 @@ spec:
           args:
             {{- toYaml .Values.services.automationWorkers.args | nindent 10 }}
           {{ end }}
+          {{ if .Values.services.automationWorkers.extraVolumeMounts }}
+          volumeMounts:
+            {{- toYaml .Values.services.automationWorkers.extraVolumeMounts | nindent 10 }}
+          {{ end }}
+      {{- if .Values.services.automationWorkers.extraContainers }}
+        {{- toYaml .Values.services.automationWorkers.extraContainers | nindent 6 }}
+      {{- end }}
       {{- with .Values.affinity }}
       affinity:
         {{- toYaml . | nindent 8 }}
@@ -258,5 +265,9 @@ spec:
           - name: ndots
             value: {{ .Values.services.automationWorkers.ndots | quote }}
       {{ end }}
+      {{ if .Values.services.automationWorkers.extraVolumes }}
+      volumes:
+        {{- toYaml .Values.services.automationWorkers.extraVolumes | nindent 8 }}
+      {{ end }}
 status: {}
 {{- end }}

@@ -54,6 +54,12 @@ spec:
         volumeMounts:
           - mountPath: /data
             name: minio-data
+          {{ if .Values.services.objectStore.extraVolumeMounts }}
+            {{- toYaml .Values.services.objectStore.extraVolumeMounts | nindent 8 }}
+          {{- end }}
+      {{- if .Values.services.objectStore.extraContainers }}
+        {{- toYaml .Values.services.objectStore.extraContainers | nindent 6 }}
+      {{- end }}
       {{- with .Values.affinity }}
       affinity:
         {{- toYaml . | nindent 8 }}
@@ -75,5 +81,8 @@ spec:
         - name: minio-data
           persistentVolumeClaim:
             claimName: minio-data
+        {{ if .Values.services.objectStore.extraVolumes }}
+          {{- toYaml .Values.services.objectStore.extraVolumes | nindent 6 }}
+        {{- end }}
 status: {}
 {{- end }}

@@ -82,7 +82,13 @@ spec:
           resources:
             {{- toYaml . | nindent 10 }}
         {{ end }}
+        {{ if .Values.services.proxy.extraVolumeMounts }}
+        volumeMounts:
+          {{- toYaml .Values.services.proxy.extraVolumeMounts | nindent 8 }}
+        {{- end }}
+      {{- if .Values.services.proxy.extraContainers }}
+        {{- toYaml .Values.services.proxy.extraContainers | nindent 6 }}
+      {{- end }}
       {{- with .Values.affinity }}
       affinity:
         {{- toYaml . | nindent 8 }}
@@ -108,7 +114,10 @@ spec:
         args:
           {{- toYaml .Values.services.proxy.args | nindent 8 }}
       {{ end }}
+      {{ if .Values.services.proxy.extraVolumes }}
+      volumes:
+        {{- toYaml .Values.services.proxy.extraVolumes | nindent 6 }}
+      {{ end }}
       {{ if .Values.services.proxy.ndots }}
       dnsConfig:
         options:

@@ -22,7 +22,7 @@ spec:
             - redis-server
             - --requirepass
             - {{ .Values.services.redis.password }}
-          image: redis
+          image: {{ .Values.services.redis.image }}
           imagePullPolicy: ""
           name: redis-service
           ports:
@@ -34,6 +34,12 @@ spec:
         volumeMounts:
           - mountPath: /data
             name: redis-data
+          {{ if .Values.services.redis.extraVolumeMounts }}
+            {{- toYaml .Values.services.redis.extraVolumeMounts | nindent 8 }}
+          {{- end }}
+      {{- if .Values.services.redis.extraContainers }}
+        {{- toYaml .Values.services.redis.extraContainers | nindent 6 }}
+      {{- end }}
       {{- with .Values.affinity }}
       affinity:
         {{- toYaml . | nindent 8 }}
@@ -55,6 +61,9 @@ spec:
         - name: redis-data
           persistentVolumeClaim:
             claimName: redis-data
+        {{ if .Values.services.redis.extraVolumes }}
+          {{- toYaml .Values.services.redis.extraVolumes | nindent 6 }}
+        {{- end }}

 status: {}
 {{- end }}

@@ -221,6 +221,13 @@ spec:
           args:
             {{- toYaml .Values.services.worker.args | nindent 10 }}
           {{ end }}
+          {{ if .Values.services.worker.extraVolumeMounts }}
+          volumeMounts:
+            {{- toYaml .Values.services.worker.extraVolumeMounts | nindent 10 }}
+          {{- end }}
+      {{- if .Values.services.worker.extraContainers }}
+        {{- toYaml .Values.services.worker.extraContainers | nindent 6 }}
+      {{- end }}
       {{- with .Values.affinity }}
       affinity:
         {{- toYaml . | nindent 8 }}
@@ -244,4 +251,8 @@ spec:
           - name: ndots
             value: {{ .Values.services.worker.ndots | quote }}
       {{ end }}
+      {{ if .Values.services.worker.extraVolumes }}
+      volumes:
+        {{- toYaml .Values.services.worker.extraVolumes | nindent 6 }}
+      {{- end }}
 status: {}

@@ -206,6 +206,20 @@ services:
     # for autoscaling to work, you will need to have metrics-server
     # configured, and resources set for the proxy pods.
     targetCPUUtilizationPercentage: 80
+    # -- Additional containers to be added to the proxy pod.
+    extraContainers: []
+    #  - name: my-sidecar
+    #    image: myimage:latest
+
+    # -- Additional volumeMounts to the main proxy container.
+    extraVolumeMounts: []
+    #  - name: my-volume
+    #    mountPath: /path/to/mount
+
+    # -- Additional volumes to the proxy pod.
+    extraVolumes: []
+    #  - name: my-volume
+    #    emptyDir: {}

   apps:
     # @ignore (you shouldn't need to change this)
@@ -274,6 +288,20 @@ services:
     # autoscaling to work, you will need to have metrics-server configured,
     # and resources set for the apps pods.
     targetCPUUtilizationPercentage: 80
+    # -- Additional containers to be added to the apps pod.
+    extraContainers: []
+    #  - name: my-sidecar
+    #    image: myimage:latest
+
+    # -- Additional volumeMounts to the main apps container.
+    extraVolumeMounts: []
+    #  - name: my-volume
+    #    mountPath: /path/to/mount
+
+    # -- Additional volumes to the apps pod.
+    extraVolumes: []
+    #  - name: my-volume
+    #    emptyDir: {}

   automationWorkers:
     # -- Whether or not to enable the automation worker service. If you disable this,
@@ -346,6 +374,20 @@ services:
     # Note that for autoscaling to work, you will need to have metrics-server
     # configured, and resources set for the automation worker pods.
     targetCPUUtilizationPercentage: 80
+    # -- Additional containers to be added to the automationWorkers pod.
+    extraContainers: []
+    #  - name: my-sidecar
+    #    image: myimage:latest
+
+    # -- Additional volumeMounts to the main automationWorkers container.
+    extraVolumeMounts: []
+    #  - name: my-volume
+    #    mountPath: /path/to/mount
+
+    # -- Additional volumes to the automationWorkers pod.
+    extraVolumes: []
+    #  - name: my-volume
+    #    emptyDir: {}

   worker:
     # @ignore (you shouldn't need to change this)
@@ -414,6 +456,20 @@ services:
     # for autoscaling to work, you will need to have metrics-server
     # configured, and resources set for the worker pods.
     targetCPUUtilizationPercentage: 80
+    # -- Additional containers to be added to the worker pod.
+    extraContainers: []
+    #  - name: my-sidecar
+    #    image: myimage:latest
+
+    # -- Additional volumeMounts to the main worker container.
+    extraVolumeMounts: []
+    #  - name: my-volume
+    #    mountPath: /path/to/mount
+
+    # -- Additional volumes to the worker pod.
+    extraVolumes: []
+    #  - name: my-volume
+    #    emptyDir: {}

   couchdb:
     # -- Whether or not to spin up a CouchDB instance in your cluster. True by
@@ -440,6 +496,8 @@ services:
     resources: {}

   redis:
+    # -- The Redis image to use.
+    image: redis
     # -- Whether or not to deploy a Redis pod into your cluster.
     enabled: true
     # -- Port to expose Redis on.
@@ -463,6 +521,20 @@ services:
     # <https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/>
     # for more information on how to set these.
     resources: {}
+    # -- Additional containers to be added to the redis pod.
+    extraContainers: []
+    #  - name: my-sidecar
+    #    image: myimage:latest
+
+    # -- Additional volumeMounts to the main redis container.
+    extraVolumeMounts: []
+    #  - name: my-volume
+    #    mountPath: /path/to/mount
+
+    # -- Additional volumes to the redis pod.
+    extraVolumes: []
+    #  - name: my-volume
+    #    emptyDir: {}

   objectStore:
     # -- Set to false if using another object store, such as S3. You will need
@@ -488,7 +560,7 @@ services:
     # do this.
     url: "http://minio-service:9000"
     # -- How much storage to give Minio in its PersistentVolumeClaim.
-    storage: 100Mi
+    storage: 2Gi
     # -- If defined, storageClassName: <storageClass> If set to "-",
     # storageClassName: "", which disables dynamic provisioning If undefined
     # (the default) or set to null, no storageClassName spec is set, choosing
@@ -505,6 +577,20 @@ services:
     publicKeyId: ""
     # -- Base64 encoded private key for the above public key.
     privateKey64: ""
+    # -- Additional containers to be added to the objectStore pod.
+    extraContainers: []
+    #  - name: my-sidecar
+    #    image: myimage:latest
+
+    # -- Additional volumeMounts to the main objectStore container.
+    extraVolumeMounts: []
+    #  - name: my-volume
+    #    mountPath: /path/to/mount
+
+    # -- Additional volumes to the objectStore pod.
+    extraVolumes: []
+    #  - name: my-volume
+    #    emptyDir: {}

 # Override values in couchDB subchart. We're only specifying the values we're changing.
 # If you want to see all of the available values, see:

@@ -17,6 +17,7 @@ APP_PORT=4002
 WORKER_PORT=4003
 MINIO_PORT=4004
 COUCH_DB_PORT=4005
+COUCH_DB_SQS_PORT=4006
 REDIS_PORT=6379
 WATCHTOWER_PORT=6161
 BUDIBASE_ENVIRONMENT=PRODUCTION
@@ -28,4 +29,4 @@ BB_ADMIN_USER_PASSWORD=

 # A path that is watched for plugin bundles. Any bundles found are imported automatically/
 PLUGINS_DIR=
-ROLLING_LOG_MAX_SIZE=
+ROLLING_LOG_MAX_SIZE=

@@ -1,5 +1,5 @@
 {
-  "version": "2.23.3",
+  "version": "2.23.6",
   "npmClient": "yarn",
   "packages": [
     "packages/*",

@@ -56,6 +56,7 @@
     "dev:noserver": "yarn run kill-builder && lerna run --stream dev:stack:up --ignore @budibase/account-portal-server && lerna run --stream dev --ignore @budibase/backend-core --ignore @budibase/server --ignore @budibase/worker --ignore=@budibase/account-portal-ui --ignore @budibase/account-portal-server",
     "dev:server": "yarn run kill-server && lerna run --stream dev --scope @budibase/worker --scope @budibase/server",
     "dev:accountportal": "yarn kill-accountportal && lerna run dev --stream --scope @budibase/account-portal-ui --scope @budibase/account-portal-server",
+    "dev:camunda": "./scripts/deploy-camunda.sh",
    "dev:all": "yarn run kill-all && lerna run --stream dev",
     "dev:built": "yarn run kill-all && cd packages/server && yarn dev:stack:up && cd ../../ && lerna run --stream dev:built",
     "dev:docker": "yarn build --scope @budibase/server --scope @budibase/worker && docker-compose -f hosting/docker-compose.build.yaml -f hosting/docker-compose.dev.yaml --env-file hosting/.env up --build --scale proxy-service=0",

@@ -1 +1 @@
-Subproject commit a0ee9cad8cefb8f9f40228705711be174f018fa9
+Subproject commit eb7d5da233885c5cffd9c255d3e954d0cd39185e

@@ -8,19 +8,9 @@ import {
   SearchParams,
   WithRequired,
 } from "@budibase/types"
+import { dataFilters } from "@budibase/shared-core"

-const QUERY_START_REGEX = /\d[0-9]*:/g
-
-export function removeKeyNumbering(key: any): string {
-  if (typeof key === "string" && key.match(QUERY_START_REGEX) != null) {
-    const parts = key.split(":")
-    // remove the number
-    parts.shift()
-    return parts.join(":")
-  } else {
-    return key
-  }
-}
+export const removeKeyNumbering = dataFilters.removeKeyNumbering

 /**
  * Class to build lucene query URLs.

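For context, `removeKeyNumbering` strips the numeric prefixes that the filter builder attaches to field keys (see the `enrichFilters` helpers later in this commit); the implementation now lives in `@budibase/shared-core`. A sketch mirroring the deleted code above:

```ts
// "1:city" -> "city"; keys without a numeric prefix pass through untouched.
const QUERY_START_REGEX = /\d[0-9]*:/g

function removeKeyNumbering(key: any): string {
  if (typeof key === "string" && key.match(QUERY_START_REGEX) != null) {
    const parts = key.split(":")
    parts.shift() // drop the numeric prefix
    return parts.join(":")
  }
  return key
}

console.log(removeKeyNumbering("1:city")) // "city"
console.log(removeKeyNumbering("2:a:b")) // "a:b" — only the number is removed
console.log(removeKeyNumbering("city")) // "city"
```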
@@ -107,7 +107,7 @@ const environment = {
   ENCRYPTION_KEY: process.env.ENCRYPTION_KEY,
   API_ENCRYPTION_KEY: getAPIEncryptionKey(),
   COUCH_DB_URL: process.env.COUCH_DB_URL || "http://localhost:4005",
-  COUCH_DB_SQL_URL: process.env.COUCH_DB_SQL_URL || "http://localhost:4984",
+  COUCH_DB_SQL_URL: process.env.COUCH_DB_SQL_URL || "http://localhost:4006",
   COUCH_DB_USERNAME: process.env.COUCH_DB_USER,
   COUCH_DB_PASSWORD: process.env.COUCH_DB_PASSWORD,
   GOOGLE_CLIENT_ID: process.env.GOOGLE_CLIENT_ID,

@@ -115,7 +115,6 @@ class InMemoryQueue implements Partial<Queue> {
    * a JSON message as this is required by Bull.
    * @param repeat serves no purpose for the import queue.
    */
-  // eslint-disable-next-line no-unused-vars
   async add(data: any, opts?: JobOptions) {
     const jobId = opts?.jobId?.toString()
     if (jobId && this._queuedJobIds.has(jobId)) {
@@ -166,8 +165,7 @@ class InMemoryQueue implements Partial<Queue> {
     return []
   }

-  // eslint-disable-next-line @typescript-eslint/no-unused-vars
-  async removeJobs(pattern: string) {
+  async removeJobs(_pattern: string) {
     // no-op
   }

@@ -17,8 +17,8 @@ import {
   ContextUser,
   CouchFindOptions,
   DatabaseQueryOpts,
-  SearchQuery,
-  SearchQueryOperators,
+  SearchFilters,
+  SearchFilterOperator,
   SearchUsersRequest,
   User,
 } from "@budibase/types"
@@ -44,11 +44,11 @@ function removeUserPassword(users: User | User[]) {
   return users
 }

-export function isSupportedUserSearch(query: SearchQuery) {
+export function isSupportedUserSearch(query: SearchFilters) {
   const allowed = [
-    { op: SearchQueryOperators.STRING, key: "email" },
-    { op: SearchQueryOperators.EQUAL, key: "_id" },
-    { op: SearchQueryOperators.ONE_OF, key: "_id" },
+    { op: SearchFilterOperator.STRING, key: "email" },
+    { op: SearchFilterOperator.EQUAL, key: "_id" },
+    { op: SearchFilterOperator.ONE_OF, key: "_id" },
   ]
   for (let [key, operation] of Object.entries(query)) {
     if (typeof operation !== "object") {

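`isSupportedUserSearch` only accepts user-search queries built from the three operator/key pairs in `allowed`. A hedged sketch of queries that should pass or fail that check — the `SearchFilters` shape is inferred from this diff, and the IDs are illustrative:

```ts
// Inferred shape: { [operator]: { [fieldKey]: value } }
type SearchFilters = Record<string, Record<string, unknown>>

const supported: SearchFilters = {
  string: { email: "foo@example.com" }, // { op: STRING, key: "email" }
  equal: { _id: "us_123" }, // { op: EQUAL, key: "_id" }
  oneOf: { _id: ["us_123", "us_456"] }, // { op: ONE_OF, key: "_id" }
}

const unsupported: SearchFilters = {
  fuzzy: { firstName: "bob" }, // operator/key pair not in the allowed list
}
```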
@@ -14,6 +14,7 @@
   notifications,
   Checkbox,
   DatePicker,
+  DrawerContent,
 } from "@budibase/bbui"
 import CreateWebhookModal from "components/automation/Shared/CreateWebhookModal.svelte"
 import { automationStore, selectedAutomation, tables } from "stores/builder"
@@ -37,7 +38,7 @@
   hbAutocomplete,
   EditorModes,
 } from "components/common/CodeEditor"
-import FilterDrawer from "components/design/settings/controls/FilterEditor/FilterDrawer.svelte"
+import FilterBuilder from "components/design/settings/controls/FilterEditor/FilterBuilder.svelte"
 import { LuceneUtils, Utils } from "@budibase/frontend-core"
 import {
   getSchemaForDatasourcePlus,
@@ -442,15 +443,16 @@
         <Button cta slot="buttons" on:click={() => saveFilters(key)}>
           Save
         </Button>
-        <FilterDrawer
-          slot="body"
-          {filters}
-          {bindings}
-          {schemaFields}
-          datasource={{ type: "table", tableId }}
-          panel={AutomationBindingPanel}
-          on:change={e => (tempFilters = e.detail)}
-        />
+        <DrawerContent slot="body">
+          <FilterBuilder
+            {filters}
+            {bindings}
+            {schemaFields}
+            datasource={{ type: "table", tableId }}
+            panel={AutomationBindingPanel}
+            on:change={e => (tempFilters = e.detail)}
+          />
+        </DrawerContent>
       </Drawer>
     {:else if value.customType === "password"}
       <Input

@@ -1,7 +1,7 @@
 <script>
   import { createEventDispatcher } from "svelte"
   import { ActionButton, Modal, ModalContent } from "@budibase/bbui"
-  import FilterDrawer from "components/design/settings/controls/FilterEditor/FilterDrawer.svelte"
+  import FilterBuilder from "components/design/settings/controls/FilterEditor/FilterBuilder.svelte"

   export let schema
   export let filters
@@ -40,7 +40,7 @@
   onConfirm={() => dispatch("change", tempValue)}
 >
   <div class="wrapper">
-    <FilterDrawer
+    <FilterBuilder
       allowBindings={false}
       {filters}
       {schemaFields}

@@ -27,14 +27,6 @@
       return []
     }
   }
-
-  async function deleteAttachments(fileList) {
-    try {
-      return await API.deleteBuilderAttachments(fileList)
-    } catch (error) {
-      return []
-    }
-  }
 </script>

 <Dropzone
@@ -42,6 +34,5 @@
   {label}
   {...$$restProps}
   {processFiles}
-  {deleteAttachments}
   {handleFileTooLarge}
 />

@@ -9,7 +9,6 @@
     "",
     requiredValidator
   )
-  // eslint-disable-next-line no-unused-vars
   const [repeatPassword, _, repeatTouched] = createValidationStore(
     "",
     requiredValidator

@@ -0,0 +1,110 @@
+<script>
+  import { Select, Label } from "@budibase/bbui"
+  import { onMount } from "svelte"
+  import DrawerBindableInput from "components/common/bindings/DrawerBindableInput.svelte"
+  import { FieldType } from "@budibase/types"
+  import { tables, viewsV2 } from "stores/builder"
+
+  export let parameters
+  export let bindings = []
+
+  const fileOptions = [
+    {
+      label: "Attachment",
+      value: "attachment",
+    },
+    {
+      label: "URL",
+      value: "url",
+    },
+  ]
+
+  $: tableOptions = $tables.list.map(table => ({
+    label: table.name,
+    resourceId: table._id,
+    schema: table.schema,
+  }))
+  $: viewOptions = $viewsV2.list.map(view => ({
+    label: view.name,
+    resourceId: view.id,
+    schema: view.schema,
+  }))
+  $: options = [...(tableOptions || []), ...(viewOptions || [])]
+
+  $: selectedTable =
+    parameters.tableId && options.find(t => t.resourceId === parameters.tableId)
+  $: attachmentColumns =
+    selectedTable &&
+    Object.values(selectedTable.schema).filter(c =>
+      [FieldType.ATTACHMENTS, FieldType.ATTACHMENT_SINGLE].includes(c.type)
+    )
+
+  onMount(() => {
+    if (!parameters.type) {
+      parameters.type = "attachment"
+    }
+  })
+</script>
+
+<div class="root">
+  <Label small>File</Label>
+  <Select
+    placeholder={null}
+    bind:value={parameters.type}
+    options={fileOptions}
+  />
+  {#if parameters.type === "attachment"}
+    <Label>Table</Label>
+    <Select
+      placeholder={null}
+      bind:value={parameters.tableId}
+      {options}
+      getOptionLabel={table => table.label}
+      getOptionValue={table => table.resourceId}
+    />
+    <Label small>Column</Label>
+    <Select
+      disabled={!attachmentColumns?.length}
+      placeholder={parameters.tableId && !attachmentColumns?.length
+        ? "This table has no attachment columns"
+        : undefined}
+      bind:value={parameters.attachmentColumn}
+      options={attachmentColumns?.map(c => c.name)}
+    />
+
+    <Label small>Row ID</Label>
+    <DrawerBindableInput
+      {bindings}
+      title="Row ID"
+      value={parameters.rowId}
+      on:change={value => (parameters.rowId = value.detail)}
+    />
+  {:else}
+    <Label small>URL</Label>
+    <DrawerBindableInput
+      title="URL"
+      {bindings}
+      value={parameters.url}
+      on:change={value => (parameters.url = value.detail)}
+    />
+    <Label small>File name</Label>
+    <DrawerBindableInput
+      title="File name"
+      {bindings}
+      value={parameters.fileName}
+      on:change={value => (parameters.fileName = value.detail)}
+    />
+  {/if}
+</div>
+
+<style>
+  .root {
+    display: grid;
+    column-gap: var(--spacing-l);
+    row-gap: var(--spacing-s);
+    grid-template-columns: 60px 1fr;
+    align-items: center;
+    max-width: 800px;
+    margin: 0 auto;
+  }
+</style>

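The new component narrows the selected table's schema to attachment-type columns before offering them in the Column select. A standalone sketch of that filtering step; the enum string values and the schema literal are assumptions for illustration:

```ts
// Assumed to approximate the FieldType values used in the component above.
enum FieldType {
  STRING = "string",
  ATTACHMENTS = "attachment",
  ATTACHMENT_SINGLE = "attachment_single",
}

interface ColumnSchema {
  name: string
  type: FieldType
}

const schema: Record<string, ColumnSchema> = {
  name: { name: "name", type: FieldType.STRING },
  photo: { name: "photo", type: FieldType.ATTACHMENT_SINGLE },
  files: { name: "files", type: FieldType.ATTACHMENTS },
}

// Same filter as the component's attachmentColumns reactive statement.
const attachmentColumns = Object.values(schema).filter(c =>
  [FieldType.ATTACHMENTS, FieldType.ATTACHMENT_SINGLE].includes(c.type)
)
console.log(attachmentColumns.map(c => c.name)) // ["photo", "files"]
```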
@@ -22,3 +22,4 @@ export { default as PromptUser } from "./PromptUser.svelte"
 export { default as OpenSidePanel } from "./OpenSidePanel.svelte"
 export { default as CloseSidePanel } from "./CloseSidePanel.svelte"
 export { default as ClearRowSelection } from "./ClearRowSelection.svelte"
+export { default as DownloadFile } from "./DownloadFile.svelte"

@@ -161,6 +161,11 @@
       "name": "Clear Row Selection",
       "type": "data",
       "component": "ClearRowSelection"
-    }
+    },
+    {
+      "name": "Download File",
+      "type": "data",
+      "component": "DownloadFile"
+    }
   ]
 }

@@ -0,0 +1,84 @@
+<script>
+  import DrawerBindableInput from "components/common/bindings/DrawerBindableInput.svelte"
+  import ClientBindingPanel from "components/common/bindings/ClientBindingPanel.svelte"
+
+  import { dataFilters } from "@budibase/shared-core"
+  import { FilterBuilder } from "@budibase/frontend-core"
+
+  import { createEventDispatcher, onMount } from "svelte"
+
+  export let schemaFields
+  export let filters = []
+  export let bindings = []
+  export let panel = ClientBindingPanel
+  export let allowBindings = true
+  export let datasource
+
+  const dispatch = createEventDispatcher()
+
+  let rawFilters
+
+  $: parseFilters(rawFilters)
+  $: dispatch("change", enrichFilters(rawFilters))
+
+  // Remove field key prefixes and determine which behaviours to use
+  const parseFilters = filters => {
+    rawFilters = (filters || []).map(filter => {
+      const { field } = filter
+      let newFilter = { ...filter }
+      delete newFilter.allOr
+      newFilter.field = dataFilters.removeKeyNumbering(field)
+      return newFilter
+    })
+  }
+
+  onMount(() => {
+    parseFilters(filters)
+    rawFilters.forEach(filter => {
+      filter.type =
+        schemaFields.find(field => field.name === filter.field)?.type ||
+        filter.type
+    })
+  })
+
+  // Add field key prefixes and a special metadata filter object to indicate
+  // how to handle filter behaviour
+  const enrichFilters = rawFilters => {
+    let count = 1
+    return rawFilters
+      .filter(filter => filter.field)
+      .map(filter => ({
+        ...filter,
+        field: `${count++}:${filter.field}`,
+      }))
+      .concat(...rawFilters.filter(filter => !filter.field))
+  }
+</script>
+
+<FilterBuilder
+  bind:filters={rawFilters}
+  behaviourFilters={true}
+  {schemaFields}
+  {datasource}
+  {allowBindings}
+>
+  <div slot="filtering-hero-content" />
+
+  <DrawerBindableInput
+    let:filter
+    slot="binding"
+    disabled={filter.noValue}
+    title={filter.field}
+    value={filter.value}
+    placeholder="Value"
+    {panel}
+    {bindings}
+    on:change={event => {
+      const indexToUpdate = rawFilters.findIndex(f => f.id === filter.id)
+      rawFilters[indexToUpdate] = {
+        ...rawFilters[indexToUpdate],
+        value: event.detail,
+      }
+    }}
+  />
+</FilterBuilder>

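`parseFilters` and `enrichFilters` are near-inverses: one strips the `N:` key prefixes for editing, the other re-adds them before the change event is dispatched, so duplicate field names stay distinct downstream. A small sketch of the round trip, with a simplified `Filter` type assumed for illustration:

```ts
interface Filter {
  field: string | null
  operator?: string
  value?: unknown
}

// Re-add "1:", "2:", ... prefixes, as enrichFilters does above.
function enrichFilters(rawFilters: Filter[]): Filter[] {
  let count = 1
  return rawFilters
    .filter(f => f.field)
    .map(f => ({ ...f, field: `${count++}:${f.field}` }))
}

// Strip the prefix again when loading filters back into the editor.
function removeKeyNumbering(key: string): string {
  return /^\d+:/.test(key) ? key.split(":").slice(1).join(":") : key
}

const enriched = enrichFilters([{ field: "city" }, { field: "city" }])
console.log(enriched.map(f => f.field)) // ["1:city", "2:city"]
console.log(enriched.map(f => removeKeyNumbering(f.field!))) // ["city", "city"]
```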
@@ -1,8 +1,14 @@
 <script>
-  import { notifications, ActionButton, Button, Drawer } from "@budibase/bbui"
+  import {
+    notifications,
+    ActionButton,
+    Button,
+    Drawer,
+    DrawerContent,
+  } from "@budibase/bbui"
   import { createEventDispatcher } from "svelte"
   import { getDatasourceForProvider, getSchemaForDatasource } from "dataBinding"
-  import FilterDrawer from "./FilterDrawer.svelte"
+  import FilterBuilder from "./FilterBuilder.svelte"
   import { selectedScreen } from "stores/builder"

   const dispatch = createEventDispatcher()
@@ -40,14 +46,15 @@
 </div>
 <Drawer bind:this={drawer} title="Filtering" on:drawerHide on:drawerShow>
   <Button cta slot="buttons" on:click={saveFilter}>Save</Button>
-  <FilterDrawer
-    slot="body"
-    filters={value}
-    {bindings}
-    {schemaFields}
-    {datasource}
-    on:change={e => (tempValue = e.detail)}
-  />
+  <DrawerContent slot="body">
+    <FilterBuilder
+      filters={value}
+      {bindings}
+      {schemaFields}
+      {datasource}
+      on:change={e => (tempValue = e.detail)}
+    />
+  </DrawerContent>
 </Drawer>

 <style>

@@ -8,6 +8,7 @@
     Input,
     notifications,
   } from "@budibase/bbui"
+  import { downloadFile } from "@budibase/frontend-core"
   import { createValidationStore } from "helpers/validation/yup"

   export let app
@@ -55,40 +56,13 @@
   const exportApp = async () => {
     const id = published ? app.prodId : app.devId
     const url = `/api/backups/export?appId=${id}`
-    await downloadFile(url, {
-      excludeRows: !includeInternalTablesRows,
-      encryptPassword: password,
-    })
-  }
-
-  async function downloadFile(url, body) {
-    try {
-      const response = await fetch(url, {
-        method: "POST",
-        headers: {
-          "Content-Type": "application/json",
-        },
-        body: JSON.stringify(body),
+    const downloaded = await downloadFile(url, {
+      excludeRows: !includeInternalTablesRows,
+      encryptPassword: password,
     })

-      if (response.ok) {
-        const contentDisposition = response.headers.get("Content-Disposition")
-
-        const matches = /filename[^;=\n]*=((['"]).*?\2|[^;\n]*)/.exec(
-          contentDisposition
-        )
-
-        const filename = matches[1].replace(/['"]/g, "")
-
-        const url = URL.createObjectURL(await response.blob())
-
-        const link = document.createElement("a")
-        link.href = url
-        link.download = filename
-        link.click()
-
-        URL.revokeObjectURL(url)
-      } else {
+    if (!downloaded) {
       notifications.error("Error exporting the app.")
     }
-  } catch (error) {

@@ -4,8 +4,6 @@ import {
   createDatasourceCreationStore,
 } from "./datasourceCreation"
 import { get } from "svelte/store"
-// eslint-disable-next-line no-unused-vars
 import { shouldIntegrationFetchTableNames } from "stores/selectors"

 vi.mock("stores/selectors", () => ({
   shouldIntegrationFetchTableNames: vi.fn(),

@@ -1,9 +1,9 @@
 import { it, expect, describe, beforeEach, vi } from "vitest"
 import { createOnGoogleAuthStore } from "./onGoogleAuth"
 import { writable, get } from "svelte/store"
-// eslint-disable-next-line no-unused-vars
+// eslint-disable-next-line
 import { params } from "@roxi/routify"
-// eslint-disable-next-line no-unused-vars
+// eslint-disable-next-line
 import { integrations } from "stores/builder"
 import { IntegrationTypes } from "constants/backend"

@@ -137,8 +137,12 @@
       const activeTag = document.activeElement?.tagName.toLowerCase()
       const inCodeEditor =
         document.activeElement?.classList?.contains("cm-content")
+      const inPosthogSurvey =
+        document.activeElement?.classList?.[0]?.startsWith("PostHogSurvey")
       if (
-        (inCodeEditor || ["input", "textarea"].indexOf(activeTag) !== -1) &&
+        (inCodeEditor ||
+          inPosthogSurvey ||
+          ["input", "textarea"].indexOf(activeTag) !== -1) &&
         e.key !== "Escape"
       ) {
         return

@@ -105,7 +105,6 @@ export function getAppService(path: string) {
 }

 export function updateDockerComposeService(
-  // eslint-disable-next-line no-unused-vars
   updateFn: (service: DockerCompose) => void
 ) {
   const opts = ["docker-compose.yaml", "docker-compose.yml"]

@@ -157,6 +157,11 @@
     width: 100%;
   }

+  /* Use normal theme colors for links when using a top nav */
+  .dropdown:not(.left) .sublinks a {
+    color: var(--spectrum-alias-text-color);
+  }
+
   /* Left dropdowns */
   .dropdown.left .sublinks-wrapper {
     display: none;

@@ -1,216 +1,14 @@
 <script>
-  import {
-    Body,
-    Button,
-    Combobox,
-    DatePicker,
-    Icon,
-    Input,
-    Layout,
-    Select,
-  } from "@budibase/bbui"
-  import { generate } from "shortid"
-  import { LuceneUtils, Constants } from "@budibase/frontend-core"
-  import { getContext } from "svelte"
+  import { FilterBuilder } from "@budibase/frontend-core"

   export let schemaFields
   export let filters = []
   export let datasource
-
-  const context = getContext("context")
-  const BannedTypes = ["link", "attachment", "json"]
-
-  $: fieldOptions = (schemaFields ?? [])
-    .filter(
-      field =>
-        !BannedTypes.includes(field.type) ||
-        (field.type === "formula" && field.formulaType === "static")
-    )
-    .map(field => ({
-      label: field.displayName || field.name,
-      value: field.name,
-    }))
-
-  const addFilter = () => {
-    filters = [
-      ...filters,
-      {
-        id: generate(),
-        field: null,
-        operator: Constants.OperatorOptions.Equals.value,
-        value: null,
-        valueType: "Value",
-      },
-    ]
-  }
-
-  const removeFilter = id => {
-    filters = filters.filter(field => field.id !== id)
-  }
-
-  const duplicateFilter = id => {
-    const existingFilter = filters.find(filter => filter.id === id)
-    const duplicate = { ...existingFilter, id: generate() }
-    filters = [...filters, duplicate]
-  }
-
-  const onFieldChange = (expression, field) => {
-    // Update the field type
-    expression.type = schemaFields.find(x => x.name === field)?.type
-    expression.externalType = schemaFields.find(
-      x => x.name === field
-    )?.externalType
-
-    // Ensure a valid operator is set
-    const validOperators = LuceneUtils.getValidOperatorsForType(
-      { type: expression.type },
-      expression.field,
-      datasource
-    ).map(x => x.value)
-    if (!validOperators.includes(expression.operator)) {
-      expression.operator =
-        validOperators[0] ?? Constants.OperatorOptions.Equals.value
-      onOperatorChange(expression, expression.operator)
-    }
-
-    // if changed to an array, change default value to empty array
-    const idx = filters.findIndex(x => x.field === field)
-    if (expression.type === "array") {
-      filters[idx].value = []
-    } else {
-      filters[idx].value = null
-    }
-  }
-
-  const onOperatorChange = (expression, operator) => {
-    const noValueOptions = [
-      Constants.OperatorOptions.Empty.value,
-      Constants.OperatorOptions.NotEmpty.value,
-    ]
-    expression.noValue = noValueOptions.includes(operator)
-    if (expression.noValue) {
-      expression.value = null
-    }
-  }
-
-  const getFieldOptions = field => {
-    const schema = schemaFields.find(x => x.name === field)
-    return schema?.constraints?.inclusion || []
-  }
-
-  const getSchema = filter => {
-    return schemaFields.find(field => field.name === filter.field)
-  }
 </script>

-<div class="container" class:mobile={$context.device.mobile}>
-  <Layout noPadding>
-    <Body size="S">
-      {#if !filters?.length}
-        Add your first filter expression.
-      {:else}
-        Results are filtered to only those which match all of the following
-        constraints.
-      {/if}
-    </Body>
-    {#if filters?.length}
-      <div class="fields">
-        {#each filters as filter}
-          <Select
-            bind:value={filter.field}
-            options={fieldOptions}
-            on:change={e => onFieldChange(filter, e.detail)}
-            placeholder="Column"
-          />
-          <Select
-            disabled={!filter.field}
-            options={LuceneUtils.getValidOperatorsForType(
-              { type: filter.type, subtype: filter.subtype },
-              filter.field,
-              datasource
-            )}
-            bind:value={filter.operator}
-            on:change={e => onOperatorChange(filter, e.detail)}
-            placeholder={null}
-          />
-          {#if ["string", "longform", "number", "bigint", "formula"].includes(filter.type)}
-            <Input disabled={filter.noValue} bind:value={filter.value} />
-          {:else if ["options", "array"].includes(filter.type)}
-            <Combobox
-              disabled={filter.noValue}
-              options={getFieldOptions(filter.field)}
-              bind:value={filter.value}
-            />
-          {:else if filter.type === "boolean"}
-            <Combobox
-              disabled={filter.noValue}
-              options={[
-                { label: "True", value: "true" },
-                { label: "False", value: "false" },
-              ]}
-              bind:value={filter.value}
-            />
-          {:else if filter.type === "datetime"}
-            <DatePicker
-              disabled={filter.noValue}
-              enableTime={!getSchema(filter).dateOnly}
-              timeOnly={getSchema(filter).timeOnly}
-              bind:value={filter.value}
-            />
-          {:else}
-            <Input disabled />
-          {/if}
-          <div class="controls">
-            <Icon
-              name="Duplicate"
-              hoverable
-              size="S"
-              on:click={() => duplicateFilter(filter.id)}
-            />
-            <Icon
-              name="Close"
-              hoverable
-              size="S"
-              on:click={() => removeFilter(filter.id)}
-            />
-          </div>
-        {/each}
-      </div>
-    {/if}
-    <div>
-      <Button icon="AddCircle" size="M" secondary on:click={addFilter}>
-        Add filter
-      </Button>
-    </div>
-  </Layout>
-</div>
-
-<style>
-  .container {
-    width: 100%;
-    max-width: 1000px;
-    margin: 0 auto;
-  }
-  .fields {
-    display: grid;
-    column-gap: var(--spacing-l);
-    row-gap: var(--spacing-s);
-    align-items: center;
-    grid-template-columns: 1fr 120px 1fr auto auto;
-  }
-  .controls {
-    display: contents;
-  }
-
-  .container.mobile .fields {
-    grid-template-columns: 1fr;
-  }
-  .container.mobile .controls {
-    display: flex;
-    flex-direction: row;
-    justify-content: flex-start;
-    align-items: center;
-    padding: var(--spacing-s) 0;
-    gap: var(--spacing-s);
-  }
-</style>
+<FilterBuilder bind:filters {schemaFields} {datasource} filtersLabel={null}>
+  <div slot="filtering-hero-content">
+    Results are filtered to only those which match all of the following
+    constraints.
+  </div>
+</FilterBuilder>

@@ -58,17 +58,6 @@
     }
   }

-  const deleteAttachments = async fileList => {
-    try {
-      return await API.deleteAttachments({
-        keys: fileList,
-        tableId: formContext?.dataSource?.tableId,
-      })
-    } catch (error) {
-      return []
-    }
-  }
-
   const handleChange = e => {
     const value = fieldApiMapper.set(e.detail)
     const changed = fieldApi.setValue(value)
@@ -98,7 +87,6 @@
     error={fieldState.error}
     on:change={handleChange}
     {processFiles}
-    {deleteAttachments}
     {handleFileTooLarge}
     {handleTooManyFiles}
     {maximum}

@@ -1,5 +1,6 @@
 import { get } from "svelte/store"
 import download from "downloadjs"
+import { downloadStream } from "@budibase/frontend-core"
 import {
   routeStore,
   builderStore,
@@ -400,6 +401,51 @@ const closeSidePanelHandler = () => {
   sidePanelStore.actions.close()
 }

+const downloadFileHandler = async action => {
+  const { url, fileName } = action.parameters
+  try {
+    const { type } = action.parameters
+    if (type === "attachment") {
+      const { tableId, rowId, attachmentColumn } = action.parameters
+      const res = await API.downloadAttachment(
+        tableId,
+        rowId,
+        attachmentColumn,
+        { suppressErrors: true }
+      )
+      await downloadStream(res)
+      return
+    }
+
+    const response = await fetch(url)
+
+    if (!response.ok) {
+      notificationStore.actions.error(
+        `Failed to download from '${url}'. Server returned status code: ${response.status}`
+      )
+      return
+    }
+
+    const objectUrl = URL.createObjectURL(await response.blob())
+
+    const link = document.createElement("a")
+    link.href = objectUrl
+    link.download = fileName
+    link.click()
+
+    URL.revokeObjectURL(objectUrl)
+  } catch (e) {
+    console.error(e)
+    if (e.status) {
+      notificationStore.actions.error(
+        `Failed to download from '${url}'. Server returned status code: ${e.status}`
+      )
+    } else {
+      notificationStore.actions.error(`Failed to download from '${url}'.`)
+    }
+  }
+}
+
 const handlerMap = {
   ["Fetch Row"]: fetchRowHandler,
   ["Save Row"]: saveRowHandler,
@@ -418,6 +464,7 @@ const handlerMap = {
   ["Prompt User"]: promptUserHandler,
   ["Open Side Panel"]: openSidePanelHandler,
   ["Close Side Panel"]: closeSidePanelHandler,
+  ["Download File"]: downloadFileHandler,
 }

 const confirmTextMap = {

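The handler expects its parameters in one of two shapes, matching the DownloadFile settings component added earlier in this commit. Hedged examples of the two action payloads — the surrounding action envelope and the IDs are illustrative:

```ts
// Attachment mode: stream a stored attachment out of a table row.
const attachmentAction = {
  parameters: {
    type: "attachment",
    tableId: "ta_users", // illustrative IDs, not real ones
    rowId: "ro_123",
    attachmentColumn: "photo",
  },
}

// URL mode: fetch an arbitrary URL and save it under a chosen file name.
const urlAction = {
  parameters: {
    type: "url",
    url: "https://example.com/report.pdf",
    fileName: "report.pdf",
  },
}
```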
@@ -0,0 +1,17 @@
+{
+  "compilerOptions": {
+    "allowJs": true,
+    "strict": true,
+    "outDir": "dist",
+    "paths": {
+      "@budibase/*": [
+        "../*/src/index.ts",
+        "../*/src/index.js",
+        "../*",
+        "../../node_modules/@budibase/*"
+      ],
+      "*": ["./src/*"]
+    }
+  },
+  "include": ["src/**/*"]
+}

@@ -11,6 +11,7 @@
     "@budibase/types": "0.0.0",
     "dayjs": "^1.10.8",
+    "lodash": "4.17.21",
     "shortid": "2.2.15",
     "socket.io-client": "^4.6.1"
   }
 }

@@ -61,31 +61,17 @@ export const buildAttachmentEndpoints = API => {
     })
     return { publicUrl }
   },

   /**
-   * Deletes attachments from the bucket.
-   * @param keys the attachments to delete
-   * @param tableId the associated table ID
+   * Download an attachment from a row given its column name.
+   * @param datasourceId the ID of the datasource to download from
+   * @param rowId the ID of the row to download from
+   * @param columnName the column name to download
    */
-  deleteAttachments: async ({ keys, tableId }) => {
-    return await API.post({
-      url: `/api/attachments/${tableId}/delete`,
-      body: {
-        keys,
-      },
-    })
-  },
-
-  /**
-   * Deletes attachments from the builder bucket.
-   * @param keys the attachments to delete
-   */
-  deleteBuilderAttachments: async keys => {
-    return await API.post({
-      url: `/api/attachments/delete`,
-      body: {
-        keys,
-      },
-    })
-  },
+  downloadAttachment: async (datasourceId, rowId, columnName, options) => {
+    return await API.get({
+      url: `/api/${datasourceId}/rows/${rowId}/attachment/${columnName}`,
+      parseResponse: response => response,
+      suppressErrors: options?.suppressErrors,
+    })
+  },
 }

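A hedged usage sketch of the new endpoint, pairing it with the `downloadStream` helper used by the client action handler above; the API client type and the IDs are assumptions for illustration:

```ts
// Assumes an API object built by buildAttachmentEndpoints and a
// downloadStream helper that streams a Response to a browser download.
async function saveRowAttachment(
  API: any,
  downloadStream: (res: Response) => Promise<void>
) {
  const res = await API.downloadAttachment(
    "ta_users", // datasource/table ID (illustrative)
    "ro_123", // row ID (illustrative)
    "photo", // attachment column name
    { suppressErrors: true } // handle failures manually instead of toasting
  )
  await downloadStream(res) // stream the response straight to a file download
}
```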
@ -4,33 +4,36 @@
|
|||
Button,
|
||||
Combobox,
|
||||
DatePicker,
|
||||
DrawerContent,
|
||||
Icon,
|
||||
Input,
|
||||
Label,
|
||||
Layout,
|
||||
Multiselect,
|
||||
Select,
|
||||
Label,
|
||||
Multiselect,
|
||||
} from "@budibase/bbui"
|
||||
import DrawerBindableInput from "components/common/bindings/DrawerBindableInput.svelte"
|
||||
import ClientBindingPanel from "components/common/bindings/ClientBindingPanel.svelte"
|
||||
import { FieldType, SearchFilterOperator } from "@budibase/types"
|
||||
import { generate } from "shortid"
|
||||
import { Constants, LuceneUtils } from "@budibase/frontend-core"
|
||||
import { getFields } from "helpers/searchFields"
|
||||
import { FieldType } from "@budibase/types"
|
||||
import { createEventDispatcher, onMount } from "svelte"
|
||||
import { LuceneUtils, Constants } from "@budibase/frontend-core"
|
||||
import { getContext } from "svelte"
|
||||
import FilterUsers from "./FilterUsers.svelte"
|
||||
|
||||
const { OperatorOptions } = Constants
|
||||
|
||||
export let schemaFields
|
||||
export let filters = []
|
||||
export let bindings = []
|
||||
export let panel = ClientBindingPanel
|
||||
export let allowBindings = true
|
||||
export let datasource
|
||||
export let behaviourFilters = false
|
||||
export let allowBindings = false
|
||||
export let filtersLabel = "Filters"
|
||||
|
||||
$: matchAny = filters?.find(filter => filter.operator === "allOr") != null
|
||||
$: onEmptyFilter =
|
||||
filters?.find(filter => filter.onEmptyFilter)?.onEmptyFilter ?? "all"
|
||||
|
||||
$: fieldFilters = filters.filter(
|
||||
filter => filter.operator !== "allOr" && !filter.onEmptyFilter
|
||||
)
|
||||
|
||||
const dispatch = createEventDispatcher()
|
||||
const { OperatorOptions } = Constants
|
||||
const KeyedFieldRegex = /\d[0-9]*:/g
|
||||
const behaviourOptions = [
|
||||
{ value: "and", label: "Match all filters" },
|
||||
{ value: "or", label: "Match any filter" },
|
||||
|
@ -40,62 +43,18 @@
|
|||
{ value: "none", label: "Return no rows" },
|
||||
]
|
||||
|
||||
let rawFilters
|
||||
let matchAny = false
|
||||
let onEmptyFilter = "all"
|
||||
const context = getContext("context")
|
||||
|
||||
$: parseFilters(filters)
|
||||
$: dispatch("change", enrichFilters(rawFilters, matchAny, onEmptyFilter))
|
||||
$: enrichedSchemaFields = getFields(schemaFields || [], { allowLinks: true })
|
||||
$: fieldOptions = enrichedSchemaFields.map(field => field.name) || []
|
||||
$: valueTypeOptions = allowBindings ? ["Value", "Binding"] : ["Value"]
|
||||
|
||||
// Remove field key prefixes and determine which behaviours to use
|
||||
const parseFilters = filters => {
|
||||
matchAny = filters?.find(filter => filter.operator === "allOr") != null
|
||||
onEmptyFilter =
|
||||
filters?.find(filter => filter.onEmptyFilter)?.onEmptyFilter ?? "all"
|
||||
rawFilters = (filters || [])
|
||||
.filter(filter => filter.operator !== "allOr" && !filter.onEmptyFilter)
|
||||
.map(filter => {
|
||||
const { field } = filter
|
||||
let newFilter = { ...filter }
|
||||
delete newFilter.allOr
|
||||
if (typeof field === "string" && field.match(KeyedFieldRegex) != null) {
|
||||
const parts = field.split(":")
|
||||
parts.shift()
|
||||
newFilter.field = parts.join(":")
|
||||
}
|
||||
return newFilter
|
||||
})
|
||||
}
|
||||
|
||||
onMount(() => {
|
||||
parseFilters(filters)
|
||||
rawFilters.forEach(filter => {
|
||||
filter.type =
|
||||
schemaFields.find(field => field.name === filter.field)?.type ||
|
||||
filter.type
|
||||
})
|
||||
})
|
||||
|
||||
// Add field key prefixes and a special metadata filter object to indicate
|
||||
// how to handle filter behaviour
|
||||
const enrichFilters = (rawFilters, matchAny, onEmptyFilter) => {
|
||||
let count = 1
|
||||
return rawFilters
|
||||
.filter(filter => filter.field)
|
||||
.map(filter => ({
|
||||
...filter,
|
||||
field: `${count++}:${filter.field}`,
|
||||
}))
|
||||
.concat(matchAny ? [{ operator: "allOr" }] : [])
|
||||
.concat([{ onEmptyFilter }])
|
||||
}
|
||||
$: fieldOptions = (schemaFields ?? [])
|
||||
.filter(field => getValidOperatorsForType(field).length)
|
||||
.map(field => ({
|
||||
label: field.displayName || field.name,
|
||||
value: field.name,
|
||||
}))
|
||||
|
||||
const addFilter = () => {
|
||||
rawFilters = [
|
||||
...rawFilters,
|
||||
filters = [
|
||||
...(filters || []),
|
||||
{
|
||||
id: generate(),
|
||||
field: null,
|
||||
|
@ -107,22 +66,57 @@
|
|||
}
|
||||
|
||||
const removeFilter = id => {
|
||||
rawFilters = rawFilters.filter(field => field.id !== id)
|
||||
filters = filters.filter(field => field.id !== id)
|
||||
}
|
||||
|
||||
const duplicateFilter = id => {
|
||||
const existingFilter = rawFilters.find(filter => filter.id === id)
|
||||
const existingFilter = filters.find(filter => filter.id === id)
|
||||
const duplicate = { ...existingFilter, id: generate() }
|
||||
rawFilters = [...rawFilters, duplicate]
|
||||
filters = [...filters, duplicate]
|
||||
}
|
||||
|
||||
const onFieldChange = filter => {
|
||||
const previousType = filter.type
|
||||
sanitizeTypes(filter)
|
||||
sanitizeOperator(filter)
|
||||
sanitizeValue(filter, previousType)
|
||||
}
|
||||
|
||||
const onOperatorChange = filter => {
|
||||
sanitizeOperator(filter)
|
||||
sanitizeValue(filter, filter.type)
|
||||
}
|
||||
|
||||
const onValueTypeChange = filter => {
|
||||
sanitizeValue(filter)
|
||||
}
|
||||
|
||||
const getFieldOptions = field => {
|
||||
const schema = schemaFields.find(x => x.name === field)
|
||||
return schema?.constraints?.inclusion || []
|
||||
}
|
||||
|
||||
const getSchema = filter => {
|
||||
return enrichedSchemaFields.find(field => field.name === filter.field)
|
||||
return schemaFields.find(field => field.name === filter.field)
|
||||
}
|
||||
|
||||
const getValidOperatorsForType = filter => {
|
||||
if (!filter?.field && !filter?.name) {
|
||||
return []
|
||||
}
|
||||
|
||||
return LuceneUtils.getValidOperatorsForType(
|
||||
filter,
|
||||
filter.field || filter.name,
|
||||
datasource
|
||||
)
|
||||
}
|
||||
|
||||
$: valueTypeOptions = allowBindings ? ["Value", "Binding"] : ["Value"]
|
||||
|
||||
const sanitizeTypes = filter => {
|
||||
// Update type based on field
|
||||
const fieldSchema = enrichedSchemaFields.find(x => x.name === filter.field)
|
||||
const fieldSchema = schemaFields.find(x => x.name === filter.field)
|
||||
filter.type = fieldSchema?.type
|
||||
filter.subtype = fieldSchema?.subtype
|
||||
|
||||
|
@@ -154,88 +148,79 @@
|
|||
|
||||
// Ensure array values are properly set and cleared
|
||||
if (Array.isArray(filter.value)) {
|
||||
if (filter.valueType !== "Value" || filter.type !== "array") {
|
||||
if (filter.valueType !== "Value" || filter.type !== FieldType.ARRAY) {
|
||||
filter.value = null
|
||||
}
|
||||
} else if (filter.type === "array" && filter.valueType === "Value") {
|
||||
} else if (
|
||||
filter.type === FieldType.ARRAY &&
|
||||
filter.valueType === "Value"
|
||||
) {
|
||||
filter.value = []
|
||||
} else if (
|
||||
previousType !== filter.type &&
|
||||
(previousType === FieldType.BB_REFERENCE ||
|
||||
filter.type === FieldType.BB_REFERENCE)
|
||||
) {
|
||||
filter.value = filter.type === "array" ? [] : null
|
||||
filter.value = filter.type === FieldType.ARRAY ? [] : null
|
||||
}
|
||||
}
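This hunk replaces raw string literals such as `"array"` with `FieldType` enum members, so a mistyped type name becomes a compile error rather than a silently-false comparison. The idea in miniature (import path taken from the types package used elsewhere in this diff):

```ts
import { FieldType } from "@budibase/types"

// Enum comparison instead of a raw string literal, as in the hunk above.
function emptyValueForType(type: FieldType) {
  return type === FieldType.ARRAY ? [] : null
}
```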
|
||||
|
||||
const onFieldChange = filter => {
|
||||
const previousType = filter.type
|
||||
sanitizeTypes(filter)
|
||||
sanitizeOperator(filter)
|
||||
sanitizeValue(filter, previousType)
|
||||
}
|
||||
|
||||
const onOperatorChange = filter => {
|
||||
sanitizeOperator(filter)
|
||||
sanitizeValue(filter, filter.type)
|
||||
}
|
||||
|
||||
const onValueTypeChange = filter => {
|
||||
sanitizeValue(filter)
|
||||
}
|
||||
|
||||
const getFieldOptions = field => {
|
||||
const schema = enrichedSchemaFields.find(x => x.name === field)
|
||||
return schema?.constraints?.inclusion || []
|
||||
}
|
||||
|
||||
const getValidOperatorsForType = filter => {
|
||||
if (!filter?.field) {
|
||||
return []
|
||||
function handleAllOr(option) {
|
||||
filters = filters.filter(f => f.operator !== "allOr")
|
||||
if (option === "or") {
|
||||
filters.push({ operator: "allOr" })
|
||||
}
|
||||
}
|
||||
|
||||
return LuceneUtils.getValidOperatorsForType(
|
||||
{ type: filter.type, subtype: filter.subtype },
|
||||
filter.field,
|
||||
datasource
|
||||
)
|
||||
function handleOnEmptyFilter(value) {
|
||||
filters = filters?.filter(filter => !filter.onEmptyFilter)
|
||||
filters.push({ onEmptyFilter: value })
|
||||
}
|
||||
</script>
|
||||
|
||||
<DrawerContent>
|
||||
<div class="container">
|
||||
<Layout noPadding>
|
||||
{#if !rawFilters?.length}
|
||||
<Body size="S">Add your first filter expression.</Body>
|
||||
{:else}
|
||||
<div class="fields">
|
||||
<Select
|
||||
label="Behaviour"
|
||||
value={matchAny ? "or" : "and"}
|
||||
options={behaviourOptions}
|
||||
getOptionLabel={opt => opt.label}
|
||||
getOptionValue={opt => opt.value}
|
||||
on:change={e => (matchAny = e.detail === "or")}
|
||||
placeholder={null}
|
||||
/>
|
||||
{#if datasource?.type === "table"}
|
||||
<Select
|
||||
label="When filter empty"
|
||||
value={onEmptyFilter}
|
||||
options={onEmptyOptions}
|
||||
getOptionLabel={opt => opt.label}
|
||||
getOptionValue={opt => opt.value}
|
||||
on:change={e => (onEmptyFilter = e.detail)}
|
||||
placeholder={null}
|
||||
/>
|
||||
<div class="container" class:mobile={$context?.device?.mobile}>
|
||||
<Layout noPadding>
|
||||
{#if fieldOptions?.length}
|
||||
<Body size="S">
|
||||
{#if !fieldFilters?.length}
|
||||
Add your first filter expression.
|
||||
{:else}
|
||||
<slot name="filtering-hero-content" />
|
||||
{#if behaviourFilters}
|
||||
<div class="behaviour-filters">
|
||||
<Select
|
||||
label="Behaviour"
|
||||
value={matchAny ? "or" : "and"}
|
||||
options={behaviourOptions}
|
||||
getOptionLabel={opt => opt.label}
|
||||
getOptionValue={opt => opt.value}
|
||||
on:change={e => handleAllOr(e.detail)}
|
||||
placeholder={null}
|
||||
/>
|
||||
{#if datasource?.type === "table"}
|
||||
<Select
|
||||
label="When filter empty"
|
||||
value={onEmptyFilter}
|
||||
options={onEmptyOptions}
|
||||
getOptionLabel={opt => opt.label}
|
||||
getOptionValue={opt => opt.value}
|
||||
on:change={e => handleOnEmptyFilter(e.detail)}
|
||||
placeholder={null}
|
||||
/>
|
||||
{/if}
|
||||
</div>
|
||||
{/if}
|
||||
</div>
|
||||
{/if}
|
||||
</Body>
|
||||
{#if fieldFilters?.length}
|
||||
<div>
|
||||
<div class="filter-label">
|
||||
<Label>Filters</Label>
|
||||
</div>
|
||||
<div class="fields">
|
||||
{#each rawFilters as filter}
|
||||
{#if filtersLabel}
|
||||
<div class="filter-label">
|
||||
<Label>{filtersLabel}</Label>
|
||||
</div>
|
||||
{/if}
|
||||
<div class="fields" class:with-bindings={allowBindings}>
|
||||
{#each fieldFilters as filter}
|
||||
<Select
|
||||
bind:value={filter.field}
|
||||
options={fieldOptions}
|
||||
|
@@ -249,38 +234,32 @@
|
|||
on:change={() => onOperatorChange(filter)}
|
||||
placeholder={null}
|
||||
/>
|
||||
<Select
|
||||
disabled={filter.noValue || !filter.field}
|
||||
options={valueTypeOptions}
|
||||
bind:value={filter.valueType}
|
||||
on:change={() => onValueTypeChange(filter)}
|
||||
placeholder={null}
|
||||
/>
|
||||
{#if filter.field && filter.valueType === "Binding"}
|
||||
<DrawerBindableInput
|
||||
disabled={filter.noValue}
|
||||
title={filter.field}
|
||||
value={filter.value}
|
||||
placeholder="Value"
|
||||
{panel}
|
||||
{bindings}
|
||||
on:change={event => (filter.value = event.detail)}
|
||||
{#if allowBindings}
|
||||
<Select
|
||||
disabled={filter.noValue || !filter.field}
|
||||
options={valueTypeOptions}
|
||||
bind:value={filter.valueType}
|
||||
on:change={() => onValueTypeChange(filter)}
|
||||
placeholder={null}
|
||||
/>
|
||||
{:else if ["string", "longform", "number", "bigint", "formula"].includes(filter.type)}
|
||||
{/if}
|
||||
{#if allowBindings && filter.field && filter.valueType === "Binding"}
|
||||
<slot name="binding" {filter} />
|
||||
{:else if [FieldType.STRING, FieldType.LONGFORM, FieldType.NUMBER, FieldType.BIGINT, FieldType.FORMULA].includes(filter.type)}
|
||||
<Input disabled={filter.noValue} bind:value={filter.value} />
|
||||
{:else if filter.type === "array" || (filter.type === "options" && filter.operator === "oneOf")}
|
||||
{:else if filter.type === FieldType.ARRAY || (filter.type === FieldType.OPTIONS && filter.operator === SearchFilterOperator.ONE_OF)}
|
||||
<Multiselect
|
||||
disabled={filter.noValue}
|
||||
options={getFieldOptions(filter.field)}
|
||||
bind:value={filter.value}
|
||||
/>
|
||||
{:else if filter.type === "options"}
|
||||
{:else if filter.type === FieldType.OPTIONS}
|
||||
<Combobox
|
||||
disabled={filter.noValue}
|
||||
options={getFieldOptions(filter.field)}
|
||||
bind:value={filter.value}
|
||||
/>
|
||||
{:else if filter.type === "boolean"}
|
||||
{:else if filter.type === FieldType.BOOLEAN}
|
||||
<Combobox
|
||||
disabled={filter.noValue}
|
||||
options={[
|
||||
|
@@ -289,7 +268,7 @@
|
|||
]}
|
||||
bind:value={filter.value}
|
||||
/>
|
||||
{:else if filter.type === "datetime"}
|
||||
{:else if filter.type === FieldType.DATETIME}
|
||||
<DatePicker
|
||||
disabled={filter.noValue}
|
||||
enableTime={!getSchema(filter)?.dateOnly}
|
||||
|
@@ -306,32 +285,36 @@
|
|||
disabled={filter.noValue}
|
||||
/>
|
||||
{:else}
|
||||
<DrawerBindableInput disabled />
|
||||
<Input disabled />
|
||||
{/if}
|
||||
<Icon
|
||||
name="Duplicate"
|
||||
hoverable
|
||||
size="S"
|
||||
on:click={() => duplicateFilter(filter.id)}
|
||||
/>
|
||||
<Icon
|
||||
name="Close"
|
||||
hoverable
|
||||
size="S"
|
||||
on:click={() => removeFilter(filter.id)}
|
||||
/>
|
||||
<div class="controls">
|
||||
<Icon
|
||||
name="Duplicate"
|
||||
hoverable
|
||||
size="S"
|
||||
on:click={() => duplicateFilter(filter.id)}
|
||||
/>
|
||||
<Icon
|
||||
name="Close"
|
||||
hoverable
|
||||
size="S"
|
||||
on:click={() => removeFilter(filter.id)}
|
||||
/>
|
||||
</div>
|
||||
{/each}
|
||||
</div>
|
||||
</div>
|
||||
{/if}
|
||||
<div class="bottom">
|
||||
<div>
|
||||
<Button icon="AddCircle" size="M" secondary on:click={addFilter}>
|
||||
Add filter
|
||||
</Button>
|
||||
</div>
|
||||
</Layout>
|
||||
</div>
|
||||
</DrawerContent>
|
||||
{:else}
|
||||
<Body size="S">None of the table column can be used for filtering.</Body>
|
||||
{/if}
|
||||
</Layout>
|
||||
</div>
|
||||
|
||||
<style>
|
||||
.container {
|
||||
|
@@ -339,22 +322,42 @@
|
|||
max-width: 1000px;
|
||||
margin: 0 auto;
|
||||
}
|
||||
|
||||
.fields {
|
||||
display: grid;
|
||||
column-gap: var(--spacing-l);
|
||||
row-gap: var(--spacing-s);
|
||||
align-items: center;
|
||||
grid-template-columns: 1fr 120px 1fr auto auto;
|
||||
}
|
||||
.fields.with-bindings {
|
||||
grid-template-columns: minmax(150px, 1fr) 170px 120px minmax(150px, 1fr) 16px 16px;
|
||||
}
|
||||
|
||||
.controls {
|
||||
display: contents;
|
||||
}
|
||||
|
||||
.container.mobile .fields {
|
||||
grid-template-columns: 1fr;
|
||||
}
|
||||
.container.mobile .controls {
|
||||
display: flex;
|
||||
flex-direction: row;
|
||||
justify-content: flex-start;
|
||||
align-items: center;
|
||||
padding: var(--spacing-s) 0;
|
||||
gap: var(--spacing-s);
|
||||
}
|
||||
|
||||
.filter-label {
|
||||
margin-bottom: var(--spacing-s);
|
||||
}
|
||||
|
||||
.bottom {
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
.behaviour-filters {
|
||||
display: grid;
|
||||
column-gap: var(--spacing-l);
|
||||
row-gap: var(--spacing-s);
|
||||
align-items: center;
|
||||
grid-template-columns: minmax(150px, 1fr) 170px 120px minmax(150px, 1fr) 16px 16px;
|
||||
}
|
||||
</style>
|
|
@@ -1,9 +1,9 @@
|
|||
<script>
|
||||
import { Select, Multiselect } from "@budibase/bbui"
|
||||
import { fetchData } from "@budibase/frontend-core"
|
||||
import { createAPIClient } from "../api"
|
||||
|
||||
import { API } from "api"
|
||||
|
||||
export let API = createAPIClient()
|
||||
export let value = null
|
||||
export let disabled
|
||||
export let multiselect = false
|
|
@@ -61,14 +61,6 @@
|
|||
}
|
||||
}
|
||||
|
||||
const deleteAttachments = async fileList => {
|
||||
try {
|
||||
return await API.deleteBuilderAttachments(fileList)
|
||||
} catch (error) {
|
||||
return []
|
||||
}
|
||||
}
|
||||
|
||||
onMount(() => {
|
||||
api = {
|
||||
focus: () => open(),
|
||||
|
@@ -101,7 +93,6 @@
|
|||
on:change={e => onChange(e.detail)}
|
||||
maximum={maximum || schema.constraints?.length?.maximum}
|
||||
{processFiles}
|
||||
{deleteAttachments}
|
||||
{handleFileTooLarge}
|
||||
/>
|
||||
</div>
|
||||
|
|
|
@@ -6,3 +6,4 @@ export { default as UserAvatars } from "./UserAvatars.svelte"
|
|||
export { default as Updating } from "./Updating.svelte"
|
||||
export { Grid } from "./grid"
|
||||
export { default as ClientAppSkeleton } from "./ClientAppSkeleton.svelte"
|
||||
export { default as FilterBuilder } from "./FilterBuilder.svelte"
|
||||
|
|
|
@@ -348,8 +348,7 @@ export default class DataFetch {
|
|||
* Determine the feature flag for this datasource definition
|
||||
* @param definition
|
||||
*/
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
determineFeatureFlags(definition) {
|
||||
determineFeatureFlags(_definition) {
|
||||
return {
|
||||
supportsSearch: false,
|
||||
supportsSort: false,
|
||||
|
|
|
@@ -1,3 +1,5 @@
|
|||
const extractFileNameRegex = /filename[^;=\n]*=((['"]).*?\2|[^;\n]*)/
|
||||
|
||||
export function downloadText(filename, text) {
|
||||
if (typeof text === "object") {
|
||||
text = JSON.stringify(text)
|
||||
|
@@ -17,9 +19,7 @@ export async function downloadStream(streamResponse) {
|
|||
|
||||
const contentDisposition = streamResponse.headers.get("Content-Disposition")
|
||||
|
||||
const matches = /filename[^;=\n]*=((['"]).*?\2|[^;\n]*)/.exec(
|
||||
contentDisposition
|
||||
)
|
||||
const matches = extractFileNameRegex.exec(contentDisposition)
|
||||
|
||||
const filename = matches[1].replace(/['"]/g, "")
|
||||
|
||||
|
@@ -34,3 +34,33 @@ export async function downloadStream(streamResponse) {
|
|||
|
||||
URL.revokeObjectURL(blobUrl)
|
||||
}
|
||||
|
||||
export async function downloadFile(url, body) {
|
||||
const response = await fetch(url, {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
body: JSON.stringify(body),
|
||||
})
|
||||
|
||||
if (!response.ok) {
|
||||
return false
|
||||
} else {
|
||||
const contentDisposition = response.headers.get("Content-Disposition")
|
||||
|
||||
const matches = extractFileNameRegex.exec(contentDisposition)
|
||||
|
||||
const filename = matches[1].replace(/['"]/g, "")
|
||||
|
||||
const url = URL.createObjectURL(await response.blob())
|
||||
|
||||
const link = document.createElement("a")
|
||||
link.href = url
|
||||
link.download = filename
|
||||
link.click()
|
||||
|
||||
URL.revokeObjectURL(url)
|
||||
return true
|
||||
}
|
||||
}
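Both `downloadStream` and the new `downloadFile` now share `extractFileNameRegex` for pulling a filename out of a `Content-Disposition` header. The extraction in isolation, as a sketch:

```ts
// Sketch of the shared Content-Disposition filename extraction above.
const extractFileNameRegex = /filename[^;=\n]*=((['"]).*?\2|[^;\n]*)/

function fileNameFromHeader(contentDisposition: string | null): string | undefined {
  if (!contentDisposition) {
    return undefined
  }
  const matches = extractFileNameRegex.exec(contentDisposition)
  // Group 1 may still carry quotes, e.g. filename="report.csv"
  return matches?.[1]?.replace(/['"]/g, "")
}

// fileNameFromHeader('attachment; filename="export.csv"') => "export.csv"
```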
|
||||
|
|
|
@@ -1 +1 @@
|
|||
Subproject commit ef186d00241f96037f9fd34d7a3826041977ab3a
|
||||
Subproject commit 06b1064f7e2f7cac5d4bef2ee999796a2a1f0f2c
|
|
@@ -50,10 +50,10 @@
|
|||
"@apidevtools/swagger-parser": "10.0.3",
|
||||
"@budibase/backend-core": "0.0.0",
|
||||
"@budibase/client": "0.0.0",
|
||||
"@budibase/frontend-core": "0.0.0",
|
||||
"@budibase/pro": "0.0.0",
|
||||
"@budibase/shared-core": "0.0.0",
|
||||
"@budibase/string-templates": "0.0.0",
|
||||
"@budibase/frontend-core": "0.0.0",
|
||||
"@budibase/types": "0.0.0",
|
||||
"@bull-board/api": "5.10.2",
|
||||
"@bull-board/koa": "5.10.2",
|
||||
|
@@ -63,6 +63,7 @@
|
|||
"@socket.io/redis-adapter": "^8.2.1",
|
||||
"airtable": "0.10.1",
|
||||
"arangojs": "7.2.0",
|
||||
"archiver": "7.0.1",
|
||||
"aws-sdk": "2.1030.0",
|
||||
"bcrypt": "5.1.0",
|
||||
"bcryptjs": "2.4.3",
|
||||
|
@@ -124,6 +125,7 @@
|
|||
"@babel/preset-env": "7.16.11",
|
||||
"@swc/core": "1.3.71",
|
||||
"@swc/jest": "0.2.27",
|
||||
"@types/archiver": "6.0.2",
|
||||
"@types/global-agent": "2.1.1",
|
||||
"@types/google-spreadsheet": "3.1.5",
|
||||
"@types/jest": "29.5.5",
|
||||
|
|
|
@@ -320,6 +320,7 @@ async function performAppCreate(ctx: UserCtx<CreateAppRequest, App>) {
|
|||
"theme",
|
||||
"customTheme",
|
||||
"icon",
|
||||
"snippets",
|
||||
]
|
||||
keys.forEach(key => {
|
||||
if (existing[key]) {
|
||||
|
|
|
@@ -36,7 +36,6 @@ import { getDatasourceAndQuery } from "../../../sdk/app/rows/utils"
|
|||
import { processObjectSync } from "@budibase/string-templates"
|
||||
import { cloneDeep } from "lodash/fp"
|
||||
import { db as dbCore } from "@budibase/backend-core"
|
||||
import AliasTables from "./alias"
|
||||
import sdk from "../../../sdk"
|
||||
import env from "../../../environment"
|
||||
|
||||
|
@@ -120,6 +119,9 @@ async function removeManyToManyRelationships(
|
|||
endpoint: getEndpoint(tableId, Operation.DELETE),
|
||||
body: { [colName]: null },
|
||||
filters,
|
||||
meta: {
|
||||
table,
|
||||
},
|
||||
})
|
||||
} else {
|
||||
return []
|
||||
|
@@ -134,6 +136,9 @@ async function removeOneToManyRelationships(rowId: string, table: Table) {
|
|||
return getDatasourceAndQuery({
|
||||
endpoint: getEndpoint(tableId, Operation.UPDATE),
|
||||
filters,
|
||||
meta: {
|
||||
table,
|
||||
},
|
||||
})
|
||||
} else {
|
||||
return []
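This and the neighbouring hunks make the same change: every `QueryJson` handed to `getDatasourceAndQuery` now carries `meta: { table }`, so downstream SQL generation can resolve the table being operated on. Schematically (shapes simplified; the real `QueryJson` lives in `@budibase/types`):

```ts
// Schematic of the meta.table threading repeated across these hunks.
interface TableRef {
  _id: string
  name: string
}

function withTableMeta<T extends object>(
  json: T,
  table: TableRef
): T & { meta: { table: TableRef } } {
  return { ...json, meta: { table } }
}
```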
|
||||
|
@@ -249,6 +254,9 @@ export class ExternalRequest<T extends Operation> {
|
|||
const response = await getDatasourceAndQuery({
|
||||
endpoint: getEndpoint(table._id!, Operation.READ),
|
||||
filters: buildFilters(rowId, {}, table),
|
||||
meta: {
|
||||
table,
|
||||
},
|
||||
})
|
||||
if (Array.isArray(response) && response.length > 0) {
|
||||
return response[0]
|
||||
|
@@ -396,6 +404,9 @@ export class ExternalRequest<T extends Operation> {
|
|||
[fieldName]: row[lookupField],
|
||||
},
|
||||
},
|
||||
meta: {
|
||||
table,
|
||||
},
|
||||
})
|
||||
// this is the response from knex if no rows found
|
||||
const rows: Row[] =
|
||||
|
@@ -426,6 +437,7 @@ export class ExternalRequest<T extends Operation> {
|
|||
// if we're creating (in a through table) we need to wipe the existing ones first
|
||||
const promises = []
|
||||
const related = await this.lookupRelations(mainTableId, row)
|
||||
const table = this.getTable(mainTableId)!
|
||||
for (let relationship of relationships) {
|
||||
const { key, tableId, isUpdate, id, ...rest } = relationship
|
||||
const body: { [key: string]: any } = processObjectSync(rest, row, {})
|
||||
|
@@ -471,6 +483,9 @@ export class ExternalRequest<T extends Operation> {
|
|||
// if we're doing many relationships then we're writing, only one response
|
||||
body,
|
||||
filters: buildFilters(id, {}, linkTable),
|
||||
meta: {
|
||||
table,
|
||||
},
|
||||
})
|
||||
)
|
||||
} else {
|
||||
|
@@ -618,7 +633,7 @@ export class ExternalRequest<T extends Operation> {
|
|||
if (env.SQL_ALIASING_DISABLE) {
|
||||
response = await getDatasourceAndQuery(json)
|
||||
} else {
|
||||
const aliasing = new AliasTables(Object.keys(this.tables))
|
||||
const aliasing = new sdk.rows.AliasTables(Object.keys(this.tables))
|
||||
response = await aliasing.queryWithAliasing(json)
|
||||
}
|
||||
|
||||
|
|
|
@@ -1,4 +1,8 @@
|
|||
import stream from "stream"
|
||||
import archiver from "archiver"
|
||||
|
||||
import { quotas } from "@budibase/pro"
|
||||
import { objectStore } from "@budibase/backend-core"
|
||||
import * as internal from "./internal"
|
||||
import * as external from "./external"
|
||||
import { isExternalTableID } from "../../../integrations/utils"
|
||||
|
@@ -9,10 +13,12 @@ import {
|
|||
DeleteRows,
|
||||
ExportRowsRequest,
|
||||
ExportRowsResponse,
|
||||
FieldType,
|
||||
GetRowResponse,
|
||||
PatchRowRequest,
|
||||
PatchRowResponse,
|
||||
Row,
|
||||
RowAttachment,
|
||||
RowSearchParams,
|
||||
SearchRowRequest,
|
||||
SearchRowResponse,
|
||||
|
@@ -251,3 +257,59 @@ export const exportRows = async (
|
|||
ctx.attachment(fileName)
|
||||
ctx.body = apiFileReturn(content)
|
||||
}
|
||||
|
||||
export async function downloadAttachment(ctx: UserCtx) {
|
||||
const { columnName } = ctx.params
|
||||
|
||||
const tableId = utils.getTableId(ctx)
|
||||
const row = await pickApi(tableId).find(ctx)
|
||||
|
||||
const table = await sdk.tables.getTable(tableId)
|
||||
const columnSchema = table.schema[columnName]
|
||||
if (!columnSchema) {
|
||||
ctx.throw(400, `'${columnName}' is not valid`)
|
||||
}
|
||||
|
||||
const columnType = columnSchema.type
|
||||
|
||||
if (
|
||||
columnType !== FieldType.ATTACHMENTS &&
|
||||
columnType !== FieldType.ATTACHMENT_SINGLE
|
||||
) {
|
||||
ctx.throw(404, `'${columnName}' is not a valid attachment column`)
|
||||
}
|
||||
|
||||
const attachments: RowAttachment[] =
|
||||
columnType === FieldType.ATTACHMENTS ? row[columnName] : [row[columnName]]
|
||||
|
||||
if (!attachments?.length) {
|
||||
ctx.throw(404)
|
||||
}
|
||||
|
||||
if (attachments.length === 1) {
|
||||
const attachment = attachments[0]
|
||||
ctx.attachment(attachment.name)
|
||||
ctx.body = await objectStore.getReadStream(
|
||||
objectStore.ObjectStoreBuckets.APPS,
|
||||
attachment.key
|
||||
)
|
||||
} else {
|
||||
const passThrough = new stream.PassThrough()
|
||||
const archive = archiver.create("zip")
|
||||
archive.pipe(passThrough)
|
||||
|
||||
for (const attachment of attachments) {
|
||||
const attachmentStream = await objectStore.getReadStream(
|
||||
objectStore.ObjectStoreBuckets.APPS,
|
||||
attachment.key
|
||||
)
|
||||
archive.append(attachmentStream, { name: attachment.name })
|
||||
}
|
||||
|
||||
const displayName = row[table.primaryDisplay || "_id"]
|
||||
ctx.attachment(`${displayName}_${columnName}.zip`)
|
||||
archive.finalize()
|
||||
ctx.body = passThrough
|
||||
ctx.type = "zip"
|
||||
}
|
||||
}
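The multi-attachment branch above never buffers the archive: `archiver` writes into a `PassThrough` stream which becomes the response body. The same pattern in isolation, with hypothetical readable sources standing in for the object store:

```ts
// Sketch of the archiver-to-PassThrough streaming used above.
import stream from "stream"
import archiver from "archiver"

function zipStreams(
  files: { name: string; data: stream.Readable }[]
): stream.PassThrough {
  const passThrough = new stream.PassThrough()
  const archive = archiver.create("zip")
  archive.pipe(passThrough)
  for (const file of files) {
    archive.append(file.data, { name: file.name })
  }
  archive.finalize() // flush the archive; consumers read from the PassThrough
  return passThrough
}
```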
|
||||
|
|
|
@@ -62,12 +62,12 @@ export function basicProcessing({
|
|||
row,
|
||||
table,
|
||||
isLinked,
|
||||
internal,
|
||||
sqs,
|
||||
}: {
|
||||
row: Row
|
||||
table: Table
|
||||
isLinked: boolean
|
||||
internal?: boolean
|
||||
sqs?: boolean
|
||||
}): Row {
|
||||
const thisRow: Row = {}
|
||||
// filter the row down to what is actually the row (not joined)
|
||||
|
@@ -84,12 +84,13 @@ export function basicProcessing({
|
|||
thisRow[fieldName] = value
|
||||
}
|
||||
}
|
||||
if (!internal) {
|
||||
if (!sqs) {
|
||||
thisRow._id = generateIdForRow(row, table, isLinked)
|
||||
thisRow.tableId = table._id
|
||||
thisRow._rev = "rev"
|
||||
} else {
|
||||
for (let internalColumn of CONSTANT_INTERNAL_ROW_COLS) {
|
||||
const columns = Object.keys(table.schema)
|
||||
for (let internalColumn of [...CONSTANT_INTERNAL_ROW_COLS, ...columns]) {
|
||||
thisRow[internalColumn] = extractFieldValue({
|
||||
row,
|
||||
tableName: table._id!,
|
||||
|
|
|
@@ -51,11 +51,11 @@ export async function updateRelationshipColumns(
|
|||
continue
|
||||
}
|
||||
|
||||
let linked = await basicProcessing({
|
||||
let linked = basicProcessing({
|
||||
row,
|
||||
table: linkedTable,
|
||||
isLinked: true,
|
||||
internal: opts?.sqs,
|
||||
sqs: opts?.sqs,
|
||||
})
|
||||
if (!linked._id) {
|
||||
continue
|
||||
|
|
|
@@ -132,6 +132,7 @@ export async function sqlOutputProcessing(
|
|||
let rowId = row._id
|
||||
if (opts?.sqs) {
|
||||
rowId = getInternalRowId(row, table)
|
||||
row._id = rowId
|
||||
} else if (!rowId) {
|
||||
rowId = generateIdForRow(row, table)
|
||||
row._id = rowId
|
||||
|
@@ -153,7 +154,7 @@ export async function sqlOutputProcessing(
|
|||
row,
|
||||
table,
|
||||
isLinked: false,
|
||||
internal: opts?.sqs,
|
||||
sqs: opts?.sqs,
|
||||
}),
|
||||
table
|
||||
)
|
||||
|
@@ -167,7 +168,8 @@ export async function sqlOutputProcessing(
|
|||
tables,
|
||||
row,
|
||||
finalRows,
|
||||
relationships
|
||||
relationships,
|
||||
opts
|
||||
)
|
||||
}
|
||||
|
||||
|
|
|
@@ -127,13 +127,6 @@ export const uploadFile = async function (
|
|||
)
|
||||
}
|
||||
|
||||
export const deleteObjects = async function (ctx: Ctx) {
|
||||
ctx.body = await objectStore.deleteFiles(
|
||||
ObjectStoreBuckets.APPS,
|
||||
ctx.request.body.keys
|
||||
)
|
||||
}
|
||||
|
||||
const requiresMigration = async (ctx: Ctx) => {
|
||||
const appId = context.getAppId()
|
||||
if (!appId) {
|
||||
|
|
|
@@ -22,6 +22,7 @@ export async function makeTableRequest(
|
|||
operation,
|
||||
},
|
||||
meta: {
|
||||
table,
|
||||
tables,
|
||||
},
|
||||
table,
|
||||
|
|
|
@@ -77,6 +77,12 @@ router
|
|||
authorized(PermissionType.TABLE, PermissionLevel.WRITE),
|
||||
rowController.exportRows
|
||||
)
|
||||
.get(
|
||||
"/api/:sourceId/rows/:rowId/attachment/:columnName",
|
||||
paramSubResource("sourceId", "rowId"),
|
||||
authorized(PermissionType.TABLE, PermissionLevel.READ),
|
||||
rowController.downloadAttachment
|
||||
)
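A hypothetical client call against the route registered above; per the controller earlier in this diff, a single-attachment column streams the file itself while a multi-attachment column returns a zip:

```ts
// Hypothetical usage of the new attachment download route.
async function downloadAttachment(
  sourceId: string,
  rowId: string,
  columnName: string
): Promise<Blob> {
  const res = await fetch(
    `/api/${sourceId}/rows/${rowId}/attachment/${columnName}`
  )
  if (!res.ok) {
    throw new Error(`attachment download failed: ${res.status}`)
  }
  return res.blob()
}
```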
|
||||
|
||||
router.post(
|
||||
"/api/v2/views/:viewId/search",
|
||||
|
|
|
@@ -32,11 +32,6 @@ router
|
|||
.get("/builder/:file*", controller.serveBuilder)
|
||||
.get("/api/assets/client", controller.serveClientLibrary)
|
||||
.post("/api/attachments/process", authorized(BUILDER), controller.uploadFile)
|
||||
.post(
|
||||
"/api/attachments/delete",
|
||||
authorized(BUILDER),
|
||||
controller.deleteObjects
|
||||
)
|
||||
.post("/api/beta/:feature", controller.toggleBetaUiFeature)
|
||||
.post(
|
||||
"/api/attachments/:tableId/upload",
|
||||
|
@@ -44,12 +39,6 @@
|
|||
authorized(PermissionType.TABLE, PermissionLevel.WRITE),
|
||||
controller.uploadFile
|
||||
)
|
||||
.post(
|
||||
"/api/attachments/:tableId/delete",
|
||||
paramResource("tableId"),
|
||||
authorized(PermissionType.TABLE, PermissionLevel.WRITE),
|
||||
controller.deleteObjects
|
||||
)
|
||||
.get("/app/preview", authorized(BUILDER), controller.serveBuilderPreview)
|
||||
.get("/app/:appUrl/:path*", controller.serveApp)
|
||||
.get("/:appId/:path*", controller.serveApp)
|
||||
|
|
|
@@ -6,10 +6,14 @@ import {
|
|||
Datasource,
|
||||
EmptyFilterOption,
|
||||
FieldType,
|
||||
Row,
|
||||
RowSearchParams,
|
||||
SearchFilters,
|
||||
SortOrder,
|
||||
SortType,
|
||||
Table,
|
||||
TableSchema,
|
||||
} from "@budibase/types"
|
||||
import _ from "lodash"
|
||||
|
||||
jest.unmock("mssql")
|
||||
|
||||
|
@@ -25,8 +29,8 @@ describe.each([
|
|||
const config = setup.getConfig()
|
||||
|
||||
let envCleanup: (() => void) | undefined
|
||||
let table: Table
|
||||
let datasource: Datasource | undefined
|
||||
let table: Table
|
||||
|
||||
beforeAll(async () => {
|
||||
if (isSqs) {
|
||||
|
@@ -47,231 +51,503 @@ describe.each([
|
|||
}
|
||||
})
|
||||
|
||||
async function createTable(schema: TableSchema) {
|
||||
table = await config.api.table.save(
|
||||
tableForDatasource(datasource, { schema })
|
||||
)
|
||||
}
|
||||
|
||||
async function createRows(rows: Record<string, any>[]) {
|
||||
await Promise.all(rows.map(r => config.api.row.save(table._id!, r)))
|
||||
}
|
||||
|
||||
class SearchAssertion {
|
||||
constructor(private readonly query: RowSearchParams) {}
|
||||
|
||||
// Asserts that the query returns rows matching exactly the set of rows
|
||||
// passed in. The order of the rows matters. Rows returned in an order
|
||||
// different to the one passed in will cause the assertion to fail. Extra
|
||||
// rows returned by the query will also cause the assertion to fail.
|
||||
async toMatchExactly(expectedRows: any[]) {
|
||||
const { rows: foundRows } = await config.api.row.search(table._id!, {
|
||||
...this.query,
|
||||
tableId: table._id!,
|
||||
})
|
||||
|
||||
// eslint-disable-next-line jest/no-standalone-expect
|
||||
expect(foundRows).toHaveLength(expectedRows.length)
|
||||
// eslint-disable-next-line jest/no-standalone-expect
|
||||
expect(foundRows).toEqual(
|
||||
expectedRows.map((expectedRow: any) =>
|
||||
expect.objectContaining(
|
||||
foundRows.find(foundRow => _.isMatch(foundRow, expectedRow))
|
||||
)
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
// Asserts that the query returns rows matching exactly the set of rows
|
||||
// passed in. The order of the rows is not important, but extra rows will
|
||||
// cause the assertion to fail.
|
||||
async toContainExactly(expectedRows: any[]) {
|
||||
const { rows: foundRows } = await config.api.row.search(table._id!, {
|
||||
...this.query,
|
||||
tableId: table._id!,
|
||||
})
|
||||
|
||||
// eslint-disable-next-line jest/no-standalone-expect
|
||||
expect(foundRows).toHaveLength(expectedRows.length)
|
||||
// eslint-disable-next-line jest/no-standalone-expect
|
||||
expect(foundRows).toEqual(
|
||||
expect.arrayContaining(
|
||||
expectedRows.map((expectedRow: any) =>
|
||||
expect.objectContaining(
|
||||
foundRows.find(foundRow => _.isMatch(foundRow, expectedRow))
|
||||
)
|
||||
)
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
// Asserts that the query returns rows matching the set of rows passed in.
|
||||
// The order of the rows is not important. Extra rows will not cause the
|
||||
// assertion to fail.
|
||||
async toContain(expectedRows: any[]) {
|
||||
const { rows: foundRows } = await config.api.row.search(table._id!, {
|
||||
...this.query,
|
||||
tableId: table._id!,
|
||||
})
|
||||
|
||||
// eslint-disable-next-line jest/no-standalone-expect
|
||||
expect(foundRows).toEqual(
|
||||
expect.arrayContaining(
|
||||
expectedRows.map((expectedRow: any) =>
|
||||
expect.objectContaining(
|
||||
foundRows.find(foundRow => _.isMatch(foundRow, expectedRow))
|
||||
)
|
||||
)
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
async toFindNothing() {
|
||||
await this.toContainExactly([])
|
||||
}
|
||||
|
||||
async toHaveLength(length: number) {
|
||||
const { rows: foundRows } = await config.api.row.search(table._id!, {
|
||||
...this.query,
|
||||
tableId: table._id!,
|
||||
})
|
||||
|
||||
// eslint-disable-next-line jest/no-standalone-expect
|
||||
expect(foundRows).toHaveLength(length)
|
||||
}
|
||||
}
|
||||
|
||||
function expectSearch(query: Omit<RowSearchParams, "tableId">) {
|
||||
return new SearchAssertion({ ...query, tableId: table._id! })
|
||||
}
|
||||
|
||||
function expectQuery(query: SearchFilters) {
|
||||
return expectSearch({ query })
|
||||
}
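The assertions above differ only in how strictly they treat row order and extra rows; all of them pair each expected partial row with a found row via lodash's `_.isMatch`. That pairing trick in isolation:

```ts
import _ from "lodash"

// Each expected partial row is paired with the first found row that
// contains it; the assertions then wrap these in expect.objectContaining.
function pairRows(
  found: Record<string, any>[],
  expected: Record<string, any>[]
) {
  return expected.map(e => found.find(f => _.isMatch(f, e)))
}
```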
|
||||
|
||||
describe("strings", () => {
|
||||
beforeAll(async () => {
|
||||
table = await config.api.table.save(
|
||||
tableForDatasource(datasource, {
|
||||
schema: {
|
||||
name: {
|
||||
name: "name",
|
||||
type: FieldType.STRING,
|
||||
},
|
||||
},
|
||||
})
|
||||
)
|
||||
await createTable({
|
||||
name: { name: "name", type: FieldType.STRING },
|
||||
})
|
||||
await createRows([{ name: "foo" }, { name: "bar" }])
|
||||
})
|
||||
|
||||
const rows = [{ name: "foo" }, { name: "bar" }]
|
||||
let savedRows: Row[]
|
||||
describe("misc", () => {
|
||||
it("should return all if no query is passed", () =>
|
||||
expectSearch({} as RowSearchParams).toContainExactly([
|
||||
{ name: "foo" },
|
||||
{ name: "bar" },
|
||||
]))
|
||||
|
||||
beforeAll(async () => {
|
||||
savedRows = await Promise.all(
|
||||
rows.map(r => config.api.row.save(table._id!, r))
|
||||
)
|
||||
it("should return all if empty query is passed", () =>
|
||||
expectQuery({}).toContainExactly([{ name: "foo" }, { name: "bar" }]))
|
||||
|
||||
it("should return all if onEmptyFilter is RETURN_ALL", () =>
|
||||
expectQuery({
|
||||
onEmptyFilter: EmptyFilterOption.RETURN_ALL,
|
||||
}).toContainExactly([{ name: "foo" }, { name: "bar" }]))
|
||||
|
||||
it("should return nothing if onEmptyFilter is RETURN_NONE", () =>
|
||||
expectQuery({
|
||||
onEmptyFilter: EmptyFilterOption.RETURN_NONE,
|
||||
}).toFindNothing())
|
||||
|
||||
it("should respect limit", () =>
|
||||
expectSearch({ limit: 1, paginate: true, query: {} }).toHaveLength(1))
|
||||
})
|
||||
|
||||
interface StringSearchTest {
|
||||
query: SearchFilters
|
||||
expected: (typeof rows)[number][]
|
||||
}
|
||||
describe("equal", () => {
|
||||
it("successfully finds a row", () =>
|
||||
expectQuery({ equal: { name: "foo" } }).toContainExactly([
|
||||
{ name: "foo" },
|
||||
]))
|
||||
|
||||
const stringSearchTests: StringSearchTest[] = [
|
||||
// These three test cases are generic and don't really need
|
||||
// to be repeated for all data types, so we just do them here.
|
||||
{ query: {}, expected: rows },
|
||||
{
|
||||
query: { onEmptyFilter: EmptyFilterOption.RETURN_ALL },
|
||||
expected: rows,
|
||||
},
|
||||
{
|
||||
query: { onEmptyFilter: EmptyFilterOption.RETURN_NONE },
|
||||
expected: [],
|
||||
},
|
||||
// The rest of these tests are specific to strings.
|
||||
{ query: { string: { name: "foo" } }, expected: [rows[0]] },
|
||||
{ query: { string: { name: "none" } }, expected: [] },
|
||||
{ query: { fuzzy: { name: "oo" } }, expected: [rows[0]] },
|
||||
{ query: { equal: { name: "foo" } }, expected: [rows[0]] },
|
||||
{ query: { notEqual: { name: "foo" } }, expected: [rows[1]] },
|
||||
{ query: { oneOf: { name: ["foo"] } }, expected: [rows[0]] },
|
||||
]
|
||||
it("fails to find nonexistent row", () =>
|
||||
expectQuery({ equal: { name: "none" } }).toFindNothing())
|
||||
})
|
||||
|
||||
it.each(stringSearchTests)(
|
||||
`should be able to run query: $query`,
|
||||
async ({ query, expected }) => {
|
||||
const { rows: foundRows } = await config.api.row.search(table._id!, {
|
||||
tableId: table._id!,
|
||||
query,
|
||||
})
|
||||
expect(foundRows).toHaveLength(expected.length)
|
||||
expect(foundRows).toEqual(
|
||||
expect.arrayContaining(
|
||||
expected.map(r =>
|
||||
expect.objectContaining(savedRows.find(sr => sr.name === r.name)!)
|
||||
)
|
||||
)
|
||||
)
|
||||
}
|
||||
)
|
||||
describe("notEqual", () => {
|
||||
it("successfully finds a row", () =>
|
||||
expectQuery({ notEqual: { name: "foo" } }).toContainExactly([
|
||||
{ name: "bar" },
|
||||
]))
|
||||
|
||||
it("fails to find nonexistent row", () =>
|
||||
expectQuery({ notEqual: { name: "bar" } }).toContainExactly([
|
||||
{ name: "foo" },
|
||||
]))
|
||||
})
|
||||
|
||||
describe("oneOf", () => {
|
||||
it("successfully finds a row", () =>
|
||||
expectQuery({ oneOf: { name: ["foo"] } }).toContainExactly([
|
||||
{ name: "foo" },
|
||||
]))
|
||||
|
||||
it("fails to find nonexistent row", () =>
|
||||
expectQuery({ oneOf: { name: ["none"] } }).toFindNothing())
|
||||
})
|
||||
|
||||
describe("fuzzy", () => {
|
||||
it("successfully finds a row", () =>
|
||||
expectQuery({ fuzzy: { name: "oo" } }).toContainExactly([
|
||||
{ name: "foo" },
|
||||
]))
|
||||
|
||||
it("fails to find nonexistent row", () =>
|
||||
expectQuery({ fuzzy: { name: "none" } }).toFindNothing())
|
||||
})
|
||||
|
||||
describe("range", () => {
|
||||
it("successfully finds multiple rows", () =>
|
||||
expectQuery({
|
||||
range: { name: { low: "a", high: "z" } },
|
||||
}).toContainExactly([{ name: "bar" }, { name: "foo" }]))
|
||||
|
||||
it("successfully finds a row with a high bound", () =>
|
||||
expectQuery({
|
||||
range: { name: { low: "a", high: "c" } },
|
||||
}).toContainExactly([{ name: "bar" }]))
|
||||
|
||||
it("successfully finds a row with a low bound", () =>
|
||||
expectQuery({
|
||||
range: { name: { low: "f", high: "z" } },
|
||||
}).toContainExactly([{ name: "foo" }]))
|
||||
|
||||
it("successfully finds no rows", () =>
|
||||
expectQuery({
|
||||
range: { name: { low: "g", high: "h" } },
|
||||
}).toFindNothing())
|
||||
})
|
||||
|
||||
describe("sort", () => {
|
||||
it("sorts ascending", () =>
|
||||
expectSearch({
|
||||
query: {},
|
||||
sort: "name",
|
||||
sortOrder: SortOrder.ASCENDING,
|
||||
}).toMatchExactly([{ name: "bar" }, { name: "foo" }]))
|
||||
|
||||
it("sorts descending", () =>
|
||||
expectSearch({
|
||||
query: {},
|
||||
sort: "name",
|
||||
sortOrder: SortOrder.DESCENDING,
|
||||
}).toMatchExactly([{ name: "foo" }, { name: "bar" }]))
|
||||
|
||||
describe("sortType STRING", () => {
|
||||
it("sorts ascending", () =>
|
||||
expectSearch({
|
||||
query: {},
|
||||
sort: "name",
|
||||
sortType: SortType.STRING,
|
||||
sortOrder: SortOrder.ASCENDING,
|
||||
}).toMatchExactly([{ name: "bar" }, { name: "foo" }]))
|
||||
|
||||
it("sorts descending", () =>
|
||||
expectSearch({
|
||||
query: {},
|
||||
sort: "name",
|
||||
sortType: SortType.STRING,
|
||||
sortOrder: SortOrder.DESCENDING,
|
||||
}).toMatchExactly([{ name: "foo" }, { name: "bar" }]))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe("number", () => {
|
||||
describe("numbers", () => {
|
||||
beforeAll(async () => {
|
||||
table = await config.api.table.save(
|
||||
tableForDatasource(datasource, {
|
||||
schema: {
|
||||
age: {
|
||||
name: "age",
|
||||
type: FieldType.NUMBER,
|
||||
},
|
||||
},
|
||||
})
|
||||
)
|
||||
await createTable({
|
||||
age: { name: "age", type: FieldType.NUMBER },
|
||||
})
|
||||
await createRows([{ age: 1 }, { age: 10 }])
|
||||
})
|
||||
|
||||
const rows = [{ age: 1 }, { age: 10 }]
|
||||
let savedRows: Row[]
|
||||
describe("equal", () => {
|
||||
it("successfully finds a row", () =>
|
||||
expectQuery({ equal: { age: 1 } }).toContainExactly([{ age: 1 }]))
|
||||
|
||||
beforeAll(async () => {
|
||||
savedRows = await Promise.all(
|
||||
rows.map(r => config.api.row.save(table._id!, r))
|
||||
)
|
||||
it("fails to find nonexistent row", () =>
|
||||
expectQuery({ equal: { age: 2 } }).toFindNothing())
|
||||
})
|
||||
|
||||
interface NumberSearchTest {
|
||||
query: SearchFilters
|
||||
expected: (typeof rows)[number][]
|
||||
}
|
||||
describe("notEqual", () => {
|
||||
it("successfully finds a row", () =>
|
||||
expectQuery({ notEqual: { age: 1 } }).toContainExactly([{ age: 10 }]))
|
||||
|
||||
const numberSearchTests: NumberSearchTest[] = [
|
||||
{ query: { equal: { age: 1 } }, expected: [rows[0]] },
|
||||
{ query: { equal: { age: 2 } }, expected: [] },
|
||||
{ query: { notEqual: { age: 1 } }, expected: [rows[1]] },
|
||||
{ query: { oneOf: { age: [1] } }, expected: [rows[0]] },
|
||||
{ query: { range: { age: { low: 1, high: 5 } } }, expected: [rows[0]] },
|
||||
{ query: { range: { age: { low: 0, high: 1 } } }, expected: [rows[0]] },
|
||||
{ query: { range: { age: { low: 3, high: 4 } } }, expected: [] },
|
||||
{ query: { range: { age: { low: 0, high: 11 } } }, expected: rows },
|
||||
]
|
||||
it("fails to find nonexistent row", () =>
|
||||
expectQuery({ notEqual: { age: 10 } }).toContainExactly([{ age: 1 }]))
|
||||
})
|
||||
|
||||
it.each(numberSearchTests)(
|
||||
`should be able to run query: $query`,
|
||||
async ({ query, expected }) => {
|
||||
const { rows: foundRows } = await config.api.row.search(table._id!, {
|
||||
tableId: table._id!,
|
||||
query,
|
||||
})
|
||||
expect(foundRows).toHaveLength(expected.length)
|
||||
expect(foundRows).toEqual(
|
||||
expect.arrayContaining(
|
||||
expected.map(r =>
|
||||
expect.objectContaining(savedRows.find(sr => sr.age === r.age)!)
|
||||
)
|
||||
)
|
||||
)
|
||||
}
|
||||
)
|
||||
describe("oneOf", () => {
|
||||
it("successfully finds a row", () =>
|
||||
expectQuery({ oneOf: { age: [1] } }).toContainExactly([{ age: 1 }]))
|
||||
|
||||
it("fails to find nonexistent row", () =>
|
||||
expectQuery({ oneOf: { age: [2] } }).toFindNothing())
|
||||
})
|
||||
|
||||
describe("range", () => {
|
||||
it("successfully finds a row", () =>
|
||||
expectQuery({
|
||||
range: { age: { low: 1, high: 5 } },
|
||||
}).toContainExactly([{ age: 1 }]))
|
||||
|
||||
it("successfully finds multiple rows", () =>
|
||||
expectQuery({
|
||||
range: { age: { low: 1, high: 10 } },
|
||||
}).toContainExactly([{ age: 1 }, { age: 10 }]))
|
||||
|
||||
it("successfully finds a row with a high bound", () =>
|
||||
expectQuery({
|
||||
range: { age: { low: 5, high: 10 } },
|
||||
}).toContainExactly([{ age: 10 }]))
|
||||
|
||||
it("successfully finds no rows", () =>
|
||||
expectQuery({
|
||||
range: { age: { low: 5, high: 9 } },
|
||||
}).toFindNothing())
|
||||
})
|
||||
|
||||
describe("sort", () => {
|
||||
it("sorts ascending", () =>
|
||||
expectSearch({
|
||||
query: {},
|
||||
sort: "age",
|
||||
sortOrder: SortOrder.ASCENDING,
|
||||
}).toMatchExactly([{ age: 1 }, { age: 10 }]))
|
||||
|
||||
it("sorts descending", () =>
|
||||
expectSearch({
|
||||
query: {},
|
||||
sort: "age",
|
||||
sortOrder: SortOrder.DESCENDING,
|
||||
}).toMatchExactly([{ age: 10 }, { age: 1 }]))
|
||||
})
|
||||
|
||||
describe("sortType NUMBER", () => {
|
||||
it("sorts ascending", () =>
|
||||
expectSearch({
|
||||
query: {},
|
||||
sort: "age",
|
||||
sortType: SortType.NUMBER,
|
||||
sortOrder: SortOrder.ASCENDING,
|
||||
}).toMatchExactly([{ age: 1 }, { age: 10 }]))
|
||||
|
||||
it("sorts descending", () =>
|
||||
expectSearch({
|
||||
query: {},
|
||||
sort: "age",
|
||||
sortType: SortType.NUMBER,
|
||||
sortOrder: SortOrder.DESCENDING,
|
||||
}).toMatchExactly([{ age: 10 }, { age: 1 }]))
|
||||
})
|
||||
})
|
||||
|
||||
describe("dates", () => {
|
||||
beforeEach(async () => {
|
||||
table = await config.api.table.save(
|
||||
tableForDatasource(datasource, {
|
||||
schema: {
|
||||
dob: {
|
||||
name: "dob",
|
||||
type: FieldType.DATETIME,
|
||||
},
|
||||
},
|
||||
})
|
||||
)
|
||||
const JAN_1ST = "2020-01-01T00:00:00.000Z"
|
||||
const JAN_2ND = "2020-01-02T00:00:00.000Z"
|
||||
const JAN_5TH = "2020-01-05T00:00:00.000Z"
|
||||
const JAN_9TH = "2020-01-09T00:00:00.000Z"
|
||||
const JAN_10TH = "2020-01-10T00:00:00.000Z"
|
||||
|
||||
beforeAll(async () => {
|
||||
await createTable({
|
||||
dob: { name: "dob", type: FieldType.DATETIME },
|
||||
})
|
||||
|
||||
await createRows([{ dob: JAN_1ST }, { dob: JAN_10TH }])
|
||||
})
|
||||
|
||||
const rows = [
|
||||
{ dob: new Date("2020-01-01").toISOString() },
|
||||
{ dob: new Date("2020-01-10").toISOString() },
|
||||
]
|
||||
let savedRows: Row[]
|
||||
describe("equal", () => {
|
||||
it("successfully finds a row", () =>
|
||||
expectQuery({ equal: { dob: JAN_1ST } }).toContainExactly([
|
||||
{ dob: JAN_1ST },
|
||||
]))
|
||||
|
||||
beforeEach(async () => {
|
||||
savedRows = await Promise.all(
|
||||
rows.map(r => config.api.row.save(table._id!, r))
|
||||
)
|
||||
it("fails to find nonexistent row", () =>
|
||||
expectQuery({ equal: { dob: JAN_2ND } }).toFindNothing())
|
||||
})
|
||||
|
||||
interface DateSearchTest {
|
||||
query: SearchFilters
|
||||
expected: (typeof rows)[number][]
|
||||
}
|
||||
describe("notEqual", () => {
|
||||
it("successfully finds a row", () =>
|
||||
expectQuery({ notEqual: { dob: JAN_1ST } }).toContainExactly([
|
||||
{ dob: JAN_10TH },
|
||||
]))
|
||||
|
||||
const dateSearchTests: DateSearchTest[] = [
|
||||
{
|
||||
query: { equal: { dob: new Date("2020-01-01").toISOString() } },
|
||||
expected: [rows[0]],
|
||||
},
|
||||
{
|
||||
query: { equal: { dob: new Date("2020-01-02").toISOString() } },
|
||||
expected: [],
|
||||
},
|
||||
{
|
||||
query: { notEqual: { dob: new Date("2020-01-01").toISOString() } },
|
||||
expected: [rows[1]],
|
||||
},
|
||||
{
|
||||
query: { oneOf: { dob: [new Date("2020-01-01").toISOString()] } },
|
||||
expected: [rows[0]],
|
||||
},
|
||||
{
|
||||
query: {
|
||||
range: {
|
||||
dob: {
|
||||
low: new Date("2020-01-01").toISOString(),
|
||||
high: new Date("2020-01-05").toISOString(),
|
||||
},
|
||||
},
|
||||
},
|
||||
expected: [rows[0]],
|
||||
},
|
||||
{
|
||||
query: {
|
||||
range: {
|
||||
dob: {
|
||||
low: new Date("2020-01-01").toISOString(),
|
||||
high: new Date("2020-01-10").toISOString(),
|
||||
},
|
||||
},
|
||||
},
|
||||
expected: rows,
|
||||
},
|
||||
{
|
||||
query: {
|
||||
range: {
|
||||
dob: {
|
||||
low: new Date("2020-01-05").toISOString(),
|
||||
high: new Date("2020-01-10").toISOString(),
|
||||
},
|
||||
},
|
||||
},
|
||||
expected: [rows[1]],
|
||||
},
|
||||
]
|
||||
it("fails to find nonexistent row", () =>
|
||||
expectQuery({ notEqual: { dob: JAN_10TH } }).toContainExactly([
|
||||
{ dob: JAN_1ST },
|
||||
]))
|
||||
})
|
||||
|
||||
it.each(dateSearchTests)(
|
||||
`should be able to run query: $query`,
|
||||
async ({ query, expected }) => {
|
||||
const { rows: foundRows } = await config.api.row.search(table._id!, {
|
||||
tableId: table._id!,
|
||||
query,
|
||||
})
|
||||
expect(foundRows).toHaveLength(expected.length)
|
||||
expect(foundRows).toEqual(
|
||||
expect.arrayContaining(
|
||||
expected.map(r =>
|
||||
expect.objectContaining(savedRows.find(sr => sr.dob === r.dob)!)
|
||||
)
|
||||
)
|
||||
)
|
||||
}
|
||||
)
|
||||
describe("oneOf", () => {
|
||||
it("successfully finds a row", () =>
|
||||
expectQuery({ oneOf: { dob: [JAN_1ST] } }).toContainExactly([
|
||||
{ dob: JAN_1ST },
|
||||
]))
|
||||
|
||||
it("fails to find nonexistent row", () =>
|
||||
expectQuery({ oneOf: { dob: [JAN_2ND] } }).toFindNothing())
|
||||
})
|
||||
|
||||
describe("range", () => {
|
||||
it("successfully finds a row", () =>
|
||||
expectQuery({
|
||||
range: { dob: { low: JAN_1ST, high: JAN_5TH } },
|
||||
}).toContainExactly([{ dob: JAN_1ST }]))
|
||||
|
||||
it("successfully finds multiple rows", () =>
|
||||
expectQuery({
|
||||
range: { dob: { low: JAN_1ST, high: JAN_10TH } },
|
||||
}).toContainExactly([{ dob: JAN_1ST }, { dob: JAN_10TH }]))
|
||||
|
||||
it("successfully finds a row with a high bound", () =>
|
||||
expectQuery({
|
||||
range: { dob: { low: JAN_5TH, high: JAN_10TH } },
|
||||
}).toContainExactly([{ dob: JAN_10TH }]))
|
||||
|
||||
it("successfully finds no rows", () =>
|
||||
expectQuery({
|
||||
range: { dob: { low: JAN_5TH, high: JAN_9TH } },
|
||||
}).toFindNothing())
|
||||
})
|
||||
|
||||
describe("sort", () => {
|
||||
it("sorts ascending", () =>
|
||||
expectSearch({
|
||||
query: {},
|
||||
sort: "dob",
|
||||
sortOrder: SortOrder.ASCENDING,
|
||||
}).toMatchExactly([{ dob: JAN_1ST }, { dob: JAN_10TH }]))
|
||||
|
||||
it("sorts descending", () =>
|
||||
expectSearch({
|
||||
query: {},
|
||||
sort: "dob",
|
||||
sortOrder: SortOrder.DESCENDING,
|
||||
}).toMatchExactly([{ dob: JAN_10TH }, { dob: JAN_1ST }]))
|
||||
|
||||
describe("sortType STRING", () => {
|
||||
it("sorts ascending", () =>
|
||||
expectSearch({
|
||||
query: {},
|
||||
sort: "dob",
|
||||
sortType: SortType.STRING,
|
||||
sortOrder: SortOrder.ASCENDING,
|
||||
}).toMatchExactly([{ dob: JAN_1ST }, { dob: JAN_10TH }]))
|
||||
|
||||
it("sorts descending", () =>
|
||||
expectSearch({
|
||||
query: {},
|
||||
sort: "dob",
|
||||
sortType: SortType.STRING,
|
||||
sortOrder: SortOrder.DESCENDING,
|
||||
}).toMatchExactly([{ dob: JAN_10TH }, { dob: JAN_1ST }]))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe("array of strings", () => {
|
||||
beforeAll(async () => {
|
||||
await createTable({
|
||||
numbers: {
|
||||
name: "numbers",
|
||||
type: FieldType.ARRAY,
|
||||
constraints: { inclusion: ["one", "two", "three"] },
|
||||
},
|
||||
})
|
||||
await createRows([{ numbers: ["one", "two"] }, { numbers: ["three"] }])
|
||||
})
|
||||
|
||||
describe("contains", () => {
|
||||
it("successfully finds a row", () =>
|
||||
expectQuery({ contains: { numbers: ["one"] } }).toContainExactly([
|
||||
{ numbers: ["one", "two"] },
|
||||
]))
|
||||
|
||||
it("fails to find nonexistent row", () =>
|
||||
expectQuery({ contains: { numbers: ["none"] } }).toFindNothing())
|
||||
|
||||
it("fails to find row containing all", () =>
|
||||
expectQuery({
|
||||
contains: { numbers: ["one", "two", "three"] },
|
||||
}).toFindNothing())
|
||||
|
||||
it("finds all with empty list", () =>
|
||||
expectQuery({ contains: { numbers: [] } }).toContainExactly([
|
||||
{ numbers: ["one", "two"] },
|
||||
{ numbers: ["three"] },
|
||||
]))
|
||||
})
|
||||
|
||||
describe("notContains", () => {
|
||||
it("successfully finds a row", () =>
|
||||
expectQuery({ notContains: { numbers: ["one"] } }).toContainExactly([
|
||||
{ numbers: ["three"] },
|
||||
]))
|
||||
|
||||
it("fails to find nonexistent row", () =>
|
||||
expectQuery({
|
||||
notContains: { numbers: ["one", "two", "three"] },
|
||||
}).toContainExactly([
|
||||
{ numbers: ["one", "two"] },
|
||||
{ numbers: ["three"] },
|
||||
]))
|
||||
|
||||
it("finds all with empty list", () =>
|
||||
expectQuery({ notContains: { numbers: [] } }).toContainExactly([
|
||||
{ numbers: ["one", "two"] },
|
||||
{ numbers: ["three"] },
|
||||
]))
|
||||
})
|
||||
|
||||
describe("containsAny", () => {
|
||||
it("successfully finds rows", () =>
|
||||
expectQuery({
|
||||
containsAny: { numbers: ["one", "two", "three"] },
|
||||
}).toContainExactly([
|
||||
{ numbers: ["one", "two"] },
|
||||
{ numbers: ["three"] },
|
||||
]))
|
||||
|
||||
it("fails to find nonexistent row", () =>
|
||||
expectQuery({ containsAny: { numbers: ["none"] } }).toFindNothing())
|
||||
|
||||
it("finds all with empty list", () =>
|
||||
expectQuery({ containsAny: { numbers: [] } }).toContainExactly([
|
||||
{ numbers: ["one", "two"] },
|
||||
{ numbers: ["three"] },
|
||||
]))
|
||||
})
|
||||
})
|
||||
})
|
||||
|
|
|
@@ -8,7 +8,7 @@ import {
|
|||
PermissionLevel,
|
||||
QuotaUsageType,
|
||||
SaveTableRequest,
|
||||
SearchQueryOperators,
|
||||
SearchFilterOperator,
|
||||
SortOrder,
|
||||
SortType,
|
||||
StaticQuotaName,
|
||||
|
@@ -132,7 +132,7 @@ describe.each([
|
|||
primaryDisplay: generator.word(),
|
||||
query: [
|
||||
{
|
||||
operator: SearchQueryOperators.EQUAL,
|
||||
operator: SearchFilterOperator.EQUAL,
|
||||
field: "field",
|
||||
value: "value",
|
||||
},
|
||||
|
@@ -236,7 +236,7 @@ describe.each([
|
|||
...view,
|
||||
query: [
|
||||
{
|
||||
operator: SearchQueryOperators.EQUAL,
|
||||
operator: SearchFilterOperator.EQUAL,
|
||||
field: "newField",
|
||||
value: "thatValue",
|
||||
},
|
||||
|
@@ -263,7 +263,7 @@ describe.each([
|
|||
primaryDisplay: generator.word(),
|
||||
query: [
|
||||
{
|
||||
operator: SearchQueryOperators.EQUAL,
|
||||
operator: SearchFilterOperator.EQUAL,
|
||||
field: generator.word(),
|
||||
value: generator.word(),
|
||||
},
|
||||
|
@@ -341,7 +341,7 @@ describe.each([
|
|||
tableId: generator.guid(),
|
||||
query: [
|
||||
{
|
||||
operator: SearchQueryOperators.EQUAL,
|
||||
operator: SearchFilterOperator.EQUAL,
|
||||
field: "newField",
|
||||
value: "thatValue",
|
||||
},
|
||||
|
@@ -671,7 +671,7 @@ describe.each([
|
|||
name: generator.guid(),
|
||||
query: [
|
||||
{
|
||||
operator: SearchQueryOperators.EQUAL,
|
||||
operator: SearchFilterOperator.EQUAL,
|
||||
field: "two",
|
||||
value: "bar2",
|
||||
},
|
||||
|
|
|
@@ -62,7 +62,6 @@ export const definition: AutomationStepSchema = {
|
|||
}
|
||||
|
||||
export async function run({ inputs }: AutomationStepInput) {
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
const { automationId, ...fieldParams } = inputs.automation
|
||||
|
||||
if (await features.isTriggerAutomationRunEnabled()) {
|
||||
|
|
|
@@ -20,6 +20,7 @@ export enum FilterTypes {
|
|||
NOT_EMPTY = "notEmpty",
|
||||
CONTAINS = "contains",
|
||||
NOT_CONTAINS = "notContains",
|
||||
CONTAINS_ANY = "containsAny",
|
||||
ONE_OF = "oneOf",
|
||||
}
|
||||
|
||||
|
@@ -30,6 +31,7 @@ export const NoEmptyFilterStrings = [
|
|||
FilterTypes.NOT_EQUAL,
|
||||
FilterTypes.CONTAINS,
|
||||
FilterTypes.NOT_CONTAINS,
|
||||
FilterTypes.CONTAINS_ANY,
|
||||
]
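`NOT_CONTAINS` joins the operators that are meaningless with an empty value. A sketch of how a list like `NoEmptyFilterStrings` is typically consumed; this behaviour is an assumption for illustration, not taken from this diff:

```ts
// Assumed consumption of NoEmptyFilterStrings (defined above): drop
// filters of these types when no value was provided.
function pruneEmptyFilters(
  filters: { type: FilterTypes; value?: string }[]
): { type: FilterTypes; value?: string }[] {
  return filters.filter(
    f => !(NoEmptyFilterStrings.includes(f.type) && !f.value)
  )
}
```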
|
||||
|
||||
export const CanSwitchTypes = [
|
||||
|
|
|
@@ -40,6 +40,7 @@ export const USER_METDATA_PREFIX = `${DocumentType.ROW}${SEPARATOR}${dbCore.Inte
|
|||
export const LINK_USER_METADATA_PREFIX = `${DocumentType.LINK}${SEPARATOR}${dbCore.InternalTable.USER_METADATA}${SEPARATOR}`
|
||||
export const TABLE_ROW_PREFIX = `${DocumentType.ROW}${SEPARATOR}${DocumentType.TABLE}`
|
||||
export const AUTOMATION_LOG_PREFIX = `${DocumentType.AUTOMATION_LOG}${SEPARATOR}`
|
||||
export const SQS_DATASOURCE_INTERNAL = "internal"
|
||||
export const ViewName = dbCore.ViewName
|
||||
export const InternalTables = dbCore.InternalTable
|
||||
export const UNICODE_MAX = dbCore.UNICODE_MAX
|
||||
|
|
|
@@ -28,6 +28,7 @@ const DEFAULTS = {
|
|||
PLUGINS_DIR: "/plugins",
|
||||
FORKED_PROCESS_NAME: "main",
|
||||
JS_RUNNER_MEMORY_LIMIT: 64,
|
||||
COUCH_DB_SQL_URL: "http://localhost:4006",
|
||||
}
|
||||
|
||||
const QUERY_THREAD_TIMEOUT =
|
||||
|
@@ -39,6 +40,7 @@ const environment = {
|
|||
// important - prefer app port to generic port
|
||||
PORT: process.env.APP_PORT || process.env.PORT,
|
||||
COUCH_DB_URL: process.env.COUCH_DB_URL,
|
||||
COUCH_DB_SQL_URL: process.env.COUCH_DB_SQL_URL || DEFAULTS.COUCH_DB_SQL_URL,
|
||||
MINIO_URL: process.env.MINIO_URL,
|
||||
WORKER_URL: process.env.WORKER_URL,
|
||||
AWS_REGION: process.env.AWS_REGION,
|
||||
|
|
|
@@ -1,11 +1,8 @@
|
|||
import { features } from "@budibase/backend-core"
|
||||
import env from "./environment"
|
||||
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
enum AppFeature {
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
API = "api",
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
AUTOMATIONS = "automations",
|
||||
}
|
||||
|
||||
|
|
|
@@ -22,6 +22,8 @@ import {
|
|||
SortDirection,
|
||||
SqlQueryBinding,
|
||||
Table,
|
||||
TableSourceType,
|
||||
INTERNAL_TABLE_SOURCE_ID,
|
||||
} from "@budibase/types"
|
||||
import environment from "../../environment"
|
||||
|
||||
|
@@ -135,6 +137,18 @@ function generateSelectStatement(
|
|||
})
|
||||
}
|
||||
|
||||
function getTableName(table?: Table): string | undefined {
|
||||
// SQS uses the table ID rather than the table name
|
||||
if (
|
||||
table?.sourceType === TableSourceType.INTERNAL ||
|
||||
table?.sourceId === INTERNAL_TABLE_SOURCE_ID
|
||||
) {
|
||||
return table?._id
|
||||
} else {
|
||||
return table?.name
|
||||
}
|
||||
}
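`getTableName` encodes the SQS convention stated in the comment: internal tables are addressed by document ID, external tables by name. Hypothetical inputs exercising both branches:

```ts
// Hypothetical tables illustrating both branches of getTableName above.
const internalTable = {
  _id: "ta_abc123",
  name: "people",
  sourceType: TableSourceType.INTERNAL,
} as Table

const externalTable = {
  _id: "datasource_plus_people",
  name: "people",
  sourceType: TableSourceType.EXTERNAL,
  sourceId: "pg_datasource",
} as Table

getTableName(internalTable) // => "ta_abc123"
getTableName(externalTable) // => "people"
```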
|
||||
|
||||
class InternalBuilder {
|
||||
private readonly client: string
|
||||
|
||||
|
@@ -149,7 +163,7 @@ class InternalBuilder {
|
|||
tableName: string,
|
||||
opts: { aliases?: Record<string, string>; relationship?: boolean }
|
||||
): Knex.QueryBuilder {
|
||||
function getTableName(name: string) {
|
||||
function getTableAlias(name: string) {
|
||||
const alias = opts.aliases?.[name]
|
||||
return alias || name
|
||||
}
|
||||
|
@@ -161,11 +175,11 @@ class InternalBuilder {
|
|||
const updatedKey = dbCore.removeKeyNumbering(key)
|
||||
const isRelationshipField = updatedKey.includes(".")
|
||||
if (!opts.relationship && !isRelationshipField) {
|
||||
fn(`${getTableName(tableName)}.${updatedKey}`, value)
|
||||
fn(`${getTableAlias(tableName)}.${updatedKey}`, value)
|
||||
}
|
||||
if (opts.relationship && isRelationshipField) {
|
||||
const [filterTableName, property] = updatedKey.split(".")
|
||||
fn(`${getTableName(filterTableName)}.${property}`, value)
|
||||
fn(`${getTableAlias(filterTableName)}.${property}`, value)
|
||||
}
|
||||
}
|
||||
}
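Renaming the inner helper to `getTableAlias` stops it shadowing the new module-level `getTableName`; the lookup itself simply prefers a configured alias. For example:

```ts
// The alias lookup above, with illustrative inputs.
function getTableAlias(name: string, aliases?: Record<string, string>): string {
  return aliases?.[name] || name
}

getTableAlias("products", { products: "a1" }) // => "a1"
getTableAlias("products", {}) // => "products"
```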
|
||||
|
@@ -233,6 +247,11 @@ class InternalBuilder {
|
|||
(statement ? andOr : "") +
|
||||
`LOWER(${likeKey(this.client, key)}) LIKE ?`
|
||||
}
|
||||
|
||||
if (statement === "") {
|
||||
return
|
||||
}
|
||||
|
||||
// @ts-ignore
|
||||
query = query[rawFnc](`${not}(${statement})`, value)
|
||||
})
|
||||
|
@@ -341,9 +360,10 @@ class InternalBuilder {
|
|||
addSorting(query: Knex.QueryBuilder, json: QueryJson): Knex.QueryBuilder {
|
||||
let { sort, paginate } = json
|
||||
const table = json.meta?.table
|
||||
const tableName = getTableName(table)
|
||||
const aliases = json.tableAliases
|
||||
const aliased =
|
||||
table?.name && aliases?.[table.name] ? aliases[table.name] : table?.name
|
||||
tableName && aliases?.[tableName] ? aliases[tableName] : table?.name
|
||||
if (sort && Object.keys(sort || {}).length > 0) {
|
||||
for (let [key, value] of Object.entries(sort)) {
|
||||
const direction =
|
||||
|
@@ -724,12 +744,13 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
|
|||
results: Record<string, any>[],
|
||||
aliases?: Record<string, string>
|
||||
): Record<string, any>[] {
|
||||
const tableName = getTableName(table)
|
||||
for (const [name, field] of Object.entries(table.schema)) {
|
||||
if (!this._isJsonColumn(field)) {
|
||||
continue
|
||||
}
|
||||
const tableName = aliases?.[table.name] || table.name
|
||||
const fullName = `${tableName}.${name}`
|
||||
const aliasedTableName = (tableName && aliases?.[tableName]) || tableName
|
||||
const fullName = `${aliasedTableName}.${name}`
|
||||
for (let row of results) {
|
||||
if (typeof row[fullName] === "string") {
|
||||
row[fullName] = JSON.parse(row[fullName])
|
||||
|
|
|
@@ -1,19 +1,20 @@
|
|||
import { Knex, knex } from "knex"
|
||||
import {
|
||||
RelationshipType,
|
||||
FieldSubtype,
|
||||
FieldType,
|
||||
NumberFieldMetadata,
|
||||
Operation,
|
||||
QueryJson,
|
||||
RelationshipType,
|
||||
RenameColumn,
|
||||
Table,
|
||||
FieldType,
|
||||
SqlQuery,
|
||||
Table,
|
||||
TableSourceType,
|
||||
} from "@budibase/types"
|
||||
import { breakExternalTableId, getNativeSql, SqlClient } from "../utils"
|
||||
import { utils } from "@budibase/shared-core"
|
||||
import SchemaBuilder = Knex.SchemaBuilder
|
||||
import CreateTableBuilder = Knex.CreateTableBuilder
|
||||
import { utils } from "@budibase/shared-core"
|
||||
|
||||
function isIgnoredType(type: FieldType) {
|
||||
const ignored = [FieldType.LINK, FieldType.FORMULA]
|
||||
|
@@ -105,13 +106,13 @@ function generateSchema(
|
|||
column.relationshipType !== RelationshipType.MANY_TO_MANY
|
||||
) {
|
||||
if (!column.foreignKey || !column.tableId) {
|
||||
throw "Invalid relationship schema"
|
||||
throw new Error("Invalid relationship schema")
|
||||
}
|
||||
const { tableName } = breakExternalTableId(column.tableId)
|
||||
// @ts-ignore
|
||||
const relatedTable = tables[tableName]
|
||||
if (!relatedTable) {
|
||||
throw "Referenced table doesn't exist"
|
||||
throw new Error("Referenced table doesn't exist")
|
||||
}
|
||||
const relatedPrimary = relatedTable.primary[0]
|
||||
const externalType = relatedTable.schema[relatedPrimary].externalType
|
||||
|
@@ -209,15 +210,19 @@ class SqlTableQueryBuilder

     let query: Knex.SchemaBuilder
     if (!json.table || !json.meta || !json.meta.tables) {
-      throw "Cannot execute without table being specified"
+      throw new Error("Cannot execute without table being specified")
     }
+    if (json.table.sourceType === TableSourceType.INTERNAL) {
+      throw new Error("Cannot perform table actions for SQS.")
+    }
+
     switch (this._operation(json)) {
       case Operation.CREATE_TABLE:
         query = buildCreateTable(client, json.table, json.meta.tables)
         break
       case Operation.UPDATE_TABLE:
         if (!json.meta || !json.meta.table) {
-          throw "Must specify old table for update"
+          throw new Error("Must specify old table for update")
         }
         // renameColumn does not work for MySQL, so return a raw query
         if (this.sqlClient === SqlClient.MY_SQL && json.meta.renamed) {
@@ -264,7 +269,7 @@ class SqlTableQueryBuilder
         query = buildDeleteTable(client, json.table)
         break
       default:
-        throw "Table operation is of unknown type"
+        throw new Error("Table operation is of unknown type")
     }
     return getNativeSql(query)
   }

@@ -168,8 +168,7 @@ class GoogleSheetsIntegration implements DatasourcePlus {
     return ""
   }

-  // eslint-disable-next-line @typescript-eslint/no-unused-vars
-  getStringConcat(parts: string[]) {
+  getStringConcat(_parts: string[]) {
     return ""
   }

@@ -9,6 +9,14 @@ import {
 } from "@budibase/types"

 const TABLE_NAME = "test"
+const TABLE: Table = {
+  type: "table",
+  sourceType: TableSourceType.EXTERNAL,
+  sourceId: "SOURCE_ID",
+  schema: {},
+  name: TABLE_NAME,
+  primary: ["id"],
+}

 function endpoint(table: any, operation: any) {
   return {
@@ -25,6 +33,10 @@ function generateReadJson({
   sort,
   paginate,
 }: any = {}): QueryJson {
+  const tableObj = { ...TABLE }
+  if (table) {
+    tableObj.name = table
+  }
   return {
     endpoint: endpoint(table || TABLE_NAME, "READ"),
     resource: {
@@ -34,14 +46,7 @@ function generateReadJson({
     sort: sort || {},
     paginate: paginate || {},
     meta: {
-      table: {
-        type: "table",
-        sourceType: TableSourceType.EXTERNAL,
-        sourceId: "SOURCE_ID",
-        schema: {},
-        name: table || TABLE_NAME,
-        primary: ["id"],
-      } as any,
+      table: tableObj,
     },
   }
 }
@@ -49,6 +54,9 @@ function generateReadJson({
 function generateCreateJson(table = TABLE_NAME, body = {}): QueryJson {
   return {
     endpoint: endpoint(table, "CREATE"),
+    meta: {
+      table: TABLE,
+    },
     body,
   }
 }
@@ -58,7 +66,15 @@ function generateUpdateJson({
   body = {},
   filters = {},
+  meta = {},
 }: {
   table: string
   body?: any
   filters?: any
   meta?: any
 }): QueryJson {
+  if (!meta.table) {
+    meta.table = table
+  }
   return {
     endpoint: endpoint(table, "UPDATE"),
     filters,
@@ -70,6 +86,9 @@ function generateUpdateJson({
 function generateDeleteJson(table = TABLE_NAME, filters = {}): QueryJson {
   return {
     endpoint: endpoint(table, "DELETE"),
+    meta: {
+      table: TABLE,
+    },
     filters,
   }
 }
@@ -102,6 +121,9 @@ function generateRelationshipJson(config: { schema?: string } = {}): QueryJson {
       },
     ],
     extra: { idFilter: {} },
+    meta: {
+      table: TABLE,
+    },
   }
 }

@@ -4,12 +4,26 @@ import {
   QueryJson,
   SourceName,
   SqlQuery,
+  Table,
+  TableSourceType,
 } from "@budibase/types"
 import { join } from "path"
 import Sql from "../base/sql"
 import { SqlClient } from "../utils"
-import AliasTables from "../../api/controllers/row/alias"
 import { generator } from "@budibase/backend-core/tests"
+import sdk from "../../sdk"
+
+// this doesn't exist strictly
+const TABLE: Table = {
+  type: "table",
+  sourceType: TableSourceType.EXTERNAL,
+  sourceId: "SOURCE_ID",
+  schema: {},
+  name: "tableName",
+  primary: ["id"],
+}
+
+const AliasTables = sdk.rows.AliasTables

 function multiline(sql: string) {
   return sql.replace(/\n/g, "").replace(/ +/g, " ")
@@ -220,6 +234,9 @@ describe("Captures of real examples", () => {
       resource: {
         fields,
       },
+      meta: {
+        table: TABLE,
+      },
     }
   }

@@ -35,7 +35,6 @@ export function init() {
       // Because we can't pass functions into an Isolate, we remove them from
       // the passed context and rely on the withHelpers() method to add them
       // back in.
-      // eslint-disable-next-line @typescript-eslint/no-unused-vars
       const { helpers, snippets, ...rest } = ctx
       return vm.withContext(rest, () => vm.execute(js))
     } catch (error: any) {

@@ -3,6 +3,7 @@ import * as rows from "./rows"
 import * as search from "./search"
 import * as utils from "./utils"
 import * as external from "./external"
+import AliasTables from "./sqlAlias"

 export default {
   ...attachments,
@@ -10,4 +11,5 @@ export default {
   ...search,
   utils,
   external,
+  AliasTables,
 }

@@ -13,6 +13,8 @@ import * as sqs from "./search/sqs"
 import env from "../../../environment"
 import { ExportRowsParams, ExportRowsResult } from "./search/types"
 import { dataFilters } from "@budibase/shared-core"
+import sdk from "../../index"
+import { searchInputMapping } from "./search/utils"

 export { isValidFilter } from "../../../integrations/utils"

@@ -29,6 +31,10 @@ function pickApi(tableId: any) {
   return internal
 }

+function isEmptyArray(value: any) {
+  return Array.isArray(value) && value.length === 0
+}
+
 // don't do a pure falsy check, as 0 is included
 // https://github.com/Budibase/budibase/issues/10118
 export function removeEmptyFilters(filters: SearchFilters) {
@@ -47,7 +53,7 @@ export function removeEmptyFilters(filters: SearchFilters) {
       for (let [key, value] of Object.entries(
         filters[filterType] as object
       )) {
-        if (value == null || value === "") {
+        if (value == null || value === "" || isEmptyArray(value)) {
           // @ts-ignore
           delete filters[filterField][key]
         }
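A note on the two hunks above: empty arrays previously survived filter cleanup and turned into no-op clauses downstream. A minimal standalone sketch of the new pruning behaviour, with a local type standing in for the real SearchFilters import:

type Filters = Record<string, Record<string, any> | boolean | undefined>

function isEmptyArray(value: any) {
  return Array.isArray(value) && value.length === 0
}

function removeEmptyFiltersSketch(filters: Filters): Filters {
  for (const fields of Object.values(filters)) {
    // skip non-object members such as the allOr boolean flag
    if (typeof fields !== "object" || fields == null) {
      continue
    }
    for (const [key, value] of Object.entries(fields)) {
      // 0 and false remain valid filter values; only null, "" and [] are dropped
      if (value == null || value === "" || isEmptyArray(value)) {
        delete fields[key]
      }
    }
  }
  return filters
}

// e.g. { oneOf: { tags: [] }, equal: { count: 0 } } keeps the count filter
// but now drops the empty oneOf array.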
@@ -72,12 +78,15 @@ export async function search(
     }
   }

+  const table = await sdk.tables.getTable(options.tableId)
+  options = searchInputMapping(table, options)
+
   if (isExternalTable) {
-    return external.search(options)
+    return external.search(options, table)
   } else if (env.SQS_SEARCH_ENABLE) {
-    return sqs.search(options)
+    return sqs.search(options, table)
   } else {
-    return internal.search(options)
+    return internal.search(options, table)
   }
 }

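The shape of that dispatch, distilled: the table is now resolved and input-mapped once, then threaded through, so none of the three backends re-fetches it. A hedged sketch with pared-down types (the names here are illustrative, not the real sdk surface):

interface TableLike {
  _id: string
  schema: Record<string, unknown>
}
interface SearchOptionsLike {
  tableId: string
  query: Record<string, unknown>
}

async function dispatchSearch(
  options: SearchOptionsLike,
  getTable: (id: string) => Promise<TableLike>,
  impl: (options: SearchOptionsLike, table: TableLike) => Promise<unknown>
) {
  // fetch once here, instead of once per implementation
  const table = await getTable(options.tableId)
  return impl(options, table)
}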
@@ -8,6 +8,7 @@ import {
   SearchFilters,
   RowSearchParams,
   SearchResponse,
+  Table,
 } from "@budibase/types"
 import * as exporters from "../../../../api/controllers/view/exporters"
 import { handleRequest } from "../../../../api/controllers/row/external"
@@ -18,13 +19,13 @@ import {
 import { utils } from "@budibase/shared-core"
 import { ExportRowsParams, ExportRowsResult } from "./types"
 import { HTTPError, db } from "@budibase/backend-core"
-import { searchInputMapping } from "./utils"
 import pick from "lodash/pick"
 import { outputProcessing } from "../../../../utilities/rowProcessor"
 import sdk from "../../../"

 export async function search(
-  options: RowSearchParams
+  options: RowSearchParams,
+  table: Table
 ): Promise<SearchResponse<Row>> {
   const { tableId } = options
   const { paginate, query, ...params } = options
@@ -68,8 +69,6 @@ export async function search(
   }

   try {
-    const table = await sdk.tables.getTable(tableId)
-    options = searchInputMapping(table, options)
     let rows = await handleRequest(Operation.READ, tableId, {
       filters: query,
       sort,
@@ -150,11 +149,15 @@ export async function exportRows(
   }

   const datasource = await sdk.datasources.get(datasourceId!)
+  const table = await sdk.tables.getTable(tableId)
   if (!datasource || !datasource.entities) {
     throw new HTTPError("Datasource has not been configured for plus API.", 400)
   }

-  let result = await search({ tableId, query: requestQuery, sort, sortOrder })
+  let result = await search(
+    { tableId, query: requestQuery, sort, sortOrder },
+    table
+  )
   let rows: Row[] = []
   let headers

@@ -1,6 +1,6 @@
 import { context, db, HTTPError } from "@budibase/backend-core"
 import env from "../../../../environment"
-import { fullSearch, paginatedSearch, searchInputMapping } from "./utils"
+import { fullSearch, paginatedSearch } from "./utils"
 import { getRowParams, InternalTables } from "../../../../db/utils"
 import {
   Database,
@@ -33,7 +33,8 @@ import pick from "lodash/pick"
 import { breakRowIdField } from "../../../../integrations/utils"

 export async function search(
-  options: RowSearchParams
+  options: RowSearchParams,
+  table: Table
 ): Promise<SearchResponse<Row>> {
   const { tableId } = options

@@ -51,8 +52,6 @@ export async function search(
     query: {},
   }

-  let table = await sdk.tables.getTable(tableId)
-  options = searchInputMapping(table, options)
   if (params.sort && !params.sortType) {
     const schema = table.schema
     const sortField = schema[params.sort]
@@ -122,12 +121,15 @@ export async function exportRows(

     result = await outputProcessing<Row[]>(table, response)
   } else if (query) {
-    let searchResponse = await search({
-      tableId,
-      query,
-      sort,
-      sortOrder,
-    })
+    let searchResponse = await search(
+      {
+        tableId,
+        query,
+        sort,
+        sortOrder,
+      },
+      table
+    )
     result = searchResponse.rows
   }

@@ -20,7 +20,12 @@ import {
 } from "../../../../api/controllers/row/utils"
 import sdk from "../../../index"
 import { context } from "@budibase/backend-core"
-import { CONSTANT_INTERNAL_ROW_COLS } from "../../../../db/utils"
+import {
+  CONSTANT_INTERNAL_ROW_COLS,
+  SQS_DATASOURCE_INTERNAL,
+} from "../../../../db/utils"
+import AliasTables from "../sqlAlias"
+import { outputProcessing } from "../../../../utilities/rowProcessor"

 function buildInternalFieldList(
   table: Table,
@@ -31,19 +36,19 @@ function buildInternalFieldList(
   fieldList = fieldList.concat(
     CONSTANT_INTERNAL_ROW_COLS.map(col => `${table._id}.${col}`)
   )
-  if (opts.relationships) {
-    for (let col of Object.values(table.schema)) {
-      if (col.type === FieldType.LINK) {
-        const linkCol = col as RelationshipFieldMetadata
-        const relatedTable = tables.find(
-          table => table._id === linkCol.tableId
-        )!
-        fieldList = fieldList.concat(
-          buildInternalFieldList(relatedTable, tables, { relationships: false })
-        )
-      } else {
-        fieldList.push(`${table._id}.${col.name}`)
-      }
+  for (let col of Object.values(table.schema)) {
+    const isRelationship = col.type === FieldType.LINK
+    if (!opts.relationships && isRelationship) {
+      continue
+    }
+    if (isRelationship) {
+      const linkCol = col as RelationshipFieldMetadata
+      const relatedTable = tables.find(table => table._id === linkCol.tableId)!
+      fieldList = fieldList.concat(
+        buildInternalFieldList(relatedTable, tables, { relationships: false })
+      )
+    } else {
+      fieldList.push(`${table._id}.${col.name}`)
     }
   }
   return fieldList
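For intuition, with an invented "jobs" table that links to "contacts", the reworked buildInternalFieldList flattens to something like:

  [
    "ta_jobs._id", "ta_jobs._rev", /* ...the other CONSTANT_INTERNAL_ROW_COLS */
    "ta_jobs.title",
    "ta_contacts._id", /* ... */ "ta_contacts.name",
  ]

The related table is expanded exactly one level deep, because the recursive call passes { relationships: false } and the new early continue stops any deeper traversal.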
@@ -94,14 +99,14 @@ function buildTableMap(tables: Table[]) {
 }

 export async function search(
-  options: RowSearchParams
+  options: RowSearchParams,
+  table: Table
 ): Promise<SearchResponse<Row>> {
-  const { tableId, paginate, query, ...params } = options
+  const { paginate, query, ...params } = options

   const builder = new SqlQueryBuilder(SqlClient.SQL_LITE)
   const allTables = await sdk.tables.getAllInternalTables()
   const allTablesMap = buildTableMap(allTables)
-  const table = allTables.find(table => table._id === tableId)
   if (!table) {
     throw new Error("Unable to find table")
   }
@@ -111,7 +116,7 @@ export async function search(
   const request: QueryJson = {
     endpoint: {
       // not important, we query ourselves
-      datasourceId: "internal",
+      datasourceId: SQS_DATASOURCE_INTERNAL,
       entityId: table._id!,
       operation: Operation.READ,
     },
@@ -132,7 +137,7 @@ export async function search(
     type: "row",
   }

-  if (params.sort && !params.sortType) {
+  if (params.sort) {
     const sortField = table.schema[params.sort]
     const sortType =
       sortField.type === FieldType.NUMBER ? SortType.NUMBER : SortType.STRING
@@ -154,34 +159,44 @@ export async function search(
     }
   }
   try {
-    const query = builder._query(request, {
-      disableReturning: true,
+    const alias = new AliasTables(allTables.map(table => table.name))
+    const rows = await alias.queryWithAliasing(request, async json => {
+      const query = builder._query(json, {
+        disableReturning: true,
+      })
+
+      if (Array.isArray(query)) {
+        throw new Error("SQS cannot currently handle multiple queries")
+      }
+
+      let sql = query.sql,
+        bindings = query.bindings
+
+      // quick hack for docIds
+      sql = sql.replace(/`doc1`.`rowId`/g, "`doc1.rowId`")
+      sql = sql.replace(/`doc2`.`rowId`/g, "`doc2.rowId`")
+
+      const db = context.getAppDB()
+      return await db.sql<Row>(sql, bindings)
     })

-    if (Array.isArray(query)) {
-      throw new Error("SQS cannot currently handle multiple queries")
-    }
-
-    let sql = query.sql,
-      bindings = query.bindings
-
-    // quick hack for docIds
-    sql = sql.replace(/`doc1`.`rowId`/g, "`doc1.rowId`")
-    sql = sql.replace(/`doc2`.`rowId`/g, "`doc2.rowId`")
-
-    const db = context.getAppDB()
-    const rows = await db.sql<Row>(sql, bindings)
+    // process from the format of tableId.column to expected format
+    const processed = await sqlOutputProcessing(
+      rows,
+      table!,
+      allTablesMap,
+      relationships,
+      {
+        sqs: true,
+      }
+    )

     return {
-      rows: await sqlOutputProcessing(
-        rows,
-        table!,
-        allTablesMap,
-        relationships,
-        {
-          sqs: true,
-        }
-      ),
+      // final row processing for response
+      rows: await outputProcessing<Row[]>(table, processed, {
+        preserveLinks: true,
+        squash: true,
+      }),
     }
   } catch (err: any) {
     const msg = typeof err === "string" ? err : err.message

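A reduced sketch of the callback contract being introduced here: the aliaser rewrites table names in the request, the callback executes it, and the aliaser un-aliases whatever comes back. The types below are pared down from the real QueryJson and DatasourcePlusQueryResponse, so treat this as an outline rather than the actual implementation:

type JsonSketch = {
  endpoint: { datasourceId: string }
  tableAliases?: Record<string, string>
}
type RowsSketch = Record<string, any>[]

async function queryWithAliasingSketch(
  json: JsonSketch,
  queryFn?: (json: JsonSketch) => Promise<RowsSketch>
): Promise<RowsSketch> {
  // ...alias rewriting of table names elided...
  if (!queryFn) {
    throw new Error("No supplied method to perform aliased query")
  }
  // the SQS path supplies the executor: compile SQL with the query builder,
  // then run it against the app database inside this callback
  const rows = await queryFn(json)
  // ...alias reversal on the returned rows elided...
  return rows
}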
@@ -112,7 +112,7 @@ describe("external search", () => {
       tableId,
       query: {},
     }
-    const result = await search(searchParams)
+    const result = await search(searchParams, config.table!)

     expect(result.rows).toHaveLength(10)
     expect(result.rows).toEqual(
@@ -130,7 +130,7 @@ describe("external search", () => {
       query: {},
       fields: ["name", "age"],
     }
-    const result = await search(searchParams)
+    const result = await search(searchParams, config.table!)

     expect(result.rows).toHaveLength(10)
     expect(result.rows).toEqual(
@@ -157,7 +157,7 @@ describe("external search", () => {
         },
       },
     }
-    const result = await search(searchParams)
+    const result = await search(searchParams, config.table!)

     expect(result.rows).toHaveLength(3)
     expect(result.rows.map(row => row.id)).toEqual([1, 4, 8])

@@ -81,7 +81,7 @@ describe("internal", () => {
       tableId,
       query: {},
     }
-    const result = await search(searchParams)
+    const result = await search(searchParams, config.table!)

     expect(result.rows).toHaveLength(10)
     expect(result.rows).toEqual(
@@ -99,7 +99,7 @@ describe("internal", () => {
       query: {},
       fields: ["name", "age"],
     }
-    const result = await search(searchParams)
+    const result = await search(searchParams, config.table!)

     expect(result.rows).toHaveLength(10)
     expect(result.rows).toEqual(

@@ -160,7 +160,7 @@ describe("internal search", () => {
     const response = await search.paginatedSearch(
       {
         contains: {
-          column: "a",
+          column: ["a"],
           colArr: [1, 2, 3],
         },
       },
@@ -168,7 +168,7 @@ describe("internal search", () => {
     )
     checkLucene(
       response,
-      `(*:* AND column:a AND colArr:(1 AND 2 AND 3))`,
+      `(*:* AND column:(a) AND colArr:(1 AND 2 AND 3))`,
       PARAMS
     )
   })

@@ -6,11 +6,12 @@ import {
   Row,
   SearchFilters,
 } from "@budibase/types"
-import { getSQLClient } from "../../../sdk/app/rows/utils"
+import { getSQLClient } from "./utils"
 import { cloneDeep } from "lodash"
-import sdk from "../../../sdk"
+import datasources from "../datasources"
 import { makeExternalQuery } from "../../../integrations/base/query"
 import { SqlClient } from "../../../integrations/utils"
+import { SQS_DATASOURCE_INTERNAL } from "../../../db/utils"

 const WRITE_OPERATIONS: Operation[] = [
   Operation.CREATE,
@@ -156,12 +157,19 @@ export default class AliasTables {
   }

   async queryWithAliasing(
-    json: QueryJson
+    json: QueryJson,
+    queryFn?: (json: QueryJson) => Promise<DatasourcePlusQueryResponse>
   ): Promise<DatasourcePlusQueryResponse> {
     const datasourceId = json.endpoint.datasourceId
-    const datasource = await sdk.datasources.get(datasourceId)
+    const isSqs = datasourceId === SQS_DATASOURCE_INTERNAL
+    let aliasingEnabled: boolean, datasource: Datasource | undefined
+    if (isSqs) {
+      aliasingEnabled = true
+    } else {
+      datasource = await datasources.get(datasourceId)
+      aliasingEnabled = this.isAliasingEnabled(json, datasource)
+    }

-    const aliasingEnabled = this.isAliasingEnabled(json, datasource)
     if (aliasingEnabled) {
       json = cloneDeep(json)
       // run through the query json to update anywhere a table may be used
@@ -207,7 +215,15 @@ export default class AliasTables {
       }
       json.tableAliases = invertedTableAliases
     }
-    const response = await makeExternalQuery(datasource, json)
+
+    let response: DatasourcePlusQueryResponse
+    if (datasource && !isSqs) {
+      response = await makeExternalQuery(datasource, json)
+    } else if (queryFn) {
+      response = await queryFn(json)
+    } else {
+      throw new Error("No supplied method to perform aliased query")
+    }
     if (Array.isArray(response) && aliasingEnabled) {
       return this.reverse(response)
     } else {

@@ -1,8 +1,19 @@
 import { context, SQLITE_DESIGN_DOC_ID } from "@budibase/backend-core"
-import { FieldType, SQLiteDefinition, SQLiteType, Table } from "@budibase/types"
+import {
+  FieldType,
+  RelationshipFieldMetadata,
+  SQLiteDefinition,
+  SQLiteTable,
+  SQLiteTables,
+  SQLiteType,
+  Table,
+} from "@budibase/types"
 import { cloneDeep } from "lodash"
 import tablesSdk from "../"
-import { CONSTANT_INTERNAL_ROW_COLS } from "../../../../db/utils"
+import {
+  CONSTANT_INTERNAL_ROW_COLS,
+  generateJunctionTableID,
+} from "../../../../db/utils"

 const BASIC_SQLITE_DOC: SQLiteDefinition = {
   _id: SQLITE_DESIGN_DOC_ID,
@@ -36,9 +47,42 @@ const FieldTypeMap: Record<FieldType, SQLiteType> = {
   [FieldType.BB_REFERENCE]: SQLiteType.TEXT,
 }

-function mapTable(table: Table): { [key: string]: SQLiteType } {
+function buildRelationshipDefinitions(
+  table: Table,
+  relationshipColumn: RelationshipFieldMetadata
+): {
+  tableId: string
+  definition: SQLiteTable
+} {
+  const tableId = table._id!,
+    relatedTableId = relationshipColumn.tableId
+  return {
+    tableId: generateJunctionTableID(tableId, relatedTableId),
+    definition: {
+      ["doc1.rowId"]: SQLiteType.TEXT,
+      ["doc1.tableId"]: SQLiteType.TEXT,
+      ["doc1.fieldName"]: SQLiteType.TEXT,
+      ["doc2.rowId"]: SQLiteType.TEXT,
+      ["doc2.tableId"]: SQLiteType.TEXT,
+      ["doc2.fieldName"]: SQLiteType.TEXT,
+      tableId: SQLiteType.TEXT,
+    },
+  }
+}
+
+// this can generate relationship tables as part of the mapping
+function mapTable(table: Table): SQLiteTables {
+  const tables: SQLiteTables = {}
   const fields: Record<string, SQLiteType> = {}
   for (let [key, column] of Object.entries(table.schema)) {
+    // relationships should be handled differently
+    if (column.type === FieldType.LINK) {
+      const { tableId, definition } = buildRelationshipDefinitions(
+        table,
+        column
+      )
+      tables[tableId] = { fields: definition }
+    }
     if (!FieldTypeMap[column.type]) {
       throw new Error(`Unable to map type "${column.type}" to SQLite type`)
     }
@@ -49,10 +93,12 @@ function mapTable(table: Table): SQLiteTables {
   CONSTANT_INTERNAL_ROW_COLS.forEach(col => {
     constantMap[col] = SQLiteType.TEXT
   })
-  return {
+  const thisTable: SQLiteTable = {
     ...constantMap,
     ...fields,
   }
+  tables[table._id!] = { fields: thisTable }
+  return tables
 }

 // nothing exists, need to iterate through existing tables
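Illustrative output only (the table IDs below are invented): for a table with one relationship column, mapTable() now returns the row table plus a junction table keyed by generateJunctionTableID, with the junction fields exactly as defined above:

const exampleMapping = {
  ta_a: {
    fields: {
      _id: "TEXT", // CONSTANT_INTERNAL_ROW_COLS all map to TEXT
      name: "TEXT",
    },
  },
  jt_ta_a_ta_b: {
    fields: {
      "doc1.rowId": "TEXT",
      "doc1.tableId": "TEXT",
      "doc1.fieldName": "TEXT",
      "doc2.rowId": "TEXT",
      "doc2.tableId": "TEXT",
      "doc2.fieldName": "TEXT",
      tableId: "TEXT",
    },
  },
}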
@@ -60,8 +106,9 @@ async function buildBaseDefinition(): Promise<SQLiteDefinition> {
   const tables = await tablesSdk.getAllInternalTables()
   const definition = cloneDeep(BASIC_SQLITE_DOC)
   for (let table of tables) {
-    definition.sql.tables[table._id!] = {
-      fields: mapTable(table),
+    definition.sql.tables = {
+      ...definition.sql.tables,
+      ...mapTable(table),
     }
   }
   return definition
@@ -75,8 +122,9 @@ export async function addTableToSqlite(table: Table) {
   } catch (err) {
     definition = await buildBaseDefinition()
   }
-  definition.sql.tables[table._id!] = {
-    fields: mapTable(table),
+  definition.sql.tables = {
+    ...definition.sql.tables,
+    ...mapTable(table),
   }
   await db.put(definition)
 }

@@ -351,7 +351,6 @@ describe("table sdk", () => {
       const view: ViewV2 = {
         ...basicView,
       }
-      // eslint-disable-next-line @typescript-eslint/no-unused-vars
       const { name, description, ...newTableSchema } = basicTable.schema

       const result = syncSchema(_.cloneDeep(view), newTableSchema, undefined)
@@ -365,7 +364,6 @@ describe("table sdk", () => {
       const view: ViewV2 = {
         ...basicView,
       }
-      // eslint-disable-next-line @typescript-eslint/no-unused-vars
       const { description, ...newTableSchema } = {
         ...basicTable.schema,
         updatedDescription: {
@@ -450,7 +448,6 @@ describe("table sdk", () => {
           hiddenField: { visible: false },
         },
       }
-      // eslint-disable-next-line @typescript-eslint/no-unused-vars
       const { name, description, ...newTableSchema } = basicTable.schema

       const result = syncSchema(_.cloneDeep(view), newTableSchema, undefined)
@@ -474,7 +471,6 @@ describe("table sdk", () => {
           hiddenField: { visible: false },
         },
       }
-      // eslint-disable-next-line @typescript-eslint/no-unused-vars
       const { name, description, ...newTableSchema } = {
         ...basicTable.schema,
         newField1: {
@@ -506,7 +502,6 @@ describe("table sdk", () => {
           hiddenField: { visible: false },
         },
       }
-      // eslint-disable-next-line @typescript-eslint/no-unused-vars
       const { description, ...newTableSchema } = {
         ...basicTable.schema,
         updatedDescription: {

@@ -60,7 +60,10 @@ export class DatasourceAPI extends TestAPI {
     })
   }

-  query = async (query: QueryJson, expectations?: Expectations) => {
+  query = async (
+    query: Omit<QueryJson, "meta">,
+    expectations?: Expectations
+  ) => {
     return await this._post<any>(`/api/datasources/query`, {
       body: query,
       expectations,

@@ -262,13 +262,11 @@ export class BaseSocket {
     }
   }

-  // eslint-disable-next-line @typescript-eslint/no-unused-vars
-  async onConnect(socket: Socket) {
+  async onConnect(_socket: Socket) {
     // Override
   }

-  // eslint-disable-next-line @typescript-eslint/no-unused-vars
-  async onDisconnect(socket: Socket) {
+  async onDisconnect(_socket: Socket) {
     // Override
   }

@@ -2,10 +2,11 @@ import {
   Datasource,
   FieldSubtype,
   FieldType,
+  FormulaType,
   SearchFilter,
-  SearchQuery,
+  SearchFilters,
   SearchQueryFields,
-  SearchQueryOperators,
+  SearchFilterOperator,
   SortDirection,
   SortType,
 } from "@budibase/types"
@@ -19,9 +20,13 @@ const HBS_REGEX = /{{([^{].*?)}}/g
  * Returns the valid operator options for a certain data type
  */
 export const getValidOperatorsForType = (
-  fieldType: { type: FieldType; subtype?: FieldSubtype },
+  fieldType: {
+    type: FieldType
+    subtype?: FieldSubtype
+    formulaType?: FormulaType
+  },
   field: string,
-  datasource: Datasource & { tableId: any } // TODO: is this table id ever populated?
+  datasource: Datasource & { tableId: any }
 ) => {
   const Op = OperatorOptions
   const stringOps = [
@@ -46,7 +51,7 @@ export const getValidOperatorsForType = (
     value: string
     label: string
   }[] = []
-  const { type, subtype } = fieldType
+  const { type, subtype, formulaType } = fieldType
   if (type === FieldType.STRING) {
     ops = stringOps
   } else if (type === FieldType.NUMBER || type === FieldType.BIGINT) {
@@ -61,7 +66,7 @@ export const getValidOperatorsForType = (
     ops = stringOps
   } else if (type === FieldType.DATETIME) {
     ops = numOps
-  } else if (type === FieldType.FORMULA) {
+  } else if (type === FieldType.FORMULA && formulaType === FormulaType.STATIC) {
     ops = stringOps.concat([Op.MoreThan, Op.LessThan])
   } else if (type === FieldType.BB_REFERENCE && subtype == FieldSubtype.USER) {
     ops = [Op.Equals, Op.NotEquals, Op.Empty, Op.NotEmpty, Op.In]
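Hedged usage sketch of the change above: a static formula column keeps the string-style operators, while a dynamic formula (no stored value to filter on) now gets none. This assumes the helper is reachable via the shared-core dataFilters export, and the datasource argument is stubbed since only the type info matters here:

import { dataFilters } from "@budibase/shared-core"
import { FieldType, FormulaType } from "@budibase/types"

const { getValidOperatorsForType } = dataFilters

const staticFormulaOps = getValidOperatorsForType(
  { type: FieldType.FORMULA, formulaType: FormulaType.STATIC },
  "total",
  { tableId: undefined } as any // stub: only fieldType drives the result
)
const dynamicFormulaOps = getValidOperatorsForType(
  { type: FieldType.FORMULA },
  "total",
  { tableId: undefined } as any
)
// staticFormulaOps is non-empty; dynamicFormulaOps comes back as []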
@@ -94,18 +99,19 @@ export const NoEmptyFilterStrings = [
  * Removes any fields that contain empty strings that would cause inconsistent
  * behaviour with how backend tables are filtered (no value means no filter).
  */
-const cleanupQuery = (query: SearchQuery) => {
+const cleanupQuery = (query: SearchFilters) => {
   if (!query) {
     return query
   }
   for (let filterField of NoEmptyFilterStrings) {
-    if (!query[filterField]) {
+    const operator = filterField as SearchFilterOperator
+    if (!query[operator]) {
       continue
     }

-    for (let [key, value] of Object.entries(query[filterField]!)) {
+    for (let [key, value] of Object.entries(query[operator]!)) {
       if (value == null || value === "") {
-        delete query[filterField]![key]
+        delete query[operator]![key]
       }
     }
   }
@@ -115,9 +121,10 @@ const cleanupQuery = (query: SearchFilters) => {
 /**
  * Removes a numeric prefix on field names designed to give fields uniqueness
  */
-const removeKeyNumbering = (key: string) => {
+export const removeKeyNumbering = (key: string): string => {
   if (typeof key === "string" && key.match(/\d[0-9]*:/g) != null) {
     const parts = key.split(":")
     // remove the number
     parts.shift()
     return parts.join(":")
   } else {
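Now that removeKeyNumbering is exported, callers can strip the uniqueness prefix themselves. Expected behaviour, following the implementation above:

removeKeyNumbering("1:name")   // => "name"
removeKeyNumbering("2:1:name") // => "1:name" (only the first prefix is dropped)
removeKeyNumbering("name")     // => "name" (no prefix, returned untouched)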
@@ -130,7 +137,7 @@ export const removeKeyNumbering = (key: string): string => {
  * @param filter the builder filter structure
  */
 export const buildLuceneQuery = (filter: SearchFilter[]) => {
-  let query: SearchQuery = {
+  let query: SearchFilters = {
     string: {},
     fuzzy: {},
     range: {},
@@ -151,6 +158,7 @@ export const buildLuceneQuery = (filter: SearchFilter[]) => {
   filter.forEach(expression => {
     let { operator, field, type, value, externalType, onEmptyFilter } =
       expression
+    const queryOperator = operator as SearchFilterOperator
     const isHbs =
       typeof value === "string" && (value.match(HBS_REGEX) || []).length > 0
     // Parse all values into correct types
@@ -165,8 +173,8 @@ export const buildLuceneQuery = (filter: SearchFilter[]) => {
     if (
       type === "datetime" &&
       !isHbs &&
-      operator !== "empty" &&
-      operator !== "notEmpty"
+      queryOperator !== "empty" &&
+      queryOperator !== "notEmpty"
     ) {
       // Ensure date value is a valid date and parse into correct format
       if (!value) {
@@ -179,7 +187,7 @@ export const buildLuceneQuery = (filter: SearchFilter[]) => {
       }
     }
     if (type === "number" && typeof value === "string" && !isHbs) {
-      if (operator === "oneOf") {
+      if (queryOperator === "oneOf") {
         value = value.split(",").map(item => parseFloat(item))
       } else {
         value = parseFloat(value)
@@ -219,24 +227,24 @@ export const buildLuceneQuery = (filter: SearchFilter[]) => {
       ) {
         query.range[field].high = value
       }
-    } else if (query[operator] && operator !== "onEmptyFilter") {
+    } else if (query[queryOperator] && operator !== "onEmptyFilter") {
       if (type === "boolean") {
         // Transform boolean filters to cope with null.
         // "equals false" needs to be "not equals true"
         // "not equals false" needs to be "equals true"
-        if (operator === "equal" && value === false) {
+        if (queryOperator === "equal" && value === false) {
           query.notEqual = query.notEqual || {}
           query.notEqual[field] = true
-        } else if (operator === "notEqual" && value === false) {
+        } else if (queryOperator === "notEqual" && value === false) {
           query.equal = query.equal || {}
           query.equal[field] = true
         } else {
-          query[operator] = query[operator] || {}
-          query[operator]![field] = value
+          query[queryOperator] = query[queryOperator] || {}
+          query[queryOperator]![field] = value
         }
       } else {
-        query[operator] = query[operator] || {}
-        query[operator]![field] = value
+        query[queryOperator] = query[queryOperator] || {}
+        query[queryOperator]![field] = value
       }
     }
   })
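A worked example of the boolean rewrite with the new enum cast applied. An "equal false" filter cannot match rows where the column is NULL, so it is flipped to "notEqual true" (and vice versa); the filter value is a string here because that is how the builder sends it, and the earlier casting step turns it into a boolean:

import { buildLuceneQuery } from "@budibase/shared-core"
import { FieldType, SearchFilterOperator } from "@budibase/types"

const query = buildLuceneQuery([
  {
    operator: SearchFilterOperator.EQUAL,
    field: "archived",
    type: FieldType.BOOLEAN,
    value: "false",
  },
])
// query.equal stays empty; query.notEqual becomes { archived: true }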
@@ -249,7 +257,7 @@
  * @param docs the data
  * @param query the JSON lucene query
  */
-export const runLuceneQuery = (docs: any[], query?: SearchQuery) => {
+export const runLuceneQuery = (docs: any[], query?: SearchFilters) => {
   if (!docs || !Array.isArray(docs)) {
     return []
   }
@@ -263,7 +271,7 @@ export const runLuceneQuery = (docs: any[], query?: SearchFilters) => {
   // Iterates over a set of filters and evaluates a fail function against a doc
   const match =
     (
-      type: keyof SearchQueryFields,
+      type: SearchFilterOperator,
       failFn: (docValue: any, testValue: any) => boolean
     ) =>
     (doc: any) => {
@@ -280,7 +288,7 @@ export const runLuceneQuery = (docs: any[], query?: SearchFilters) => {

   // Process a string match (fails if the value does not start with the string)
   const stringMatch = match(
-    SearchQueryOperators.STRING,
+    SearchFilterOperator.STRING,
     (docValue: string, testValue: string) => {
       return (
         !docValue ||
@@ -291,7 +299,7 @@ export const runLuceneQuery = (docs: any[], query?: SearchFilters) => {

   // Process a fuzzy match (treat the same as starts with when running locally)
   const fuzzyMatch = match(
-    SearchQueryOperators.FUZZY,
+    SearchFilterOperator.FUZZY,
     (docValue: string, testValue: string) => {
       return (
         !docValue ||
@@ -302,7 +310,7 @@ export const runLuceneQuery = (docs: any[], query?: SearchFilters) => {

   // Process a range match
   const rangeMatch = match(
-    SearchQueryOperators.RANGE,
+    SearchFilterOperator.RANGE,
     (
       docValue: string | number | null,
       testValue: { low: number; high: number }
@@ -325,7 +333,7 @@ export const runLuceneQuery = (docs: any[], query?: SearchFilters) => {

   // Process an equal match (fails if the value is different)
   const equalMatch = match(
-    SearchQueryOperators.EQUAL,
+    SearchFilterOperator.EQUAL,
     (docValue: any, testValue: string | null) => {
       return testValue != null && testValue !== "" && docValue !== testValue
     }
@@ -333,7 +341,7 @@ export const runLuceneQuery = (docs: any[], query?: SearchFilters) => {

   // Process a not-equal match (fails if the value is the same)
   const notEqualMatch = match(
-    SearchQueryOperators.NOT_EQUAL,
+    SearchFilterOperator.NOT_EQUAL,
     (docValue: any, testValue: string | null) => {
       return testValue != null && testValue !== "" && docValue === testValue
     }
@@ -341,7 +349,7 @@ export const runLuceneQuery = (docs: any[], query?: SearchFilters) => {

   // Process an empty match (fails if the value is not empty)
   const emptyMatch = match(
-    SearchQueryOperators.EMPTY,
+    SearchFilterOperator.EMPTY,
     (docValue: string | null) => {
       return docValue != null && docValue !== ""
     }
@@ -349,7 +357,7 @@ export const runLuceneQuery = (docs: any[], query?: SearchFilters) => {

   // Process a not-empty match (fails if the value is empty)
   const notEmptyMatch = match(
-    SearchQueryOperators.NOT_EMPTY,
+    SearchFilterOperator.NOT_EMPTY,
     (docValue: string | null) => {
       return docValue == null || docValue === ""
     }
@@ -357,7 +365,7 @@ export const runLuceneQuery = (docs: any[], query?: SearchFilters) => {

   // Process an includes match (fails if the value is not included)
   const oneOf = match(
-    SearchQueryOperators.ONE_OF,
+    SearchFilterOperator.ONE_OF,
     (docValue: any, testValue: any) => {
       if (typeof testValue === "string") {
         testValue = testValue.split(",")
@@ -370,28 +378,28 @@ export const runLuceneQuery = (docs: any[], query?: SearchFilters) => {
   )

   const containsAny = match(
-    SearchQueryOperators.CONTAINS_ANY,
+    SearchFilterOperator.CONTAINS_ANY,
     (docValue: any, testValue: any) => {
       return !docValue?.includes(...testValue)
     }
   )

   const contains = match(
-    SearchQueryOperators.CONTAINS,
+    SearchFilterOperator.CONTAINS,
     (docValue: string | any[], testValue: any[]) => {
       return !testValue?.every((item: any) => docValue?.includes(item))
     }
   )

   const notContains = match(
-    SearchQueryOperators.NOT_CONTAINS,
+    SearchFilterOperator.NOT_CONTAINS,
     (docValue: string | any[], testValue: any[]) => {
       return testValue?.every((item: any) => docValue?.includes(item))
     }
   )

   const docMatch = (doc: any) => {
-    const filterFunctions: Record<SearchQueryOperators, (doc: any) => boolean> =
+    const filterFunctions: Record<SearchFilterOperator, (doc: any) => boolean> =
       {
         string: stringMatch,
         fuzzy: fuzzyMatch,
@@ -406,7 +414,7 @@ export const runLuceneQuery = (docs: any[], query?: SearchFilters) => {
         notContains: notContains,
       }

-    const activeFilterKeys: SearchQueryOperators[] = Object.entries(query || {})
+    const activeFilterKeys: SearchFilterOperator[] = Object.entries(query || {})
       .filter(
         ([key, value]: [string, any]) =>
           !["allOr", "onEmptyFilter"].includes(key) &&
@@ -474,7 +482,7 @@ export const luceneLimit = (docs: any[], limit: string) => {
   return docs.slice(0, numLimit)
 }

-export const hasFilters = (query?: SearchQuery) => {
+export const hasFilters = (query?: SearchFilters) => {
   if (!query) {
     return false
   }

@@ -1,6 +1,6 @@
 import {
-  SearchQuery,
-  SearchQueryOperators,
+  SearchFilters,
+  SearchFilterOperator,
   FieldType,
   SearchFilter,
 } from "@budibase/types"
@@ -46,8 +46,8 @@ describe("runLuceneQuery", () => {
     },
   ]

-  function buildQuery(filters: { [filterKey: string]: any }): SearchQuery {
-    const query: SearchQuery = {
+  function buildQuery(filters: { [filterKey: string]: any }): SearchFilters {
+    const query: SearchFilters = {
       string: {},
       fuzzy: {},
       range: {},
@@ -63,7 +63,7 @@ describe("runLuceneQuery", () => {
     }

     for (const filterKey in filters) {
-      query[filterKey as SearchQueryOperators] = filters[filterKey]
+      query[filterKey as SearchFilterOperator] = filters[filterKey]
     }

     return query
@@ -265,13 +265,13 @@ describe("buildLuceneQuery", () => {
   it("should parseFloat if the type is a number, but the value is a numeric string", () => {
     const filter: SearchFilter[] = [
       {
-        operator: SearchQueryOperators.EQUAL,
+        operator: SearchFilterOperator.EQUAL,
         field: "customer_id",
         type: FieldType.NUMBER,
         value: "1212",
       },
       {
-        operator: SearchQueryOperators.ONE_OF,
+        operator: SearchFilterOperator.ONE_OF,
         field: "customer_id",
         type: FieldType.NUMBER,
         value: "1000,1212,3400",
@@ -299,13 +299,13 @@ describe("buildLuceneQuery", () => {
   it("should not parseFloat if the type is a number, but the value is a handlebars binding string", () => {
     const filter: SearchFilter[] = [
       {
-        operator: SearchQueryOperators.EQUAL,
+        operator: SearchFilterOperator.EQUAL,
         field: "customer_id",
         type: FieldType.NUMBER,
         value: "{{ customer_id }}",
       },
      {
-        operator: SearchQueryOperators.ONE_OF,
+        operator: SearchFilterOperator.ONE_OF,
         field: "customer_id",
         type: FieldType.NUMBER,
         value: "{{ list_of_customer_ids }}",
@@ -333,19 +333,19 @@ describe("buildLuceneQuery", () => {
   it("should cast string to boolean if the type is boolean", () => {
     const filter: SearchFilter[] = [
       {
-        operator: SearchQueryOperators.EQUAL,
+        operator: SearchFilterOperator.EQUAL,
         field: "a",
         type: FieldType.BOOLEAN,
         value: "not_true",
       },
       {
-        operator: SearchQueryOperators.NOT_EQUAL,
+        operator: SearchFilterOperator.NOT_EQUAL,
         field: "b",
         type: FieldType.BOOLEAN,
         value: "not_true",
       },
       {
-        operator: SearchQueryOperators.EQUAL,
+        operator: SearchFilterOperator.EQUAL,
         field: "c",
         type: FieldType.BOOLEAN,
         value: "true",
@@ -374,19 +374,19 @@ describe("buildLuceneQuery", () => {
   it("should split the string for contains operators", () => {
     const filter: SearchFilter[] = [
       {
-        operator: SearchQueryOperators.CONTAINS,
+        operator: SearchFilterOperator.CONTAINS,
         field: "description",
         type: FieldType.ARRAY,
         value: "Large box,Heavy box,Small box",
       },
       {
-        operator: SearchQueryOperators.NOT_CONTAINS,
+        operator: SearchFilterOperator.NOT_CONTAINS,
         field: "description",
         type: FieldType.ARRAY,
         value: "Large box,Heavy box,Small box",
       },
      {
-        operator: SearchQueryOperators.CONTAINS_ANY,
+        operator: SearchFilterOperator.CONTAINS_ANY,
         field: "description",
         type: FieldType.ARRAY,
         value: "Large box,Heavy box,Small box",

@@ -12,11 +12,12 @@ import {
 } from "./utilities"
 import { convertHBSBlock } from "./conversion"
 import { setJSRunner, removeJSRunner } from "./helpers/javascript"
 import { helpersToRemoveForJs } from "./helpers/list"

 import manifest from "./manifest.json"
 import { ProcessOptions } from "./types"

+export { helpersToRemoveForJs } from "./helpers/list"
+export { FIND_ANY_HBS_REGEX } from "./utilities"
 export { setJSRunner, setOnErrorLog } from "./helpers/javascript"
 export { iifeWrapper } from "./iife"
@@ -412,15 +413,9 @@ export function convertToJS(hbs: string) {
   return `${varBlock}${js}`
 }

-const _FIND_ANY_HBS_REGEX = FIND_ANY_HBS_REGEX
-export { _FIND_ANY_HBS_REGEX as FIND_ANY_HBS_REGEX }
-
 export { JsErrorTimeout } from "./errors"

-const _helpersToRemoveForJs = helpersToRemoveForJs
-export { _helpersToRemoveForJs as helpersToRemoveForJs }
-
-function defaultJSSetup() {
+export function defaultJSSetup() {
   if (!isBackendService()) {
     /**
      * Use polyfilled vm to run JS scripts in a browser Env
@@ -440,6 +435,3 @@ export function defaultJSSetup() {
   }
 }
 defaultJSSetup()
-
-const _defaultJSSetup = defaultJSSetup
-export { _defaultJSSetup as defaultJSSetup }

@@ -4,7 +4,6 @@ export const PostProcessorNames = {
   CONVERT_LITERALS: "convert-literals",
 }

-/* eslint-disable no-unused-vars */
 class Postprocessor {
   name: string
   private fn: any

@@ -9,7 +9,6 @@ export const PreprocessorNames = {
   FINALISE: "finalise",
 }

-/* eslint-disable no-unused-vars */
 class Preprocessor {
   name: string
   private fn: any

@@ -126,6 +126,63 @@ describe("Test that the object processing works correctly", () => {
     })
   })

+  describe("check arrays", () => {
+    describe("index with square brackets", () => {
+      it.each([
+        [0, "1"],
+        [1, "2"],
+      ])("should handle an array of primitive types", async (index, expected) => {
+        const json = [1, 2, 3]
+        const output = await processString(`{{ testing.[${index}] }}`, {
+          testing: json,
+        })
+        expect(output).toEqual(expected)
+      })
+
+      it("should handle an array of objects", async () => {
+        const json = [{ value: 1 }, { value: 2 }, { value: 3 }]
+        const output = await processString("{{ testing.[1] }}", {
+          testing: json,
+        })
+        expect(output).toEqual('{"value":2}')
+      })
+
+      it("should handle nesting properties in an array of objects", async () => {
+        const json = [{ value: 1 }, { value: 2 }, { value: 3 }]
+        const output = await processString("{{ testing.[1].value }}", {
+          testing: json,
+        })
+        expect(output).toEqual("2")
+      })
+    })
+
+    describe("index without square brackets", () => {
+      it("should not handle an array of primitive types", async () => {
+        const json = [1, 2, 3]
+        const output = await processString(`{{ testing.1 }}`, {
+          testing: json,
+        })
+        expect(output).toEqual("{{ testing.1 }}")
+      })
+
+      it("should not handle an array of objects", async () => {
+        const json = [{ value: 1 }, { value: 2 }, { value: 3 }]
+        const output = await processString("{{ testing.1 }}", {
+          testing: json,
+        })
+        expect(output).toEqual("{{ testing.1 }}")
+      })
+
+      it("should handle nesting properties in an array of object types", async () => {
+        const json = [{ value: 1 }, { value: 2 }, { value: 3 }]
+        const output = await processString("{{ testing.1.value }}", {
+          testing: json,
+        })
+        expect(output).toEqual("2")
+      })
+    })
+  })
+
   describe("check returning objects", () => {
     it("should handle an array of objects", async () => {
       const json = [{ a: 1 }, { a: 2 }]

@@ -1,68 +1,11 @@
 import { FieldType } from "../../documents"
-import { EmptyFilterOption } from "../../sdk"
+import { EmptyFilterOption, SearchFilters } from "../../sdk"

 export type SearchFilter = {
-  operator: keyof SearchQuery
+  operator: keyof SearchFilters | "rangeLow" | "rangeHigh"
   onEmptyFilter?: EmptyFilterOption
   field: string
   type?: FieldType
   value: any
   externalType?: string
 }
-
-export enum SearchQueryOperators {
-  STRING = "string",
-  FUZZY = "fuzzy",
-  RANGE = "range",
-  EQUAL = "equal",
-  NOT_EQUAL = "notEqual",
-  EMPTY = "empty",
-  NOT_EMPTY = "notEmpty",
-  ONE_OF = "oneOf",
-  CONTAINS = "contains",
-  NOT_CONTAINS = "notContains",
-  CONTAINS_ANY = "containsAny",
-}
-
-export type SearchQuery = {
-  allOr?: boolean
-  onEmptyFilter?: EmptyFilterOption
-  [SearchQueryOperators.STRING]?: {
-    [key: string]: string
-  }
-  [SearchQueryOperators.FUZZY]?: {
-    [key: string]: string
-  }
-  [SearchQueryOperators.RANGE]?: {
-    [key: string]: {
-      high: number | string
-      low: number | string
-    }
-  }
-  [SearchQueryOperators.EQUAL]?: {
-    [key: string]: any
-  }
-  [SearchQueryOperators.NOT_EQUAL]?: {
-    [key: string]: any
-  }
-  [SearchQueryOperators.EMPTY]?: {
-    [key: string]: any
-  }
-  [SearchQueryOperators.NOT_EMPTY]?: {
-    [key: string]: any
-  }
-  [SearchQueryOperators.ONE_OF]?: {
-    [key: string]: any[]
-  }
-  [SearchQueryOperators.CONTAINS]?: {
-    [key: string]: any[]
-  }
-  [SearchQueryOperators.NOT_CONTAINS]?: {
-    [key: string]: any[]
-  }
-  [SearchQueryOperators.CONTAINS_ANY]?: {
-    [key: string]: any[]
-  }
-}
-
-export type SearchQueryFields = Omit<SearchQuery, "allOr" | "onEmptyFilter">

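With the operator union widened, a builder filter can now target one side of a range directly. A small sketch of the new shape (the field and value are illustrative):

import { SearchFilter } from "@budibase/types"

const lowBound: SearchFilter = {
  operator: "rangeLow",
  field: "age",
  value: 18,
}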
@@ -1,5 +1,5 @@
 import { User } from "../../documents"
-import { SearchQuery } from "./searchFilter"
+import { SearchFilters } from "../../sdk"

 export interface SaveUserResponse {
   _id: string
@@ -55,7 +55,7 @@ export interface InviteUsersResponse {

 export interface SearchUsersRequest {
   bookmark?: string
-  query?: SearchQuery
+  query?: SearchFilters
   appId?: string
   limit?: number
   paginate?: boolean

@@ -102,6 +102,7 @@ export function isVerifiableSSOProvider(provider: AccountSSOProvider): boolean {
 }

 export interface AccountSSO {
+  ssoId?: string
   provider: AccountSSOProvider
   providerType: AccountSSOProviderType
   oauth2?: OAuthTokens

@@ -1,22 +1,111 @@
 import { Document } from "../document"

 export enum FieldType {
+  /**
+   * a primitive type, stores a string, called Text within Budibase. This is one of the default
+   * types of Budibase, if an external type is not fully understood, we will treat it as text.
+   */
   STRING = "string",
+  /**
+   * similar to string type, called Long Form Text within Budibase. This is mainly a frontend
+   * orientated type which enables a larger text input area. This can also be used
+   * in conjunction with the 'useRichText' option to support a markdown editor/viewer.
+   */
   LONGFORM = "longform",
+  /**
+   * similar to string type, called Options within Budibase. This works very similarly to
+   * the string type within the backend, but is validated to a list of options. This will
+   * display a <select> input within the builder/client.
+   */
   OPTIONS = "options",
+  /**
+   * a primitive type, stores a number, as a floating point, called Number within Budibase.
+   * This type will always represent numbers as reals/floating point - there is no integer only
+   * type within Budibase.
+   */
   NUMBER = "number",
+  /**
+   * a primitive type, stores a boolean, called Boolean within Budibase. This is often represented
+   * as a toggle or checkbox within forms/grids.
+   */
   BOOLEAN = "boolean",
+  /**
+   * a JSON type, this type is always an array of strings, called Multi-select within Budibase.
+   * This type can be compared to the options type, as it functions similarly, but allows picking
+   * multiple options rather than a single option.
+   */
   ARRAY = "array",
+  /**
+   * a string type, this is always a string when input/returned from the API, called Date/Time within
+   * Budibase. We utilise ISO date strings for representing dates, this type has a range of subtypes
+   * to restrict it to date only, time only and ignore timezone capabilities.
+   */
   DATETIME = "datetime",
+  /**
+   * a JSON type, an array of metadata about files held in object storage, called Attachment List within
+   * Budibase. To utilise this type there is an API for uploading files to Budibase, which returns metadata
+   * that can be stored against columns of this type. Currently this is not supported on external databases.
+   */
   ATTACHMENTS = "attachment",
+  /**
+   * a JSON type, similar to the attachments type, called Attachment within Budibase. This type functions
+   * much the same as the attachment list, but only holds a single attachment metadata as an object.
+   * This simplifies the binding experience of using this column type.
+   */
   ATTACHMENT_SINGLE = "attachment_single",
+  /**
+   * a complex type, called Relationships within Budibase. This is the most complex type of Budibase,
+   * nothing should be stored against rows under link columns; this type simply represents the
+   * relationship between tables as part of the table schema. When rows are input to the Budibase API
+   * relationships to be made are represented as a list of row IDs to link. When rows are returned
+   * from the Budibase API it will contain a list of row IDs and display column values of the related rows.
+   */
   LINK = "link",
+  /**
+   * a complex type, called Formulas within Budibase. This type has two variants, static and dynamic, with
+   * static only being supported against internal tables. Dynamic formulas calculate a provided HBS/JS binding
+   * based on the row context and enrich it when rows are being returned from the API. Static bindings calculate
+   * this when rows are being stored, so that the formula output can be searched upon within the DB.
+   */
   FORMULA = "formula",
+  /**
+   * a complex type, called Auto Column within Budibase. This type has a few variants, with options such as a
+   * date for created at/updated at, an auto ID column which auto-increments as rows are saved and a user
+   * relationship type which stores the created by/updated by user details. These subtypes all depend on the
+   * date, number or link types respectively. There is one case where these will be executed in the browser:
+   * as part of the initial formula definition, the formula will be live evaluated in the browser.
+   */
   AUTO = "auto",
+  /**
+   * a JSON type, called JSON within Budibase. This type allows any arbitrary JSON to be input to this column
+   * type, which will be represented as a JSON object in the row. This type depends on a schema being
+   * provided to make the JSON searchable/bindable, the JSON cannot be fully dynamic.
+   */
   JSON = "json",
+  /**
+   * @deprecated an internal type, this is an old deprecated type which is no longer used - still represented
+   * to note it could appear in very old tables.
+   */
   INTERNAL = "internal",
+  /**
+   * a string type, called Barcode/QR within Budibase. This type is used to denote to forms that this column
+   * should be filled in using a camera to read a barcode; there is a form component which will be used when
+   * this type is found. The column will contain the contents of any barcode scanned.
+   */
   BARCODEQR = "barcodeqr",
+  /**
+   * a string type, this allows representing very large integers, but they are held/managed within Budibase as
+   * strings. When stored in external databases Budibase will attempt to use a real big integer type and depend
+   * on the database parsing the string to this type as part of saving.
+   */
   BIGINT = "bigint",
+  /**
+   * a JSON type, called User within Budibase. This type is used to represent a link to an internal Budibase
+   * resource, like a user or group, today only users are supported. This type will be represented as an
+   * array of internal resource IDs (e.g. user IDs) within the row - this ID list will be enriched with
+   * the full resources when rows are returned from the API. The full resources can be input to the API, or
+   * an array of resource IDs, the API will squash these down and validate them before saving the row.
+   */
   BB_REFERENCE = "bb_reference",
 }

@@ -6,17 +6,23 @@ export enum SQLiteType {
   NUMERIC = "NUMERIC",
 }

+export type SQLiteTable = Record<
+  string,
+  SQLiteType | { field: string; type: SQLiteType }
+>
+
+export type SQLiteTables = Record<
+  string,
+  {
+    fields: SQLiteTable
+  }
+>
+
 export interface SQLiteDefinition {
   _id: string
   language: string
   sql: {
-    tables: {
-      [tableName: string]: {
-        fields: {
-          [key: string]: SQLiteType | { field: string; type: SQLiteType }
-        }
-      }
-    }
+    tables: SQLiteTables
     options: {
       table_name: string
     }

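A minimal document under the extracted types (the table ID is illustrative):

import { SQLiteTables, SQLiteType } from "@budibase/types"

const exampleTables: SQLiteTables = {
  ta_users: {
    fields: {
      name: SQLiteType.TEXT,
      age: SQLiteType.NUMERIC,
    },
  },
}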
@@ -3,47 +3,63 @@ import { Row, Table } from "../documents"
 import { SortType } from "../api"
 import { Knex } from "knex"

+export enum SearchFilterOperator {
+  STRING = "string",
+  FUZZY = "fuzzy",
+  RANGE = "range",
+  EQUAL = "equal",
+  NOT_EQUAL = "notEqual",
+  EMPTY = "empty",
+  NOT_EMPTY = "notEmpty",
+  ONE_OF = "oneOf",
+  CONTAINS = "contains",
+  NOT_CONTAINS = "notContains",
+  CONTAINS_ANY = "containsAny",
+}
+
 export interface SearchFilters {
   allOr?: boolean
   onEmptyFilter?: EmptyFilterOption
-  string?: {
+  [SearchFilterOperator.STRING]?: {
     [key: string]: string
   }
-  fuzzy?: {
+  [SearchFilterOperator.FUZZY]?: {
     [key: string]: string
   }
-  range?: {
+  [SearchFilterOperator.RANGE]?: {
     [key: string]: {
       high: number | string
       low: number | string
     }
   }
-  equal?: {
+  [SearchFilterOperator.EQUAL]?: {
     [key: string]: any
   }
-  notEqual?: {
+  [SearchFilterOperator.NOT_EQUAL]?: {
     [key: string]: any
   }
-  empty?: {
+  [SearchFilterOperator.EMPTY]?: {
     [key: string]: any
   }
-  notEmpty?: {
+  [SearchFilterOperator.NOT_EMPTY]?: {
     [key: string]: any
   }
-  oneOf?: {
+  [SearchFilterOperator.ONE_OF]?: {
     [key: string]: any[]
   }
-  contains?: {
+  [SearchFilterOperator.CONTAINS]?: {
     [key: string]: any[] | any
   }
-  notContains?: {
+  [SearchFilterOperator.NOT_CONTAINS]?: {
     [key: string]: any[]
   }
-  containsAny?: {
+  [SearchFilterOperator.CONTAINS_ANY]?: {
     [key: string]: any[]
   }
 }

 export type SearchQueryFields = Omit<SearchFilters, "allOr" | "onEmptyFilter">

 export interface SortJson {
   [key: string]: {
     direction: SortDirection
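
Because the computed keys evaluate to the same literal strings as the old property names (SearchFilterOperator.EQUAL === "equal", and so on), the JSON shape of a filter payload is unchanged; only the TypeScript surface gains an enum. A filter written against the new types might look like this sketch, with invented field names:

  const filters: SearchFilters = {
    allOr: false,
    [SearchFilterOperator.EQUAL]: { status: "active" },
    [SearchFilterOperator.RANGE]: { age: { low: 18, high: 65 } },
  }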
@@ -90,8 +106,8 @@ export interface QueryJson
   paginate?: PaginationJson
   body?: Row | Row[]
   table?: Table
-  meta?: {
-    table?: Table
+  meta: {
+    table: Table
     tables?: Record<string, Table>
     renamed?: RenameColumn
   }
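
With meta and meta.table now non-optional, every site constructing a QueryJson must supply the table up front rather than leaving consumers to handle its absence. A minimal sketch of the stricter call shape, where baseQuery and table are placeholders assumed to be defined elsewhere:

  const json: QueryJson = {
    ...baseQuery,     // endpoint, filters, etc. from the surrounding code
    meta: { table },  // previously optional; omitting it is now a compile error
  }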
@@ -704,7 +704,6 @@ describe("scim", () => {
       expect(response).toEqual({
         Resources: expect.arrayContaining(
           groups.map(g => {
-            // eslint-disable-next-line @typescript-eslint/no-unused-vars
             const { members, ...groupData } = g
             return groupData
           })

@@ -724,7 +723,6 @@ describe("scim", () => {
       expect(response).toEqual({
         Resources: expect.arrayContaining(
           groups.map(g => {
-            // eslint-disable-next-line @typescript-eslint/no-unused-vars
             const { members, displayName, ...groupData } = g
             return groupData
           })

@@ -874,7 +872,6 @@ describe("scim", () => {
         qs: "excludedAttributes=members",
       })

-      // eslint-disable-next-line @typescript-eslint/no-unused-vars
       const { members, ...expectedResponse } = group

       expect(response).toEqual(expectedResponse)
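
These eslint-disable lines are redundant once @typescript-eslint/no-unused-vars is configured with ignoreRestSiblings and the ^_ ignore patterns, as this commit's lint config does: omitting a property via rest destructuring no longer trips the rule. For example:

  // allowed under { ignoreRestSiblings: true }: `members` is a rest sibling
  const { members, ...groupData } = group
  // allowed under { varsIgnorePattern: "^_" }: underscore-prefixed names are ignored
  const { members: _members, ...rest } = group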
@@ -1,7 +1,6 @@
 import { features } from "@budibase/backend-core"
 import env from "./environment"

-// eslint-disable-next-line no-unused-vars
 enum WorkerFeature {}

 const featureList: WorkerFeature[] = features.processFeatureEnvVar(
@@ -4,7 +4,7 @@ import {
   InviteUsersRequest,
   User,
   CreateAdminUserRequest,
-  SearchQuery,
+  SearchFilters,
   InviteUsersResponse,
 } from "@budibase/types"
 import structures from "../structures"

@@ -150,7 +150,7 @@ export class UserAPI extends TestAPI {
   }

   searchUsers = (
-    { query }: { query?: SearchQuery },
+    { query }: { query?: SearchFilters },
     opts?: { status?: number; noHeaders?: boolean }
   ) => {
     const req = this.request
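
After the rename, test callers pass a SearchFilters value directly. A hypothetical invocation, where the config.api.users path and the queried field are assumptions for illustration:

  await config.api.users.searchUsers(
    { query: { [SearchFilterOperator.STRING]: { email: "admin" } } },
    { status: 200 }
  )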
@@ -0,0 +1,25 @@
+#!/bin/bash
+yarn global add zbctl
+export ZEEBE_ADDRESS='localhost:26500'
+
+cd ../budibase-bpm
+
+# succeeds (exit 0) once the Zeebe gateway reports itself healthy
+is_camunda_ready() {
+  zbctl --insecure status 2>/dev/null | grep -q 'Healthy'
+}
+
+docker-compose up -d
+echo "waiting for Camunda to be ready..."
+
+until is_camunda_ready; do sleep 1; done
+
+cd src/main/resources/models
+
+echo "deploy processes..."
+zbctl deploy resource offboarding.bpmn --insecure
+zbctl deploy resource onboarding.bpmn --insecure
+
+cd ../../../../../budibase/packages/account-portal/packages/server
+
+yarn worker:run & cd ../../../.. && yarn dev:accountportal

yarn.lock
@@ -5174,6 +5174,13 @@
   dependencies:
     "@types/node" "*"

+"@types/archiver@6.0.2":
+  version "6.0.2"
+  resolved "https://registry.yarnpkg.com/@types/archiver/-/archiver-6.0.2.tgz#0daf8c83359cbde69de1e4b33dcade6a48a929e2"
+  integrity sha512-KmROQqbQzKGuaAbmK+ZcytkJ51+YqDa7NmbXjmtC5YBLSyQYo21YaUnQ3HbaPFKL1ooo6RQ6OPYPIDyxfpDDXw==
+  dependencies:
+    "@types/readdir-glob" "*"
+
 "@types/aria-query@^5.0.1":
   version "5.0.1"
   resolved "https://registry.yarnpkg.com/@types/aria-query/-/aria-query-5.0.1.tgz#3286741fb8f1e1580ac28784add4c7a1d49bdfbc"
@@ -5923,6 +5930,13 @@
   resolved "https://registry.yarnpkg.com/@types/range-parser/-/range-parser-1.2.4.tgz#cd667bcfdd025213aafb7ca5915a932590acdcdc"
   integrity sha512-EEhsLsD6UsDM1yFhAvy0Cjr6VwmpMWqFBCb9w07wVugF7w9nfajxLuVmngTIpgS6svCnm6Vaw+MZhoDCKnOfsw==

+"@types/readdir-glob@*":
+  version "1.1.5"
+  resolved "https://registry.yarnpkg.com/@types/readdir-glob/-/readdir-glob-1.1.5.tgz#21a4a98898fc606cb568ad815f2a0eedc24d412a"
+  integrity sha512-raiuEPUYqXu+nvtY2Pe8s8FEmZ3x5yAH4VkLdihcPdalvsHltomrRC9BzuStrJ9yk06470hS0Crw0f1pXqD+Hg==
+  dependencies:
+    "@types/node" "*"
+
 "@types/redis@^2.8.0":
   version "2.8.32"
   resolved "https://registry.yarnpkg.com/@types/redis/-/redis-2.8.32.tgz#1d3430219afbee10f8cfa389dad2571a05ecfb11"
@@ -6993,6 +7007,32 @@ archiver-utils@^3.0.4:
     normalize-path "^3.0.0"
     readable-stream "^3.6.0"

+archiver-utils@^5.0.0, archiver-utils@^5.0.2:
+  version "5.0.2"
+  resolved "https://registry.yarnpkg.com/archiver-utils/-/archiver-utils-5.0.2.tgz#63bc719d951803efc72cf961a56ef810760dd14d"
+  integrity sha512-wuLJMmIBQYCsGZgYLTy5FIB2pF6Lfb6cXMSF8Qywwk3t20zWnAi7zLcQFdKQmIB8wyZpY5ER38x08GbwtR2cLA==
+  dependencies:
+    glob "^10.0.0"
+    graceful-fs "^4.2.0"
+    is-stream "^2.0.1"
+    lazystream "^1.0.0"
+    lodash "^4.17.15"
+    normalize-path "^3.0.0"
+    readable-stream "^4.0.0"
+
+archiver@7.0.1:
+  version "7.0.1"
+  resolved "https://registry.yarnpkg.com/archiver/-/archiver-7.0.1.tgz#c9d91c350362040b8927379c7aa69c0655122f61"
+  integrity sha512-ZcbTaIqJOfCc03QwD468Unz/5Ir8ATtvAHsK+FdXbDIbGfihqh9mrvdcYunQzqn4HrvWWaFyaxJhGZagaJJpPQ==
+  dependencies:
+    archiver-utils "^5.0.2"
+    async "^3.2.4"
+    buffer-crc32 "^1.0.0"
+    readable-stream "^4.0.0"
+    readdir-glob "^1.1.2"
+    tar-stream "^3.0.0"
+    zip-stream "^6.0.1"
+
 archiver@^5.3.2:
   version "5.3.2"
   resolved "https://registry.yarnpkg.com/archiver/-/archiver-5.3.2.tgz#99991d5957e53bd0303a392979276ac4ddccf3b0"
@@ -7794,6 +7834,11 @@ buffer-crc32@^0.2.1, buffer-crc32@^0.2.13, buffer-crc32@~0.2.3:
   resolved "https://registry.yarnpkg.com/buffer-crc32/-/buffer-crc32-0.2.13.tgz#0d333e3f00eac50aa1454abd30ef8c2a5d9a7242"
   integrity sha512-VO9Ht/+p3SN7SKWqcrgEzjGbRSJYTx+Q1pTQC0wrWqHx0vpJraQ6GtHx8tvcg1rlK1byhU5gccxgOgj7B0TDkQ==

+buffer-crc32@^1.0.0:
+  version "1.0.0"
+  resolved "https://registry.yarnpkg.com/buffer-crc32/-/buffer-crc32-1.0.0.tgz#a10993b9055081d55304bd9feb4a072de179f405"
+  integrity sha512-Db1SbgBS/fg/392AblrMJk97KggmvYhr4pB5ZIMTWtaivCPMWLkmb7m21cJvpvgK+J3nsU2CmmixNBZx4vFj/w==
+
 buffer-equal-constant-time@1.0.1:
   version "1.0.1"
   resolved "https://registry.yarnpkg.com/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz#f8e71132f7ffe6e01a5c9697a4c6f3e48d5cc819"
@@ -8630,6 +8675,17 @@ compress-commons@^4.1.2:
     normalize-path "^3.0.0"
     readable-stream "^3.6.0"

+compress-commons@^6.0.2:
+  version "6.0.2"
+  resolved "https://registry.yarnpkg.com/compress-commons/-/compress-commons-6.0.2.tgz#26d31251a66b9d6ba23a84064ecd3a6a71d2609e"
+  integrity sha512-6FqVXeETqWPoGcfzrXb37E50NP0LXT8kAMu5ooZayhWWdgEY4lBEEcbQNXtkuKQsGduxiIcI4gOTsxTmuq/bSg==
+  dependencies:
+    crc-32 "^1.2.0"
+    crc32-stream "^6.0.0"
+    is-stream "^2.0.1"
+    normalize-path "^3.0.0"
+    readable-stream "^4.0.0"
+
 compressible@^2.0.0, compressible@^2.0.12:
   version "2.0.18"
   resolved "https://registry.yarnpkg.com/compressible/-/compressible-2.0.18.tgz#af53cca6b070d4c3c0750fbd77286a6d7cc46fba"
@@ -8922,6 +8978,14 @@ crc32-stream@^4.0.2:
     crc-32 "^1.2.0"
     readable-stream "^3.4.0"

+crc32-stream@^6.0.0:
+  version "6.0.0"
+  resolved "https://registry.yarnpkg.com/crc32-stream/-/crc32-stream-6.0.0.tgz#8529a3868f8b27abb915f6c3617c0fadedbf9430"
+  integrity sha512-piICUB6ei4IlTv1+653yq5+KoqfBYmj9bw6LqXoOneTMDXk5nM1qt12mFW1caG3LlJXEKW1Bp0WggEmIfQB34g==
+  dependencies:
+    crc-32 "^1.2.0"
+    readable-stream "^4.0.0"
+
 crc@^3.4.4:
   version "3.8.0"
   resolved "https://registry.yarnpkg.com/crc/-/crc-3.8.0.tgz#ad60269c2c856f8c299e2c4cc0de4556914056c6"
@@ -11800,6 +11864,17 @@ glob@7.1.6:
     once "^1.3.0"
     path-is-absolute "^1.0.0"

+glob@^10.0.0:
+  version "10.3.12"
+  resolved "https://registry.yarnpkg.com/glob/-/glob-10.3.12.tgz#3a65c363c2e9998d220338e88a5f6ac97302960b"
+  integrity sha512-TCNv8vJ+xz4QiqTpfOJA7HvYv+tNIRHKfUWw/q+v2jdgN4ebz+KY9tGx5J4rHP0o84mNP+ApH66HRX8us3Khqg==
+  dependencies:
+    foreground-child "^3.1.0"
+    jackspeak "^2.3.6"
+    minimatch "^9.0.1"
+    minipass "^7.0.4"
+    path-scurry "^1.10.2"
+
 glob@^10.2.2:
   version "10.2.7"
   resolved "https://registry.yarnpkg.com/glob/-/glob-10.2.7.tgz#9dd2828cd5bc7bd861e7738d91e7113dda41d7d8"
@@ -13186,7 +13261,7 @@ is-stream@^1.1.0:
   resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-1.1.0.tgz#12d4a3dd4e68e0b79ceb8dbc84173ae80d91ca44"
   integrity sha512-uQPm8kcs47jx38atAcWTVxyltQYoPT68y9aWYdV6yWXSyW8mzSat0TL6CiWdZeCdF3KrAvpVtnHbTv4RN+rqdQ==

-is-stream@^2.0.0:
+is-stream@^2.0.0, is-stream@^2.0.1:
   version "2.0.1"
   resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-2.0.1.tgz#fac1e3d53b97ad5a9d0ae9cef2389f5810a5c077"
   integrity sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==
@@ -13432,6 +13507,15 @@ jackspeak@^2.0.3:
   optionalDependencies:
     "@pkgjs/parseargs" "^0.11.0"

+jackspeak@^2.3.6:
+  version "2.3.6"
+  resolved "https://registry.yarnpkg.com/jackspeak/-/jackspeak-2.3.6.tgz#647ecc472238aee4b06ac0e461acc21a8c505ca8"
+  integrity sha512-N3yCS/NegsOBokc8GAdM8UcmfsKiSS8cipheD/nivzr700H+nsMOxJjQnvwOcRYVuFkdH0wGUvW2WbXGmrZGbQ==
+  dependencies:
+    "@isaacs/cliui" "^8.0.2"
+  optionalDependencies:
+    "@pkgjs/parseargs" "^0.11.0"
+
 jake@^10.8.5:
   version "10.8.5"
   resolved "https://registry.yarnpkg.com/jake/-/jake-10.8.5.tgz#f2183d2c59382cb274226034543b9c03b8164c46"
@@ -15209,6 +15293,11 @@ lowercase-keys@^2.0.0:
   resolved "https://registry.yarnpkg.com/lowercase-keys/-/lowercase-keys-2.0.0.tgz#2603e78b7b4b0006cbca2fbcc8a3202558ac9479"
   integrity sha512-tqNXrS78oMOE73NMxK4EMLQsQowWf8jKooH9g7xPavRT706R6bkQJ6DY2Te7QukaZsulxa30wQ7bk0pm4XiHmA==

+lru-cache@^10.2.0:
+  version "10.2.0"
+  resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-10.2.0.tgz#0bd445ca57363465900f4d1f9bd8db343a4d95c3"
+  integrity sha512-2bIM8x+VAf6JT4bKAljS1qUWgMsqZRPGJS6FSahIMPVvctcNhyVp7AJu7quxOW9jwkryBReKZY5tY5JYv2n/7Q==
+
 lru-cache@^4.1.5:
   version "4.1.5"
   resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-4.1.5.tgz#8bbe50ea85bed59bc9e33dcab8235ee9bcf443cd"
@@ -15834,6 +15923,11 @@ minipass@^5.0.0:
   resolved "https://registry.yarnpkg.com/minipass/-/minipass-6.0.2.tgz#542844b6c4ce95b202c0995b0a471f1229de4c81"
   integrity sha512-MzWSV5nYVT7mVyWCwn2o7JH13w2TBRmmSqSRCKzTw+lmft9X4z+3wjvs06Tzijo5z4W/kahUCDpRXTF+ZrmF/w==

+"minipass@^5.0.0 || ^6.0.2 || ^7.0.0", minipass@^7.0.4:
+  version "7.0.4"
+  resolved "https://registry.yarnpkg.com/minipass/-/minipass-7.0.4.tgz#dbce03740f50a4786ba994c1fb908844d27b038c"
+  integrity sha512-jYofLM5Dam9279rdkWzqHozUo4ybjdZmCsDHePy5V/PbBcVMiSZR97gmAy45aqi8CK1lG2ECd356FU86avfwUQ==
+
 minizlib@^2.0.0, minizlib@^2.1.1, minizlib@^2.1.2:
   version "2.1.2"
   resolved "https://registry.yarnpkg.com/minizlib/-/minizlib-2.1.2.tgz#e90d3466ba209b932451508a11ce3d3632145931"
@@ -17392,6 +17486,14 @@ path-parser@^6.1.0:
     search-params "3.0.0"
     tslib "^1.10.0"

+path-scurry@^1.10.2:
+  version "1.10.2"
+  resolved "https://registry.yarnpkg.com/path-scurry/-/path-scurry-1.10.2.tgz#8f6357eb1239d5fa1da8b9f70e9c080675458ba7"
+  integrity sha512-7xTavNy5RQXnsjANvVvMkEjvloOinkAjv/Z6Ildz9v2RinZ4SBKTWFOVRbaF8p0vpHnyjV/UwNDdKuUv6M5qcA==
+  dependencies:
+    lru-cache "^10.2.0"
+    minipass "^5.0.0 || ^6.0.2 || ^7.0.0"
+
 path-scurry@^1.6.1, path-scurry@^1.7.0:
   version "1.9.2"
   resolved "https://registry.yarnpkg.com/path-scurry/-/path-scurry-1.9.2.tgz#90f9d296ac5e37e608028e28a447b11d385b3f63"
@@ -21037,7 +21139,7 @@ tar-stream@^2.0.0, tar-stream@^2.1.4, tar-stream@^2.2.0, tar-stream@~2.2.0:
     inherits "^2.0.3"
     readable-stream "^3.1.1"

-tar-stream@^3.1.5:
+tar-stream@^3.0.0, tar-stream@^3.1.5:
   version "3.1.7"
   resolved "https://registry.yarnpkg.com/tar-stream/-/tar-stream-3.1.7.tgz#24b3fb5eabada19fe7338ed6d26e5f7c482e792b"
   integrity sha512-qJj60CXt7IU1Ffyc3NJMjh6EkuCFej46zUqJ4J7pqYlThyd9bO0XBTmcOIhSzZJVWfsLks0+nle/j538YAW9RQ==
@@ -22941,3 +23043,12 @@ zip-stream@^4.1.0:
     archiver-utils "^3.0.4"
     compress-commons "^4.1.2"
     readable-stream "^3.6.0"
+
+zip-stream@^6.0.1:
+  version "6.0.1"
+  resolved "https://registry.yarnpkg.com/zip-stream/-/zip-stream-6.0.1.tgz#e141b930ed60ccaf5d7fa9c8260e0d1748a2bbfb"
+  integrity sha512-zK7YHHz4ZXpW89AHXUPbQVGKI7uvkd3hzusTdotCg1UxyaVtg0zFJSTfW/Dq5f7OBBVnq6cZIaC8Ti4hb6dtCA==
+  dependencies:
+    archiver-utils "^5.0.0"
+    compress-commons "^6.0.2"
+    readable-stream "^4.0.0"