Merge branch 'master' of github.com:Budibase/budibase into fix-oidc-error-logging

mike12345567 2024-04-19 13:29:21 +01:00
commit 8bb6956925
133 changed files with 2703 additions and 1307 deletions

View File

@@ -42,9 +42,18 @@
   },
   "rules": {
     "no-unused-vars": "off",
-    "@typescript-eslint/no-unused-vars": "error",
     "local-rules/no-budibase-imports": "error",
-    "local-rules/no-console-error": "error"
+    "local-rules/no-console-error": "error",
+    "@typescript-eslint/no-unused-vars": [
+      "error",
+      {
+        "varsIgnorePattern": "^_",
+        "argsIgnorePattern": "^_",
+        "destructuredArrayIgnorePattern": "^_",
+        "ignoreRestSiblings": true
+      }
+    ],
+    "local-rules/no-budibase-imports": "error"
   }
 },
 {
@@ -60,7 +69,15 @@
   },
   "rules": {
     "no-unused-vars": "off",
-    "@typescript-eslint/no-unused-vars": "error",
+    "@typescript-eslint/no-unused-vars": [
+      "error",
+      {
+        "varsIgnorePattern": "^_",
+        "argsIgnorePattern": "^_",
+        "destructuredArrayIgnorePattern": "^_",
+        "ignoreRestSiblings": true
+      }
+    ],
     "local-rules/no-test-com": "error",
     "local-rules/email-domain-example-com": "error",
     "no-console": "warn",
@@ -90,7 +107,8 @@
   {
     "varsIgnorePattern": "^_",
     "argsIgnorePattern": "^_",
-    "destructuredArrayIgnorePattern": "^_"
+    "destructuredArrayIgnorePattern": "^_",
+    "ignoreRestSiblings": true
   }
 ],
 "import/no-relative-packages": "error",

View File

@@ -24,5 +24,8 @@
   },
   "[svelte]": {
     "editor.defaultFormatter": "svelte.svelte-vscode"
+  },
+  "[handlebars]": {
+    "editor.formatOnSave": false
   }
 }

View File

@@ -150,6 +150,10 @@ $ helm install --create-namespace --namespace budibase budibase . -f values.yaml
 | services.apps.autoscaling.maxReplicas | int | `10` | |
 | services.apps.autoscaling.minReplicas | int | `1` | |
 | services.apps.autoscaling.targetCPUUtilizationPercentage | int | `80` | Target CPU utilization percentage for the apps service. Note that for autoscaling to work, you will need to have metrics-server configured, and resources set for the apps pods. |
+| services.apps.extraContainers | list | `[]` | Additional containers to be added to the apps pod. |
+| services.apps.extraEnv | list | `[]` | Extra environment variables to set for apps pods. Takes a list of name=value pairs. |
+| services.apps.extraVolumeMounts | list | `[]` | Additional volumeMounts to the main apps container. |
+| services.apps.extraVolumes | list | `[]` | Additional volumes to the apps pod. |
 | services.apps.httpLogging | int | `1` | Whether or not to log HTTP requests to the apps service. |
 | services.apps.livenessProbe | object | HTTP health checks. | Liveness probe configuration for apps pods. You shouldn't need to change this, but if you want to you can find more information here: <https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-startup-probes/> |
 | services.apps.logLevel | string | `"info"` | The log level for the apps service. |
@@ -162,6 +166,10 @@ $ helm install --create-namespace --namespace budibase budibase . -f values.yaml
 | services.automationWorkers.autoscaling.minReplicas | int | `1` | |
 | services.automationWorkers.autoscaling.targetCPUUtilizationPercentage | int | `80` | Target CPU utilization percentage for the automation worker service. Note that for autoscaling to work, you will need to have metrics-server configured, and resources set for the automation worker pods. |
 | services.automationWorkers.enabled | bool | `true` | Whether or not to enable the automation worker service. If you disable this, automations will be processed by the apps service. |
+| services.automationWorkers.extraContainers | list | `[]` | Additional containers to be added to the automationWorkers pod. |
+| services.automationWorkers.extraEnv | list | `[]` | Extra environment variables to set for automation worker pods. Takes a list of name=value pairs. |
+| services.automationWorkers.extraVolumeMounts | list | `[]` | Additional volumeMounts to the main automationWorkers container. |
+| services.automationWorkers.extraVolumes | list | `[]` | Additional volumes to the automationWorkers pod. |
 | services.automationWorkers.livenessProbe | object | HTTP health checks. | Liveness probe configuration for automation worker pods. You shouldn't need to change this, but if you want to you can find more information here: <https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-startup-probes/> |
 | services.automationWorkers.logLevel | string | `"info"` | The log level for the automation worker service. |
 | services.automationWorkers.readinessProbe | object | HTTP health checks. | Readiness probe configuration for automation worker pods. You shouldn't need to change this, but if you want to you can find more information here: <https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-startup-probes/> |
@@ -180,6 +188,9 @@ $ helm install --create-namespace --namespace budibase budibase . -f values.yaml
 | services.objectStore.cloudfront.cdn | string | `""` | Set the url of a distribution to enable cloudfront. |
 | services.objectStore.cloudfront.privateKey64 | string | `""` | Base64 encoded private key for the above public key. |
 | services.objectStore.cloudfront.publicKeyId | string | `""` | ID of public key stored in cloudfront. |
+| services.objectStore.extraContainers | list | `[]` | Additional containers to be added to the objectStore pod. |
+| services.objectStore.extraVolumeMounts | list | `[]` | Additional volumeMounts to the main objectStore container. |
+| services.objectStore.extraVolumes | list | `[]` | Additional volumes to the objectStore pod. |
 | services.objectStore.minio | bool | `true` | Set to false if using another object store, such as S3. You will need to set `services.objectStore.url` to point to your bucket if you do this. |
 | services.objectStore.region | string | `""` | AWS_REGION if using S3 |
 | services.objectStore.resources | object | `{}` | The resources to use for Minio pods. See <https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/> for more information on how to set these. |
@@ -191,12 +202,19 @@ $ helm install --create-namespace --namespace budibase budibase . -f values.yaml
 | services.proxy.autoscaling.maxReplicas | int | `10` | |
 | services.proxy.autoscaling.minReplicas | int | `1` | |
 | services.proxy.autoscaling.targetCPUUtilizationPercentage | int | `80` | Target CPU utilization percentage for the proxy service. Note that for autoscaling to work, you will need to have metrics-server configured, and resources set for the proxy pods. |
+| services.proxy.extraContainers | list | `[]` | |
+| services.proxy.extraVolumeMounts | list | `[]` | Additional volumeMounts to the main proxy container. |
+| services.proxy.extraVolumes | list | `[]` | Additional volumes to the proxy pod. |
 | services.proxy.livenessProbe | object | HTTP health checks. | Liveness probe configuration for proxy pods. You shouldn't need to change this, but if you want to you can find more information here: <https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-startup-probes/> |
 | services.proxy.readinessProbe | object | HTTP health checks. | Readiness probe configuration for proxy pods. You shouldn't need to change this, but if you want to you can find more information here: <https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-startup-probes/> |
 | services.proxy.replicaCount | int | `1` | The number of proxy replicas to run. |
 | services.proxy.resources | object | `{}` | The resources to use for proxy pods. See <https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/> for more information on how to set these. |
 | services.proxy.startupProbe | object | HTTP health checks. | Startup probe configuration for proxy pods. You shouldn't need to change this, but if you want to you can find more information here: <https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-startup-probes/> |
 | services.redis.enabled | bool | `true` | Whether or not to deploy a Redis pod into your cluster. |
+| services.redis.extraContainers | list | `[]` | Additional containers to be added to the redis pod. |
+| services.redis.extraVolumeMounts | list | `[]` | Additional volumeMounts to the main redis container. |
+| services.redis.extraVolumes | list | `[]` | Additional volumes to the redis pod. |
+| services.redis.image | string | `"redis"` | The Redis image to use. |
 | services.redis.password | string | `"budibase"` | The password to use when connecting to Redis. It's recommended that you change this from the default if you're running Redis in-cluster. |
 | services.redis.port | int | `6379` | Port to expose Redis on. |
 | services.redis.resources | object | `{}` | The resources to use for Redis pods. See <https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/> for more information on how to set these. |
@@ -207,6 +225,10 @@ $ helm install --create-namespace --namespace budibase budibase . -f values.yaml
 | services.worker.autoscaling.maxReplicas | int | `10` | |
 | services.worker.autoscaling.minReplicas | int | `1` | |
 | services.worker.autoscaling.targetCPUUtilizationPercentage | int | `80` | Target CPU utilization percentage for the worker service. Note that for autoscaling to work, you will need to have metrics-server configured, and resources set for the worker pods. |
+| services.worker.extraContainers | list | `[]` | Additional containers to be added to the worker pod. |
+| services.worker.extraEnv | list | `[]` | Extra environment variables to set for worker pods. Takes a list of name=value pairs. |
+| services.worker.extraVolumeMounts | list | `[]` | Additional volumeMounts to the main worker container. |
+| services.worker.extraVolumes | list | `[]` | Additional volumes to the worker pod. |
 | services.worker.httpLogging | int | `1` | Whether or not to log HTTP requests to the worker service. |
 | services.worker.livenessProbe | object | HTTP health checks. | Liveness probe configuration for worker pods. You shouldn't need to change this, but if you want to you can find more information here: <https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-startup-probes/> |
 | services.worker.logLevel | string | `"info"` | The log level for the worker service. |
@@ -225,4 +247,4 @@ $ helm uninstall --namespace budibase budibase
 ```
 ----------------------------------------------
-Autogenerated from chart metadata using [helm-docs v1.11.3](https://github.com/norwoodj/helm-docs/releases/v1.11.3)
+Autogenerated from chart metadata using [helm-docs v1.13.1](https://github.com/norwoodj/helm-docs/releases/v1.13.1)
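
Note: the new extra* options documented above slot into values.yaml as list values. A minimal sketch, with a hypothetical sidecar name and image that are not part of the chart:

    services:
      apps:
        extraEnv:
          - name: MY_EXTRA_SETTING
            value: "1"
        extraVolumes:
          - name: scratch
            emptyDir: {}
        extraVolumeMounts:
          - name: scratch
            mountPath: /scratch
        extraContainers:
          - name: sidecar
            image: busybox:1.36
            command: ["sh", "-c", "sleep infinity"]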

View File

@@ -235,6 +235,13 @@ spec:
         args:
           {{- toYaml .Values.services.apps.args | nindent 10 }}
         {{ end }}
+        {{ if .Values.services.apps.extraVolumeMounts }}
+        volumeMounts:
+          {{- toYaml .Values.services.apps.extraVolumeMounts | nindent 10 }}
+        {{- end }}
+      {{- if .Values.services.apps.extraContainers }}
+        {{- toYaml .Values.services.apps.extraContainers | nindent 6 }}
+      {{- end }}
       {{- with .Values.affinity }}
       affinity:
         {{- toYaml . | nindent 8 }}
@@ -258,4 +265,8 @@ spec:
         - name: ndots
           value: {{ .Values.services.apps.ndots | quote }}
       {{ end }}
+      {{ if .Values.services.apps.extraVolumes }}
+      volumes:
+        {{- toYaml .Values.services.apps.extraVolumes | nindent 6 }}
+      {{- end }}
 status: {}

View File

@@ -235,6 +235,13 @@ spec:
         args:
           {{- toYaml .Values.services.automationWorkers.args | nindent 10 }}
         {{ end }}
+        {{ if .Values.services.automationWorkers.extraVolumeMounts }}
+        volumeMounts:
+          {{- toYaml .Values.services.automationWorkers.extraVolumeMounts | nindent 10 }}
+        {{ end }}
+      {{- if .Values.services.automationWorkers.extraContainers }}
+        {{- toYaml .Values.services.automationWorkers.extraContainers | nindent 6 }}
+      {{- end }}
       {{- with .Values.affinity }}
       affinity:
         {{- toYaml . | nindent 8 }}
@@ -258,5 +265,9 @@ spec:
         - name: ndots
           value: {{ .Values.services.automationWorkers.ndots | quote }}
       {{ end }}
+      {{ if .Values.services.automationWorkers.extraVolumes }}
+      volumes:
+        {{- toYaml .Values.services.automationWorkers.extraVolumes | nindent 8 }}
+      {{ end }}
 status: {}
 {{- end }}

View File

@@ -54,6 +54,12 @@ spec:
         volumeMounts:
           - mountPath: /data
             name: minio-data
+          {{ if .Values.services.objectStore.extraVolumeMounts }}
+          {{- toYaml .Values.services.objectStore.extraVolumeMounts | nindent 8 }}
+          {{- end }}
+      {{- if .Values.services.objectStore.extraContainers }}
+        {{- toYaml .Values.services.objectStore.extraContainers | nindent 6 }}
+      {{- end }}
       {{- with .Values.affinity }}
       affinity:
         {{- toYaml . | nindent 8 }}
@@ -75,5 +81,8 @@ spec:
         - name: minio-data
           persistentVolumeClaim:
             claimName: minio-data
+      {{ if .Values.services.objectStore.extraVolumes }}
+      {{- toYaml .Values.services.objectStore.extraVolumes | nindent 6 }}
+      {{- end }}
 status: {}
 {{- end }}

View File

@@ -82,7 +82,13 @@ spec:
         resources:
           {{- toYaml . | nindent 10 }}
         {{ end }}
+        {{ if .Values.services.proxy.extraVolumeMounts }}
         volumeMounts:
+          {{- toYaml .Values.services.proxy.extraVolumeMounts | nindent 8 }}
+        {{- end }}
+      {{- if .Values.services.proxy.extraContainers }}
+        {{- toYaml .Values.services.proxy.extraContainers | nindent 6 }}
+      {{- end }}
       {{- with .Values.affinity }}
       affinity:
         {{- toYaml . | nindent 8 }}
@@ -108,7 +114,10 @@ spec:
         args:
           {{- toYaml .Values.services.proxy.args | nindent 8 }}
       {{ end }}
+      {{ if .Values.services.proxy.extraVolumes }}
       volumes:
+        {{- toYaml .Values.services.proxy.extraVolumes | nindent 6 }}
+      {{ end }}
       {{ if .Values.services.proxy.ndots }}
       dnsConfig:
         options:

View File

@@ -22,7 +22,7 @@ spec:
         - redis-server
         - --requirepass
         - {{ .Values.services.redis.password }}
-        image: redis
+        image: {{ .Values.services.redis.image }}
         imagePullPolicy: ""
         name: redis-service
         ports:
@@ -34,6 +34,12 @@ spec:
         volumeMounts:
           - mountPath: /data
             name: redis-data
+          {{ if .Values.services.redis.extraVolumeMounts }}
+          {{- toYaml .Values.services.redis.extraVolumeMounts | nindent 8 }}
+          {{- end }}
+      {{- if .Values.services.redis.extraContainers }}
+        {{- toYaml .Values.services.redis.extraContainers | nindent 6 }}
+      {{- end }}
       {{- with .Values.affinity }}
       affinity:
         {{- toYaml . | nindent 8 }}
@@ -55,6 +61,9 @@ spec:
         - name: redis-data
           persistentVolumeClaim:
             claimName: redis-data
+      {{ if .Values.services.redis.extraVolumes }}
+      {{- toYaml .Values.services.redis.extraVolumes | nindent 6 }}
+      {{- end }}
 status: {}
 {{- end }}

View File

@@ -221,6 +221,13 @@ spec:
         args:
           {{- toYaml .Values.services.worker.args | nindent 10 }}
         {{ end }}
+        {{ if .Values.services.worker.extraVolumeMounts }}
+        volumeMounts:
+          {{- toYaml .Values.services.worker.extraVolumeMounts | nindent 10 }}
+        {{- end }}
+      {{- if .Values.services.worker.extraContainers }}
+        {{- toYaml .Values.services.worker.extraContainers | nindent 6 }}
+      {{- end }}
       {{- with .Values.affinity }}
       affinity:
         {{- toYaml . | nindent 8 }}
@@ -244,4 +251,8 @@ spec:
         - name: ndots
           value: {{ .Values.services.worker.ndots | quote }}
       {{ end }}
+      {{ if .Values.services.worker.extraVolumes }}
+      volumes:
+        {{- toYaml .Values.services.worker.extraVolumes | nindent 6 }}
+      {{- end }}
 status: {}

View File

@@ -206,6 +206,20 @@ services:
     # for autoscaling to work, you will need to have metrics-server
     # configured, and resources set for the proxy pods.
     targetCPUUtilizationPercentage: 80
+    # -- Additional containers to be added to the proxy pod.
+    extraContainers: []
+    #  - name: my-sidecar
+    #    image: myimage:latest
+
+    # -- Additional volumeMounts to the main proxy container.
+    extraVolumeMounts: []
+    #  - name: my-volume
+    #    mountPath: /path/to/mount
+
+    # -- Additional volumes to the proxy pod.
+    extraVolumes: []
+    #  - name: my-volume
+    #    emptyDir: {}
 
   apps:
     # @ignore (you shouldn't need to change this)
@@ -274,6 +288,20 @@ services:
     # autoscaling to work, you will need to have metrics-server configured,
     # and resources set for the apps pods.
     targetCPUUtilizationPercentage: 80
+    # -- Additional containers to be added to the apps pod.
+    extraContainers: []
+    #  - name: my-sidecar
+    #    image: myimage:latest
+
+    # -- Additional volumeMounts to the main apps container.
+    extraVolumeMounts: []
+    #  - name: my-volume
+    #    mountPath: /path/to/mount
+
+    # -- Additional volumes to the apps pod.
+    extraVolumes: []
+    #  - name: my-volume
+    #    emptyDir: {}
 
   automationWorkers:
     # -- Whether or not to enable the automation worker service. If you disable this,
@@ -346,6 +374,20 @@ services:
     # Note that for autoscaling to work, you will need to have metrics-server
     # configured, and resources set for the automation worker pods.
     targetCPUUtilizationPercentage: 80
+    # -- Additional containers to be added to the automationWorkers pod.
+    extraContainers: []
+    #  - name: my-sidecar
+    #    image: myimage:latest
+
+    # -- Additional volumeMounts to the main automationWorkers container.
+    extraVolumeMounts: []
+    #  - name: my-volume
+    #    mountPath: /path/to/mount
+
+    # -- Additional volumes to the automationWorkers pod.
+    extraVolumes: []
+    #  - name: my-volume
+    #    emptyDir: {}
 
   worker:
     # @ignore (you shouldn't need to change this)
@@ -414,6 +456,20 @@ services:
     # for autoscaling to work, you will need to have metrics-server
     # configured, and resources set for the worker pods.
     targetCPUUtilizationPercentage: 80
+    # -- Additional containers to be added to the worker pod.
+    extraContainers: []
+    #  - name: my-sidecar
+    #    image: myimage:latest
+
+    # -- Additional volumeMounts to the main worker container.
+    extraVolumeMounts: []
+    #  - name: my-volume
+    #    mountPath: /path/to/mount
+
+    # -- Additional volumes to the worker pod.
+    extraVolumes: []
+    #  - name: my-volume
+    #    emptyDir: {}
 
   couchdb:
     # -- Whether or not to spin up a CouchDB instance in your cluster. True by
@@ -440,6 +496,8 @@ services:
     resources: {}
 
   redis:
+    # -- The Redis image to use.
+    image: redis
     # -- Whether or not to deploy a Redis pod into your cluster.
     enabled: true
     # -- Port to expose Redis on.
@@ -463,6 +521,20 @@ services:
     # <https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/>
     # for more information on how to set these.
     resources: {}
+    # -- Additional containers to be added to the redis pod.
+    extraContainers: []
+    #  - name: my-sidecar
+    #    image: myimage:latest
+
+    # -- Additional volumeMounts to the main redis container.
+    extraVolumeMounts: []
+    #  - name: my-volume
+    #    mountPath: /path/to/mount
+
+    # -- Additional volumes to the redis pod.
+    extraVolumes: []
+    #  - name: my-volume
+    #    emptyDir: {}
 
   objectStore:
     # -- Set to false if using another object store, such as S3. You will need
@@ -488,7 +560,7 @@ services:
     # do this.
     url: "http://minio-service:9000"
     # -- How much storage to give Minio in its PersistentVolumeClaim.
-    storage: 100Mi
+    storage: 2Gi
     # -- If defined, storageClassName: <storageClass> If set to "-",
     # storageClassName: "", which disables dynamic provisioning If undefined
     # (the default) or set to null, no storageClassName spec is set, choosing
@@ -505,6 +577,20 @@ services:
       publicKeyId: ""
       # -- Base64 encoded private key for the above public key.
       privateKey64: ""
+    # -- Additional containers to be added to the objectStore pod.
+    extraContainers: []
+    #  - name: my-sidecar
+    #    image: myimage:latest
+
+    # -- Additional volumeMounts to the main objectStore container.
+    extraVolumeMounts: []
+    #  - name: my-volume
+    #    mountPath: /path/to/mount
+
+    # -- Additional volumes to the objectStore pod.
+    extraVolumes: []
+    #  - name: my-volume
+    #    emptyDir: {}
 
 # Override values in couchDB subchart. We're only specifying the values we're changing.
 # If you want to see all of the available values, see:

View File

@@ -17,6 +17,7 @@ APP_PORT=4002
 WORKER_PORT=4003
 MINIO_PORT=4004
 COUCH_DB_PORT=4005
+COUCH_DB_SQS_PORT=4006
 REDIS_PORT=6379
 WATCHTOWER_PORT=6161
 BUDIBASE_ENVIRONMENT=PRODUCTION

View File

@@ -1,5 +1,5 @@
 {
-  "version": "2.23.4",
+  "version": "2.23.10",
   "npmClient": "yarn",
   "packages": [
     "packages/*",

View File

@@ -9,10 +9,7 @@
   },
   "targetDefaults": {
     "build": {
-      "inputs": [
-        "{workspaceRoot}/scripts/build.js",
-        "{workspaceRoot}/lerna.json"
-      ]
+      "inputs": ["{workspaceRoot}/scripts/*", "{workspaceRoot}/lerna.json"]
     }
   }
 }

View File

@@ -56,9 +56,10 @@
     "dev:noserver": "yarn run kill-builder && lerna run --stream dev:stack:up --ignore @budibase/account-portal-server && lerna run --stream dev --ignore @budibase/backend-core --ignore @budibase/server --ignore @budibase/worker --ignore=@budibase/account-portal-ui --ignore @budibase/account-portal-server",
     "dev:server": "yarn run kill-server && lerna run --stream dev --scope @budibase/worker --scope @budibase/server",
     "dev:accountportal": "yarn kill-accountportal && lerna run dev --stream --scope @budibase/account-portal-ui --scope @budibase/account-portal-server",
+    "dev:camunda": "./scripts/deploy-camunda.sh",
     "dev:all": "yarn run kill-all && lerna run --stream dev",
     "dev:built": "yarn run kill-all && cd packages/server && yarn dev:stack:up && cd ../../ && lerna run --stream dev:built",
-    "dev:docker": "yarn build --scope @budibase/server --scope @budibase/worker && docker-compose -f hosting/docker-compose.build.yaml -f hosting/docker-compose.dev.yaml --env-file hosting/.env up --build --scale proxy-service=0",
+    "dev:docker": "./scripts/devDocker.sh",
     "test": "REUSE_CONTAINERS=1 lerna run --concurrency 1 --stream test --stream",
     "lint:eslint": "eslint packages --max-warnings=0",
     "lint:prettier": "prettier --check \"packages/**/*.{js,ts,svelte}\" && prettier --write \"examples/**/*.{js,ts,svelte}\"",

@@ -1 +1 @@
-Subproject commit bd0e01d639ec3b2547e7c859a1c43b622dce8344
+Subproject commit c167c331ff9b8161fc18e2ecbaaf1ea5815ba964

View File

@@ -8,19 +8,9 @@ import {
   SearchParams,
   WithRequired,
 } from "@budibase/types"
+import { dataFilters } from "@budibase/shared-core"
 
-const QUERY_START_REGEX = /\d[0-9]*:/g
-
-export function removeKeyNumbering(key: any): string {
-  if (typeof key === "string" && key.match(QUERY_START_REGEX) != null) {
-    const parts = key.split(":")
-    // remove the number
-    parts.shift()
-    return parts.join(":")
-  } else {
-    return key
-  }
-}
+export const removeKeyNumbering = dataFilters.removeKeyNumbering
 
 /**
  * Class to build lucene query URLs.
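
Note: removeKeyNumbering keeps its old behaviour across this move; per the removed implementation above, it strips a leading "<digits>:" prefix from a filter key:

    removeKeyNumbering("1:email") // => "email"
    removeKeyNumbering("12:a:b")  // => "a:b" (only the numeric prefix is dropped)
    removeKeyNumbering("email")   // => "email" (no prefix, returned unchanged)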

View File

@@ -107,7 +107,7 @@ const environment = {
   ENCRYPTION_KEY: process.env.ENCRYPTION_KEY,
   API_ENCRYPTION_KEY: getAPIEncryptionKey(),
   COUCH_DB_URL: process.env.COUCH_DB_URL || "http://localhost:4005",
-  COUCH_DB_SQL_URL: process.env.COUCH_DB_SQL_URL || "http://localhost:4984",
+  COUCH_DB_SQL_URL: process.env.COUCH_DB_SQL_URL || "http://localhost:4006",
   COUCH_DB_USERNAME: process.env.COUCH_DB_USER,
   COUCH_DB_PASSWORD: process.env.COUCH_DB_PASSWORD,
   GOOGLE_CLIENT_ID: process.env.GOOGLE_CLIENT_ID,

View File

@@ -115,7 +115,6 @@ class InMemoryQueue implements Partial<Queue> {
    * a JSON message as this is required by Bull.
    * @param repeat serves no purpose for the import queue.
    */
-  // eslint-disable-next-line no-unused-vars
   async add(data: any, opts?: JobOptions) {
     const jobId = opts?.jobId?.toString()
     if (jobId && this._queuedJobIds.has(jobId)) {
@@ -166,8 +165,7 @@ class InMemoryQueue implements Partial<Queue> {
     return []
   }
 
-  // eslint-disable-next-line @typescript-eslint/no-unused-vars
-  async removeJobs(pattern: string) {
+  async removeJobs(_pattern: string) {
     // no-op
   }
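
Note: the deleted disable comments are made redundant by the "argsIgnorePattern": "^_" option added to @typescript-eslint/no-unused-vars earlier in this commit. A minimal sketch of the convention:

    // An underscore prefix marks a deliberately unused parameter and
    // silences the lint rule without a disable comment:
    async function removeJobs(_pattern: string): Promise<void> {
      // no-op
    }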

View File

@@ -50,6 +50,8 @@ type CreateAdminUserOpts = {
   hashPassword?: boolean
   requirePassword?: boolean
   skipPasswordValidation?: boolean
+  firstName?: string
+  lastName?: string
 }
 
 type FeatureFns = { isSSOEnforced: FeatureFn; isAppBuildersEnabled: FeatureFn }
@@ -517,6 +519,8 @@ export class UserDB {
         global: true,
       },
       tenantId,
+      firstName: opts?.firstName,
+      lastName: opts?.lastName,
     }
 
     if (opts?.ssoId) {
       user.ssoId = opts.ssoId

View File

@@ -17,8 +17,8 @@ import {
   ContextUser,
   CouchFindOptions,
   DatabaseQueryOpts,
-  SearchQuery,
-  SearchQueryOperators,
+  SearchFilters,
+  SearchFilterOperator,
   SearchUsersRequest,
   User,
 } from "@budibase/types"
@@ -44,11 +44,11 @@ function removeUserPassword(users: User | User[]) {
   return users
 }
 
-export function isSupportedUserSearch(query: SearchQuery) {
+export function isSupportedUserSearch(query: SearchFilters) {
   const allowed = [
-    { op: SearchQueryOperators.STRING, key: "email" },
-    { op: SearchQueryOperators.EQUAL, key: "_id" },
-    { op: SearchQueryOperators.ONE_OF, key: "_id" },
+    { op: SearchFilterOperator.STRING, key: "email" },
+    { op: SearchFilterOperator.EQUAL, key: "_id" },
+    { op: SearchFilterOperator.ONE_OF, key: "_id" },
   ]
   for (let [key, operation] of Object.entries(query)) {
     if (typeof operation !== "object") {
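
Note: assuming the usual SearchFilters shape of { operator: { field: value } }, the allow-list above admits queries such as these (illustrative values, not from the source):

    const byEmail = { string: { email: "ro" } }        // prefix match on email
    const byId = { equal: { _id: "us_abc123" } }       // exact _id match
    const byIds = { oneOf: { _id: ["us_a", "us_b"] } } // _id in a set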

View File

@@ -14,6 +14,7 @@
     notifications,
     Checkbox,
     DatePicker,
+    DrawerContent,
   } from "@budibase/bbui"
   import CreateWebhookModal from "components/automation/Shared/CreateWebhookModal.svelte"
   import { automationStore, selectedAutomation, tables } from "stores/builder"
@@ -37,7 +38,7 @@
     hbAutocomplete,
     EditorModes,
   } from "components/common/CodeEditor"
-  import FilterDrawer from "components/design/settings/controls/FilterEditor/FilterDrawer.svelte"
+  import FilterBuilder from "components/design/settings/controls/FilterEditor/FilterBuilder.svelte"
   import { LuceneUtils, Utils } from "@budibase/frontend-core"
   import {
     getSchemaForDatasourcePlus,
@@ -442,8 +443,8 @@
           <Button cta slot="buttons" on:click={() => saveFilters(key)}>
             Save
           </Button>
-          <FilterDrawer
-            slot="body"
+          <DrawerContent slot="body">
+            <FilterBuilder
               {filters}
               {bindings}
               {schemaFields}
@@ -451,6 +452,7 @@
               panel={AutomationBindingPanel}
               on:change={e => (tempFilters = e.detail)}
             />
+          </DrawerContent>
         </Drawer>
       {:else if value.customType === "password"}
         <Input

View File

@@ -1,7 +1,7 @@
 <script>
   import { createEventDispatcher } from "svelte"
   import { ActionButton, Modal, ModalContent } from "@budibase/bbui"
-  import FilterDrawer from "components/design/settings/controls/FilterEditor/FilterDrawer.svelte"
+  import FilterBuilder from "components/design/settings/controls/FilterEditor/FilterBuilder.svelte"
 
   export let schema
   export let filters
@@ -40,7 +40,7 @@
   onConfirm={() => dispatch("change", tempValue)}
 >
   <div class="wrapper">
-    <FilterDrawer
+    <FilterBuilder
       allowBindings={false}
       {filters}
       {schemaFields}

View File

@@ -13,6 +13,7 @@
     Layout,
     AbsTooltip,
   } from "@budibase/bbui"
+  import { SWITCHABLE_TYPES, ValidColumnNameRegex } from "@budibase/shared-core"
   import { createEventDispatcher, getContext, onMount } from "svelte"
   import { cloneDeep } from "lodash/fp"
   import { tables, datasources } from "stores/builder"
@@ -20,11 +21,6 @@
   import {
     FIELDS,
     RelationshipType,
-    ALLOWABLE_STRING_OPTIONS,
-    ALLOWABLE_NUMBER_OPTIONS,
-    ALLOWABLE_STRING_TYPES,
-    ALLOWABLE_NUMBER_TYPES,
-    SWITCHABLE_TYPES,
     PrettyRelationshipDefinitions,
     DB_TYPE_EXTERNAL,
   } from "constants/backend"
@@ -33,21 +29,20 @@
   import ModalBindableInput from "components/common/bindings/ModalBindableInput.svelte"
   import { getBindings } from "components/backend/DataTable/formula"
   import JSONSchemaModal from "./JSONSchemaModal.svelte"
-  import { ValidColumnNameRegex } from "@budibase/shared-core"
   import { FieldType, FieldSubtype, SourceName } from "@budibase/types"
   import RelationshipSelector from "components/common/RelationshipSelector.svelte"
   import { RowUtils } from "@budibase/frontend-core"
   import ServerBindingPanel from "components/common/bindings/ServerBindingPanel.svelte"
 
-  const AUTO_TYPE = FIELDS.AUTO.type
-  const FORMULA_TYPE = FIELDS.FORMULA.type
-  const LINK_TYPE = FIELDS.LINK.type
-  const STRING_TYPE = FIELDS.STRING.type
-  const NUMBER_TYPE = FIELDS.NUMBER.type
-  const JSON_TYPE = FIELDS.JSON.type
-  const DATE_TYPE = FIELDS.DATETIME.type
-  const USER_TYPE = FIELDS.USER.subtype
-  const USERS_TYPE = FIELDS.USERS.subtype
+  const AUTO_TYPE = FieldType.AUTO
+  const FORMULA_TYPE = FieldType.FORMULA
+  const LINK_TYPE = FieldType.LINK
+  const STRING_TYPE = FieldType.STRING
+  const NUMBER_TYPE = FieldType.NUMBER
+  const JSON_TYPE = FieldType.JSON
+  const DATE_TYPE = FieldType.DATETIME
+  const USER_TYPE = FieldSubtype.USER
+  const USERS_TYPE = FieldSubtype.USERS
 
   const dispatch = createEventDispatcher()
   const PROHIBITED_COLUMN_NAMES = ["type", "_id", "_rev", "tableId"]
@@ -61,8 +56,8 @@
   let primaryDisplay
   let indexes = [...($tables.selected.indexes || [])]
   let isCreating = undefined
-  let relationshipPart1 = PrettyRelationshipDefinitions.Many
-  let relationshipPart2 = PrettyRelationshipDefinitions.One
+  let relationshipPart1 = PrettyRelationshipDefinitions.MANY
+  let relationshipPart2 = PrettyRelationshipDefinitions.ONE
   let relationshipTableIdPrimary = null
   let relationshipTableIdSecondary = null
   let table = $tables.selected
@@ -175,7 +170,7 @@
   $: typeEnabled =
     !originalName ||
     (originalName &&
-      SWITCHABLE_TYPES.indexOf(editableColumn.type) !== -1 &&
+      SWITCHABLE_TYPES[field.type] &&
       !editableColumn?.autocolumn)
 
   const fieldDefinitions = Object.values(FIELDS).reduce(
@@ -367,16 +362,15 @@
   }
 
   function getAllowedTypes() {
-    if (
-      originalName &&
-      ALLOWABLE_STRING_TYPES.indexOf(editableColumn.type) !== -1
-    ) {
-      return ALLOWABLE_STRING_OPTIONS
-    } else if (
-      originalName &&
-      ALLOWABLE_NUMBER_TYPES.indexOf(editableColumn.type) !== -1
-    ) {
-      return ALLOWABLE_NUMBER_OPTIONS
+    if (originalName) {
+      const possibleTypes = (
+        SWITCHABLE_TYPES[field.type] || [editableColumn.type]
+      ).map(t => t.toLowerCase())
+      return Object.entries(FIELDS)
+        .filter(([fieldType]) =>
+          possibleTypes.includes(fieldType.toLowerCase())
+        )
+        .map(([_, fieldDefinition]) => fieldDefinition)
     }
 
     const isUsers =
@@ -632,7 +626,7 @@
           />
         </div>
       </div>
-    {:else if editableColumn.type === FieldType.LINK}
+    {:else if editableColumn.type === FieldType.LINK && !editableColumn.autocolumn}
       <RelationshipSelector
         bind:relationshipPart1
         bind:relationshipPart2

View File

@@ -27,14 +27,6 @@
       return []
     }
   }
-
-  async function deleteAttachments(fileList) {
-    try {
-      return await API.deleteBuilderAttachments(fileList)
-    } catch (error) {
-      return []
-    }
-  }
 </script>
 
 <Dropzone
@@ -42,6 +34,5 @@
   {label}
   {...$$restProps}
   {processFiles}
-  {deleteAttachments}
   {handleFileTooLarge}
 />

View File

@@ -9,7 +9,6 @@
     "",
     requiredValidator
   )
-  // eslint-disable-next-line no-unused-vars
   const [repeatPassword, _, repeatTouched] = createValidationStore(
     "",
     requiredValidator

View File

@@ -0,0 +1,110 @@
<script>
import { Select, Label } from "@budibase/bbui"
import { onMount } from "svelte"
import DrawerBindableInput from "components/common/bindings/DrawerBindableInput.svelte"
import { FieldType } from "@budibase/types"
import { tables, viewsV2 } from "stores/builder"
export let parameters
export let bindings = []
const fileOptions = [
{
label: "Attachment",
value: "attachment",
},
{
label: "URL",
value: "url",
},
]
$: tableOptions = $tables.list.map(table => ({
label: table.name,
resourceId: table._id,
schema: table.schema,
}))
$: viewOptions = $viewsV2.list.map(view => ({
label: view.name,
resourceId: view.id,
schema: view.schema,
}))
$: options = [...(tableOptions || []), ...(viewOptions || [])]
$: selectedTable =
parameters.tableId && options.find(t => t.resourceId === parameters.tableId)
$: attachmentColumns =
selectedTable &&
Object.values(selectedTable.schema).filter(c =>
[FieldType.ATTACHMENTS, FieldType.ATTACHMENT_SINGLE].includes(c.type)
)
onMount(() => {
if (!parameters.type) {
parameters.type = "attachment"
}
})
</script>
<div class="root">
<Label small>File</Label>
<Select
placeholder={null}
bind:value={parameters.type}
options={fileOptions}
/>
{#if parameters.type === "attachment"}
<Label>Table</Label>
<Select
placeholder={null}
bind:value={parameters.tableId}
{options}
getOptionLabel={table => table.label}
getOptionValue={table => table.resourceId}
/>
<Label small>Column</Label>
<Select
disabled={!attachmentColumns?.length}
placeholder={parameters.tableId && !attachmentColumns?.length
? "This table has no attachment columns"
: undefined}
bind:value={parameters.attachmentColumn}
options={attachmentColumns?.map(c => c.name)}
/>
<Label small>Row ID</Label>
<DrawerBindableInput
{bindings}
title="Row ID"
value={parameters.rowId}
on:change={value => (parameters.rowId = value.detail)}
/>
{:else}
<Label small>URL</Label>
<DrawerBindableInput
title="URL"
{bindings}
value={parameters.url}
on:change={value => (parameters.url = value.detail)}
/>
<Label small>File name</Label>
<DrawerBindableInput
title="File name"
{bindings}
value={parameters.fileName}
on:change={value => (parameters.fileName = value.detail)}
/>
{/if}
</div>
<style>
.root {
display: grid;
column-gap: var(--spacing-l);
row-gap: var(--spacing-s);
grid-template-columns: 60px 1fr;
align-items: center;
max-width: 800px;
margin: 0 auto;
}
</style>

View File

@@ -22,3 +22,4 @@ export { default as PromptUser } from "./PromptUser.svelte"
 export { default as OpenSidePanel } from "./OpenSidePanel.svelte"
 export { default as CloseSidePanel } from "./CloseSidePanel.svelte"
 export { default as ClearRowSelection } from "./ClearRowSelection.svelte"
+export { default as DownloadFile } from "./DownloadFile.svelte"

View File

@@ -161,6 +161,11 @@
       "name": "Clear Row Selection",
       "type": "data",
       "component": "ClearRowSelection"
+    },
+    {
+      "name": "Download File",
+      "type": "data",
+      "component": "DownloadFile"
     }
   ]
 }

View File

@@ -0,0 +1,84 @@
<script>
import DrawerBindableInput from "components/common/bindings/DrawerBindableInput.svelte"
import ClientBindingPanel from "components/common/bindings/ClientBindingPanel.svelte"
import { dataFilters } from "@budibase/shared-core"
import { FilterBuilder } from "@budibase/frontend-core"
import { createEventDispatcher, onMount } from "svelte"
export let schemaFields
export let filters = []
export let bindings = []
export let panel = ClientBindingPanel
export let allowBindings = true
export let datasource
const dispatch = createEventDispatcher()
let rawFilters
$: parseFilters(rawFilters)
$: dispatch("change", enrichFilters(rawFilters))
// Remove field key prefixes and determine which behaviours to use
const parseFilters = filters => {
rawFilters = (filters || []).map(filter => {
const { field } = filter
let newFilter = { ...filter }
delete newFilter.allOr
newFilter.field = dataFilters.removeKeyNumbering(field)
return newFilter
})
}
onMount(() => {
parseFilters(filters)
rawFilters.forEach(filter => {
filter.type =
schemaFields.find(field => field.name === filter.field)?.type ||
filter.type
})
})
// Add field key prefixes and a special metadata filter object to indicate
// how to handle filter behaviour
const enrichFilters = rawFilters => {
let count = 1
return rawFilters
.filter(filter => filter.field)
.map(filter => ({
...filter,
field: `${count++}:${filter.field}`,
}))
.concat(...rawFilters.filter(filter => !filter.field))
}
</script>
<FilterBuilder
bind:filters={rawFilters}
behaviourFilters={true}
{schemaFields}
{datasource}
{allowBindings}
>
<div slot="filtering-hero-content" />
<DrawerBindableInput
let:filter
slot="binding"
disabled={filter.noValue}
title={filter.field}
value={filter.value}
placeholder="Value"
{panel}
{bindings}
on:change={event => {
const indexToUpdate = rawFilters.findIndex(f => f.id === filter.id)
rawFilters[indexToUpdate] = {
...rawFilters[indexToUpdate],
value: event.detail,
}
}}
/>
</FilterBuilder>

View File

@@ -1,8 +1,14 @@
 <script>
-  import { notifications, ActionButton, Button, Drawer } from "@budibase/bbui"
+  import {
+    notifications,
+    ActionButton,
+    Button,
+    Drawer,
+    DrawerContent,
+  } from "@budibase/bbui"
   import { createEventDispatcher } from "svelte"
   import { getDatasourceForProvider, getSchemaForDatasource } from "dataBinding"
-  import FilterDrawer from "./FilterDrawer.svelte"
+  import FilterBuilder from "./FilterBuilder.svelte"
   import { selectedScreen } from "stores/builder"
 
   const dispatch = createEventDispatcher()
@@ -40,14 +46,15 @@
 </div>
 
 <Drawer bind:this={drawer} title="Filtering" on:drawerHide on:drawerShow>
   <Button cta slot="buttons" on:click={saveFilter}>Save</Button>
-  <FilterDrawer
-    slot="body"
+  <DrawerContent slot="body">
+    <FilterBuilder
       filters={value}
       {bindings}
       {schemaFields}
       {datasource}
       on:change={e => (tempValue = e.detail)}
     />
+  </DrawerContent>
 </Drawer>
 
 <style>

View File

@@ -8,6 +8,7 @@
     Input,
     notifications,
   } from "@budibase/bbui"
+  import { downloadFile } from "@budibase/frontend-core"
   import { createValidationStore } from "helpers/validation/yup"
 
   export let app
@@ -55,40 +56,13 @@
   const exportApp = async () => {
     const id = published ? app.prodId : app.devId
     const url = `/api/backups/export?appId=${id}`
-    try {
-      await downloadFile(url, {
+    const downloaded = await downloadFile(url, {
       excludeRows: !includeInternalTablesRows,
       encryptPassword: password,
     })
-    }
-  }
-
-  async function downloadFile(url, body) {
-    try {
-      const response = await fetch(url, {
-        method: "POST",
-        headers: {
-          "Content-Type": "application/json",
-        },
-        body: JSON.stringify(body),
-      })
-
-      if (response.ok) {
-        const contentDisposition = response.headers.get("Content-Disposition")
-        const matches = /filename[^;=\n]*=((['"]).*?\2|[^;\n]*)/.exec(
-          contentDisposition
-        )
-        const filename = matches[1].replace(/['"]/g, "")
-        const url = URL.createObjectURL(await response.blob())
-        const link = document.createElement("a")
-        link.href = url
-        link.download = filename
-        link.click()
-        URL.revokeObjectURL(url)
-      } else {
+    if (!downloaded) {
       notifications.error("Error exporting the app.")
     }
   } catch (error) {
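
Note: the inline helper removed above now lives in @budibase/frontend-core, and judging from the call site it reports success rather than notifying internally. A sketch of that contract, assumed rather than copied from the real implementation:

    async function downloadFile(url: string, body: object): Promise<boolean> {
      const response = await fetch(url, {
        method: "POST",
        headers: { "Content-Type": "application/json" },
        body: JSON.stringify(body),
      })
      if (!response.ok) {
        return false // the caller decides how to surface the error
      }
      // Trigger a browser download from the response blob, as the removed
      // code did (Content-Disposition filename parsing omitted here):
      const blobUrl = URL.createObjectURL(await response.blob())
      const link = document.createElement("a")
      link.href = blobUrl
      link.download = "export"
      link.click()
      URL.revokeObjectURL(blobUrl)
      return true
    }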

View File

@@ -202,26 +202,6 @@ export const PrettyRelationshipDefinitions = {
   ONE: "One row",
 }
 
-export const ALLOWABLE_STRING_OPTIONS = [
-  FIELDS.STRING,
-  FIELDS.OPTIONS,
-  FIELDS.LONGFORM,
-  FIELDS.BARCODEQR,
-]
-export const ALLOWABLE_STRING_TYPES = ALLOWABLE_STRING_OPTIONS.map(
-  opt => opt.type
-)
-
-export const ALLOWABLE_NUMBER_OPTIONS = [FIELDS.NUMBER, FIELDS.BOOLEAN]
-export const ALLOWABLE_NUMBER_TYPES = ALLOWABLE_NUMBER_OPTIONS.map(
-  opt => opt.type
-)
-
-export const SWITCHABLE_TYPES = [
-  ...ALLOWABLE_STRING_TYPES,
-  ...ALLOWABLE_NUMBER_TYPES,
-]
-
 export const BUDIBASE_INTERNAL_DB_ID = INTERNAL_TABLE_SOURCE_ID
 export const DEFAULT_BB_DATASOURCE_ID = "datasource_internal_bb_default"
 export const BUDIBASE_DATASOURCE_TYPE = "budibase"

View File

@@ -4,8 +4,6 @@ import {
   createDatasourceCreationStore,
 } from "./datasourceCreation"
 import { get } from "svelte/store"
-// eslint-disable-next-line no-unused-vars
-import { shouldIntegrationFetchTableNames } from "stores/selectors"
 
 vi.mock("stores/selectors", () => ({
   shouldIntegrationFetchTableNames: vi.fn(),

View File

@@ -1,9 +1,9 @@
 import { it, expect, describe, beforeEach, vi } from "vitest"
 import { createOnGoogleAuthStore } from "./onGoogleAuth"
 import { writable, get } from "svelte/store"
-// eslint-disable-next-line no-unused-vars
+// eslint-disable-next-line
 import { params } from "@roxi/routify"
-// eslint-disable-next-line no-unused-vars
+// eslint-disable-next-line
 import { integrations } from "stores/builder"
 import { IntegrationTypes } from "constants/backend"

View File

@@ -1,8 +1,8 @@
 import { FieldType } from "@budibase/types"
+import { SWITCHABLE_TYPES } from "@budibase/shared-core"
 import { get, writable, derived } from "svelte/store"
 import { cloneDeep } from "lodash/fp"
 import { API } from "api"
-import { SWITCHABLE_TYPES } from "constants/backend"
 
 export function createTablesStore() {
   const store = writable({
@@ -64,7 +64,7 @@ export function createTablesStore() {
       if (
         oldField != null &&
         oldField?.type !== field.type &&
-        SWITCHABLE_TYPES.indexOf(oldField?.type) === -1
+        !SWITCHABLE_TYPES[oldField?.type]?.includes(field.type)
       ) {
         updatedTable.schema[key] = oldField
       }
@@ -148,12 +148,6 @@ export function createTablesStore() {
       if (indexes) {
         draft.indexes = indexes
       }
-      // Add object to indicate if column is being added
-      if (draft.schema[field.name] === undefined) {
-        draft._add = {
-          name: field.name,
-        }
-      }
       draft.schema = {
         ...draft.schema,
         [field.name]: cloneDeep(field),
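
Note: SWITCHABLE_TYPES is now a lookup map rather than a flat array. Its shape is assumed here from the usage above and from the ALLOWABLE_* constants removed from constants/backend in this commit, not copied from shared-core:

    // Each key is a column type; the value lists the types it may switch to.
    const SWITCHABLE_TYPES: Partial<Record<FieldType, FieldType[]>> = {
      [FieldType.STRING]: [
        FieldType.STRING,
        FieldType.OPTIONS,
        FieldType.LONGFORM,
        FieldType.BARCODEQR,
      ],
      [FieldType.NUMBER]: [FieldType.NUMBER, FieldType.BOOLEAN],
    }

    // The guard above then keeps the old column definition unless the old
    // type is allowed to switch to the incoming one:
    // !SWITCHABLE_TYPES[oldField?.type]?.includes(field.type)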

View File

@@ -105,7 +105,6 @@ export function getAppService(path: string) {
 }
 
 export function updateDockerComposeService(
-  // eslint-disable-next-line no-unused-vars
   updateFn: (service: DockerCompose) => void
 ) {
   const opts = ["docker-compose.yaml", "docker-compose.yml"]

View File

@@ -119,6 +119,7 @@
   {/if}
 </svelte:head>
 
+{#if dataLoaded}
 <div
   id="spectrum-root"
   lang="en"
@@ -253,6 +254,7 @@
     {/if}
   </div>
   <KeyboardManager />
+{/if}
 
 <style>
   #spectrum-root {

View File

@@ -157,6 +157,11 @@
     width: 100%;
   }
 
+  /* Use normal theme colors for links when using a top nav */
+  .dropdown:not(.left) .sublinks a {
+    color: var(--spectrum-alias-text-color);
+  }
+
   /* Left dropdowns */
   .dropdown.left .sublinks-wrapper {
     display: none;

View File

@ -1,216 +1,14 @@
<script> <script>
import { import { FilterBuilder } from "@budibase/frontend-core"
Body,
Button,
Combobox,
DatePicker,
Icon,
Input,
Layout,
Select,
} from "@budibase/bbui"
import { generate } from "shortid"
import { LuceneUtils, Constants } from "@budibase/frontend-core"
import { getContext } from "svelte"
export let schemaFields export let schemaFields
export let filters = [] export let filters = []
export let datasource export let datasource
const context = getContext("context")
const BannedTypes = ["link", "attachment", "json"]
$: fieldOptions = (schemaFields ?? [])
.filter(
field =>
!BannedTypes.includes(field.type) ||
(field.type === "formula" && field.formulaType === "static")
)
.map(field => ({
label: field.displayName || field.name,
value: field.name,
}))
const addFilter = () => {
filters = [
...filters,
{
id: generate(),
field: null,
operator: Constants.OperatorOptions.Equals.value,
value: null,
valueType: "Value",
},
]
}
const removeFilter = id => {
filters = filters.filter(field => field.id !== id)
}
const duplicateFilter = id => {
const existingFilter = filters.find(filter => filter.id === id)
const duplicate = { ...existingFilter, id: generate() }
filters = [...filters, duplicate]
}
const onFieldChange = (expression, field) => {
// Update the field type
expression.type = schemaFields.find(x => x.name === field)?.type
expression.externalType = schemaFields.find(
x => x.name === field
)?.externalType
// Ensure a valid operator is set
const validOperators = LuceneUtils.getValidOperatorsForType(
{ type: expression.type },
expression.field,
datasource
).map(x => x.value)
if (!validOperators.includes(expression.operator)) {
expression.operator =
validOperators[0] ?? Constants.OperatorOptions.Equals.value
onOperatorChange(expression, expression.operator)
}
// if changed to an array, change default value to empty array
const idx = filters.findIndex(x => x.field === field)
if (expression.type === "array") {
filters[idx].value = []
} else {
filters[idx].value = null
}
}
const onOperatorChange = (expression, operator) => {
const noValueOptions = [
Constants.OperatorOptions.Empty.value,
Constants.OperatorOptions.NotEmpty.value,
]
expression.noValue = noValueOptions.includes(operator)
if (expression.noValue) {
expression.value = null
}
}
const getFieldOptions = field => {
const schema = schemaFields.find(x => x.name === field)
return schema?.constraints?.inclusion || []
}
const getSchema = filter => {
return schemaFields.find(field => field.name === filter.field)
}
</script> </script>
<div class="container" class:mobile={$context.device.mobile}> <FilterBuilder bind:filters {schemaFields} {datasource} filtersLabel={null}>
<Layout noPadding> <div slot="filtering-hero-content">
<Body size="S">
{#if !filters?.length}
Add your first filter expression.
{:else}
Results are filtered to only those which match all of the following Results are filtered to only those which match all of the following
constraints. constraints.
{/if}
</Body>
{#if filters?.length}
<div class="fields">
{#each filters as filter}
<Select
bind:value={filter.field}
options={fieldOptions}
on:change={e => onFieldChange(filter, e.detail)}
placeholder="Column"
/>
<Select
disabled={!filter.field}
options={LuceneUtils.getValidOperatorsForType(
{ type: filter.type, subtype: filter.subtype },
filter.field,
datasource
)}
bind:value={filter.operator}
on:change={e => onOperatorChange(filter, e.detail)}
placeholder={null}
/>
{#if ["string", "longform", "number", "bigint", "formula"].includes(filter.type)}
<Input disabled={filter.noValue} bind:value={filter.value} />
{:else if ["options", "array"].includes(filter.type)}
<Combobox
disabled={filter.noValue}
options={getFieldOptions(filter.field)}
bind:value={filter.value}
/>
{:else if filter.type === "boolean"}
<Combobox
disabled={filter.noValue}
options={[
{ label: "True", value: "true" },
{ label: "False", value: "false" },
]}
bind:value={filter.value}
/>
{:else if filter.type === "datetime"}
<DatePicker
disabled={filter.noValue}
enableTime={!getSchema(filter).dateOnly}
timeOnly={getSchema(filter).timeOnly}
bind:value={filter.value}
/>
{:else}
<Input disabled />
{/if}
<div class="controls">
<Icon
name="Duplicate"
hoverable
size="S"
on:click={() => duplicateFilter(filter.id)}
/>
<Icon
name="Close"
hoverable
size="S"
on:click={() => removeFilter(filter.id)}
/>
</div> </div>
{/each} </FilterBuilder>
</div>
{/if}
<div>
<Button icon="AddCircle" size="M" secondary on:click={addFilter}>
Add filter
</Button>
</div>
</Layout>
</div>
<style>
.container {
width: 100%;
max-width: 1000px;
margin: 0 auto;
}
.fields {
display: grid;
column-gap: var(--spacing-l);
row-gap: var(--spacing-s);
align-items: center;
grid-template-columns: 1fr 120px 1fr auto auto;
}
.controls {
display: contents;
}
.container.mobile .fields {
grid-template-columns: 1fr;
}
.container.mobile .controls {
display: flex;
flex-direction: row;
justify-content: flex-start;
align-items: center;
padding: var(--spacing-s) 0;
gap: var(--spacing-s);
}
</style>

View File

@ -58,17 +58,6 @@
} }
} }
const deleteAttachments = async fileList => {
try {
return await API.deleteAttachments({
keys: fileList,
tableId: formContext?.dataSource?.tableId,
})
} catch (error) {
return []
}
}
const handleChange = e => { const handleChange = e => {
const value = fieldApiMapper.set(e.detail) const value = fieldApiMapper.set(e.detail)
const changed = fieldApi.setValue(value) const changed = fieldApi.setValue(value)
@ -98,7 +87,6 @@
error={fieldState.error} error={fieldState.error}
on:change={handleChange} on:change={handleChange}
{processFiles} {processFiles}
{deleteAttachments}
{handleFileTooLarge} {handleFileTooLarge}
{handleTooManyFiles} {handleTooManyFiles}
{maximum} {maximum}

View File

@ -1,5 +1,6 @@
import { get } from "svelte/store" import { get } from "svelte/store"
import download from "downloadjs" import download from "downloadjs"
import { downloadStream } from "@budibase/frontend-core"
import { import {
routeStore, routeStore,
builderStore, builderStore,
@ -400,6 +401,51 @@ const closeSidePanelHandler = () => {
sidePanelStore.actions.close() sidePanelStore.actions.close()
} }
const downloadFileHandler = async action => {
const { url, fileName } = action.parameters
try {
const { type } = action.parameters
if (type === "attachment") {
const { tableId, rowId, attachmentColumn } = action.parameters
const res = await API.downloadAttachment(
tableId,
rowId,
attachmentColumn,
{ suppressErrors: true }
)
await downloadStream(res)
return
}
const response = await fetch(url)
if (!response.ok) {
notificationStore.actions.error(
`Failed to download from '${url}'. Server returned status code: ${response.status}`
)
return
}
const objectUrl = URL.createObjectURL(await response.blob())
const link = document.createElement("a")
link.href = objectUrl
link.download = fileName
link.click()
URL.revokeObjectURL(objectUrl)
} catch (e) {
console.error(e)
if (e.status) {
notificationStore.actions.error(
`Failed to download from '${url}'. Server returned status code: ${e.status}`
)
} else {
notificationStore.actions.error(`Failed to download from '${url}'.`)
}
}
}
const handlerMap = { const handlerMap = {
["Fetch Row"]: fetchRowHandler, ["Fetch Row"]: fetchRowHandler,
["Save Row"]: saveRowHandler, ["Save Row"]: saveRowHandler,
@ -418,6 +464,7 @@ const handlerMap = {
["Prompt User"]: promptUserHandler, ["Prompt User"]: promptUserHandler,
["Open Side Panel"]: openSidePanelHandler, ["Open Side Panel"]: openSidePanelHandler,
["Close Side Panel"]: closeSidePanelHandler, ["Close Side Panel"]: closeSidePanelHandler,
["Download File"]: downloadFileHandler,
} }
const confirmTextMap = { const confirmTextMap = {

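The non-attachment branch of downloadFileHandler boils down to the standard blob-URL download pattern. A minimal standalone sketch, assuming a browser environment (the function name is illustrative):

async function downloadViaBlobUrl(url: string, fileName: string): Promise<void> {
  const response = await fetch(url)
  if (!response.ok) {
    throw new Error(`Failed to download from '${url}': ${response.status}`)
  }
  // Materialise the body as a blob, expose it via a temporary object URL,
  // trigger a synthetic click, then revoke the URL to free the blob.
  const objectUrl = URL.createObjectURL(await response.blob())
  const link = document.createElement("a")
  link.href = objectUrl
  link.download = fileName
  link.click()
  URL.revokeObjectURL(objectUrl)
}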
View File

@ -0,0 +1,17 @@
{
"compilerOptions": {
"allowJs": true,
"strict": true,
"outDir": "dist",
"paths": {
"@budibase/*": [
"../*/src/index.ts",
"../*/src/index.js",
"../*",
"../../node_modules/@budibase/*"
],
"*": ["./src/*"]
}
},
"include": ["src/**/*"]
}

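A hedged example of how the "@budibase/*" path mapping above resolves (the package and symbol names are illustrative):

// With the mapping above, an import like this is resolved against, in order:
// ../shared-core/src/index.ts, ../shared-core/src/index.js, ../shared-core,
// and finally ../../node_modules/@budibase/shared-core.
import { helpers } from "@budibase/shared-core"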
View File

@ -11,6 +11,7 @@
"@budibase/types": "0.0.0", "@budibase/types": "0.0.0",
"dayjs": "^1.10.8", "dayjs": "^1.10.8",
"lodash": "4.17.21", "lodash": "4.17.21",
"shortid": "2.2.15",
"socket.io-client": "^4.6.1" "socket.io-client": "^4.6.1"
} }
} }

View File

@ -61,31 +61,17 @@ export const buildAttachmentEndpoints = API => {
}) })
return { publicUrl } return { publicUrl }
}, },
/** /**
* Deletes attachments from the bucket. * Download an attachment from a row given its column name.
* @param keys the attachments to delete * @param datasourceId the ID of the datasource to download from
* @param tableId the associated table ID * @param rowId the ID of the row to download from
* @param columnName the column name to download
*/ */
deleteAttachments: async ({ keys, tableId }) => { downloadAttachment: async (datasourceId, rowId, columnName, options) => {
return await API.post({ return await API.get({
url: `/api/attachments/${tableId}/delete`, url: `/api/${datasourceId}/rows/${rowId}/attachment/${columnName}`,
body: { parseResponse: response => response,
keys, suppressErrors: options?.suppressErrors,
},
})
},
/**
* Deletes attachments from the builder bucket.
* @param keys the attachments to delete
*/
deleteBuilderAttachments: async keys => {
return await API.post({
url: `/api/attachments/delete`,
body: {
keys,
},
}) })
}, },
} }

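A hedged usage sketch of the reworked endpoint, pairing it with downloadStream the way the new Download File action does (all IDs and the column name are illustrative):

import { downloadStream } from "@budibase/frontend-core"

// API is assumed to be a client built with the attachment endpoints above.
async function saveRowAttachment(API: any): Promise<void> {
  const res = await API.downloadAttachment(
    "datasource_123",
    "row_456",
    "file",
    { suppressErrors: true }
  )
  // parseResponse above returns the raw Response, so it can be streamed
  // straight to a browser download.
  await downloadStream(res)
}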
View File

@ -4,33 +4,36 @@
Button, Button,
Combobox, Combobox,
DatePicker, DatePicker,
DrawerContent,
Icon, Icon,
Input, Input,
Label,
Layout, Layout,
Multiselect,
Select, Select,
Label,
Multiselect,
} from "@budibase/bbui" } from "@budibase/bbui"
import DrawerBindableInput from "components/common/bindings/DrawerBindableInput.svelte" import { FieldType, SearchFilterOperator } from "@budibase/types"
import ClientBindingPanel from "components/common/bindings/ClientBindingPanel.svelte"
import { generate } from "shortid" import { generate } from "shortid"
import { Constants, LuceneUtils } from "@budibase/frontend-core" import { LuceneUtils, Constants } from "@budibase/frontend-core"
import { getFields } from "helpers/searchFields" import { getContext } from "svelte"
import { FieldType } from "@budibase/types"
import { createEventDispatcher, onMount } from "svelte"
import FilterUsers from "./FilterUsers.svelte" import FilterUsers from "./FilterUsers.svelte"
const { OperatorOptions } = Constants
export let schemaFields export let schemaFields
export let filters = [] export let filters = []
export let bindings = []
export let panel = ClientBindingPanel
export let allowBindings = true
export let datasource export let datasource
export let behaviourFilters = false
export let allowBindings = false
export let filtersLabel = "Filters"
$: matchAny = filters?.find(filter => filter.operator === "allOr") != null
$: onEmptyFilter =
filters?.find(filter => filter.onEmptyFilter)?.onEmptyFilter ?? "all"
$: fieldFilters = filters.filter(
filter => filter.operator !== "allOr" && !filter.onEmptyFilter
)
const dispatch = createEventDispatcher()
const { OperatorOptions } = Constants
const KeyedFieldRegex = /\d[0-9]*:/g
const behaviourOptions = [ const behaviourOptions = [
{ value: "and", label: "Match all filters" }, { value: "and", label: "Match all filters" },
{ value: "or", label: "Match any filter" }, { value: "or", label: "Match any filter" },
@ -40,62 +43,18 @@
{ value: "none", label: "Return no rows" }, { value: "none", label: "Return no rows" },
] ]
let rawFilters const context = getContext("context")
let matchAny = false
let onEmptyFilter = "all"
$: parseFilters(filters) $: fieldOptions = (schemaFields ?? [])
$: dispatch("change", enrichFilters(rawFilters, matchAny, onEmptyFilter)) .filter(field => getValidOperatorsForType(field).length)
$: enrichedSchemaFields = getFields(schemaFields || [], { allowLinks: true }) .map(field => ({
$: fieldOptions = enrichedSchemaFields.map(field => field.name) || [] label: field.displayName || field.name,
$: valueTypeOptions = allowBindings ? ["Value", "Binding"] : ["Value"] value: field.name,
// Remove field key prefixes and determine which behaviours to use
const parseFilters = filters => {
matchAny = filters?.find(filter => filter.operator === "allOr") != null
onEmptyFilter =
filters?.find(filter => filter.onEmptyFilter)?.onEmptyFilter ?? "all"
rawFilters = (filters || [])
.filter(filter => filter.operator !== "allOr" && !filter.onEmptyFilter)
.map(filter => {
const { field } = filter
let newFilter = { ...filter }
delete newFilter.allOr
if (typeof field === "string" && field.match(KeyedFieldRegex) != null) {
const parts = field.split(":")
parts.shift()
newFilter.field = parts.join(":")
}
return newFilter
})
}
onMount(() => {
parseFilters(filters)
rawFilters.forEach(filter => {
filter.type =
schemaFields.find(field => field.name === filter.field)?.type ||
filter.type
})
})
// Add field key prefixes and a special metadata filter object to indicate
// how to handle filter behaviour
const enrichFilters = (rawFilters, matchAny, onEmptyFilter) => {
let count = 1
return rawFilters
.filter(filter => filter.field)
.map(filter => ({
...filter,
field: `${count++}:${filter.field}`,
})) }))
.concat(matchAny ? [{ operator: "allOr" }] : [])
.concat([{ onEmptyFilter }])
}
const addFilter = () => { const addFilter = () => {
rawFilters = [ filters = [
...rawFilters, ...(filters || []),
{ {
id: generate(), id: generate(),
field: null, field: null,
@ -107,22 +66,57 @@
} }
const removeFilter = id => { const removeFilter = id => {
rawFilters = rawFilters.filter(field => field.id !== id) filters = filters.filter(field => field.id !== id)
} }
const duplicateFilter = id => { const duplicateFilter = id => {
const existingFilter = rawFilters.find(filter => filter.id === id) const existingFilter = filters.find(filter => filter.id === id)
const duplicate = { ...existingFilter, id: generate() } const duplicate = { ...existingFilter, id: generate() }
rawFilters = [...rawFilters, duplicate] filters = [...filters, duplicate]
}
const onFieldChange = filter => {
const previousType = filter.type
sanitizeTypes(filter)
sanitizeOperator(filter)
sanitizeValue(filter, previousType)
}
const onOperatorChange = filter => {
sanitizeOperator(filter)
sanitizeValue(filter, filter.type)
}
const onValueTypeChange = filter => {
sanitizeValue(filter)
}
const getFieldOptions = field => {
const schema = schemaFields.find(x => x.name === field)
return schema?.constraints?.inclusion || []
} }
const getSchema = filter => { const getSchema = filter => {
return enrichedSchemaFields.find(field => field.name === filter.field) return schemaFields.find(field => field.name === filter.field)
} }
const getValidOperatorsForType = filter => {
if (!filter?.field && !filter?.name) {
return []
}
return LuceneUtils.getValidOperatorsForType(
filter,
filter.field || filter.name,
datasource
)
}
$: valueTypeOptions = allowBindings ? ["Value", "Binding"] : ["Value"]
const sanitizeTypes = filter => { const sanitizeTypes = filter => {
// Update type based on field // Update type based on field
const fieldSchema = enrichedSchemaFields.find(x => x.name === filter.field) const fieldSchema = schemaFields.find(x => x.name === filter.field)
filter.type = fieldSchema?.type filter.type = fieldSchema?.type
filter.subtype = fieldSchema?.subtype filter.subtype = fieldSchema?.subtype
@ -154,68 +148,53 @@
// Ensure array values are properly set and cleared // Ensure array values are properly set and cleared
if (Array.isArray(filter.value)) { if (Array.isArray(filter.value)) {
if (filter.valueType !== "Value" || filter.type !== "array") { if (filter.valueType !== "Value" || filter.type !== FieldType.ARRAY) {
filter.value = null filter.value = null
} }
} else if (filter.type === "array" && filter.valueType === "Value") { } else if (
filter.type === FieldType.ARRAY &&
filter.valueType === "Value"
) {
filter.value = [] filter.value = []
} else if ( } else if (
previousType !== filter.type && previousType !== filter.type &&
(previousType === FieldType.BB_REFERENCE || (previousType === FieldType.BB_REFERENCE ||
filter.type === FieldType.BB_REFERENCE) filter.type === FieldType.BB_REFERENCE)
) { ) {
filter.value = filter.type === "array" ? [] : null filter.value = filter.type === FieldType.ARRAY ? [] : null
} }
} }
const onFieldChange = filter => { function handleAllOr(option) {
const previousType = filter.type filters = filters.filter(f => f.operator !== "allOr")
sanitizeTypes(filter) if (option === "or") {
sanitizeOperator(filter) filters.push({ operator: "allOr" })
sanitizeValue(filter, previousType) }
} }
const onOperatorChange = filter => { function handleOnEmptyFilter(value) {
sanitizeOperator(filter) filters = filters?.filter(filter => !filter.onEmptyFilter)
sanitizeValue(filter, filter.type) filters.push({ onEmptyFilter: value })
}
const onValueTypeChange = filter => {
sanitizeValue(filter)
}
const getFieldOptions = field => {
const schema = enrichedSchemaFields.find(x => x.name === field)
return schema?.constraints?.inclusion || []
}
const getValidOperatorsForType = filter => {
if (!filter?.field) {
return []
}
return LuceneUtils.getValidOperatorsForType(
{ type: filter.type, subtype: filter.subtype },
filter.field,
datasource
)
} }
</script> </script>
<DrawerContent> <div class="container" class:mobile={$context?.device?.mobile}>
<div class="container">
<Layout noPadding> <Layout noPadding>
{#if !rawFilters?.length} {#if fieldOptions?.length}
<Body size="S">Add your first filter expression.</Body> <Body size="S">
{#if !fieldFilters?.length}
Add your first filter expression.
{:else} {:else}
<div class="fields"> <slot name="filtering-hero-content" />
{#if behaviourFilters}
<div class="behaviour-filters">
<Select <Select
label="Behaviour" label="Behaviour"
value={matchAny ? "or" : "and"} value={matchAny ? "or" : "and"}
options={behaviourOptions} options={behaviourOptions}
getOptionLabel={opt => opt.label} getOptionLabel={opt => opt.label}
getOptionValue={opt => opt.value} getOptionValue={opt => opt.value}
on:change={e => (matchAny = e.detail === "or")} on:change={e => handleAllOr(e.detail)}
placeholder={null} placeholder={null}
/> />
{#if datasource?.type === "table"} {#if datasource?.type === "table"}
@ -225,17 +204,23 @@
options={onEmptyOptions} options={onEmptyOptions}
getOptionLabel={opt => opt.label} getOptionLabel={opt => opt.label}
getOptionValue={opt => opt.value} getOptionValue={opt => opt.value}
on:change={e => (onEmptyFilter = e.detail)} on:change={e => handleOnEmptyFilter(e.detail)}
placeholder={null} placeholder={null}
/> />
{/if} {/if}
</div> </div>
{/if}
{/if}
</Body>
{#if fieldFilters?.length}
<div> <div>
{#if filtersLabel}
<div class="filter-label"> <div class="filter-label">
<Label>Filters</Label> <Label>{filtersLabel}</Label>
</div> </div>
<div class="fields"> {/if}
{#each rawFilters as filter} <div class="fields" class:with-bindings={allowBindings}>
{#each fieldFilters as filter}
<Select <Select
bind:value={filter.field} bind:value={filter.field}
options={fieldOptions} options={fieldOptions}
@ -249,6 +234,7 @@
on:change={() => onOperatorChange(filter)} on:change={() => onOperatorChange(filter)}
placeholder={null} placeholder={null}
/> />
{#if allowBindings}
<Select <Select
disabled={filter.noValue || !filter.field} disabled={filter.noValue || !filter.field}
options={valueTypeOptions} options={valueTypeOptions}
@ -256,31 +242,24 @@
on:change={() => onValueTypeChange(filter)} on:change={() => onValueTypeChange(filter)}
placeholder={null} placeholder={null}
/> />
{#if filter.field && filter.valueType === "Binding"} {/if}
<DrawerBindableInput {#if allowBindings && filter.field && filter.valueType === "Binding"}
disabled={filter.noValue} <slot name="binding" {filter} />
title={filter.field} {:else if [FieldType.STRING, FieldType.LONGFORM, FieldType.NUMBER, FieldType.BIGINT, FieldType.FORMULA].includes(filter.type)}
value={filter.value}
placeholder="Value"
{panel}
{bindings}
on:change={event => (filter.value = event.detail)}
/>
{:else if ["string", "longform", "number", "bigint", "formula"].includes(filter.type)}
<Input disabled={filter.noValue} bind:value={filter.value} /> <Input disabled={filter.noValue} bind:value={filter.value} />
{:else if filter.type === "array" || (filter.type === "options" && filter.operator === "oneOf")} {:else if filter.type === FieldType.ARRAY || (filter.type === FieldType.OPTIONS && filter.operator === SearchFilterOperator.ONE_OF)}
<Multiselect <Multiselect
disabled={filter.noValue} disabled={filter.noValue}
options={getFieldOptions(filter.field)} options={getFieldOptions(filter.field)}
bind:value={filter.value} bind:value={filter.value}
/> />
{:else if filter.type === "options"} {:else if filter.type === FieldType.OPTIONS}
<Combobox <Combobox
disabled={filter.noValue} disabled={filter.noValue}
options={getFieldOptions(filter.field)} options={getFieldOptions(filter.field)}
bind:value={filter.value} bind:value={filter.value}
/> />
{:else if filter.type === "boolean"} {:else if filter.type === FieldType.BOOLEAN}
<Combobox <Combobox
disabled={filter.noValue} disabled={filter.noValue}
options={[ options={[
@ -289,7 +268,7 @@
]} ]}
bind:value={filter.value} bind:value={filter.value}
/> />
{:else if filter.type === "datetime"} {:else if filter.type === FieldType.DATETIME}
<DatePicker <DatePicker
disabled={filter.noValue} disabled={filter.noValue}
enableTime={!getSchema(filter)?.dateOnly} enableTime={!getSchema(filter)?.dateOnly}
@ -306,8 +285,9 @@
disabled={filter.noValue} disabled={filter.noValue}
/> />
{:else} {:else}
<DrawerBindableInput disabled /> <Input disabled />
{/if} {/if}
<div class="controls">
<Icon <Icon
name="Duplicate" name="Duplicate"
hoverable hoverable
@ -320,18 +300,21 @@
size="S" size="S"
on:click={() => removeFilter(filter.id)} on:click={() => removeFilter(filter.id)}
/> />
</div>
{/each} {/each}
</div> </div>
</div> </div>
{/if} {/if}
<div class="bottom"> <div>
<Button icon="AddCircle" size="M" secondary on:click={addFilter}> <Button icon="AddCircle" size="M" secondary on:click={addFilter}>
Add filter Add filter
</Button> </Button>
</div> </div>
{:else}
<Body size="S">None of the table columns can be used for filtering.</Body>
{/if}
</Layout> </Layout>
</div> </div>
</DrawerContent>
<style> <style>
.container { .container {
@ -339,22 +322,42 @@
max-width: 1000px; max-width: 1000px;
margin: 0 auto; margin: 0 auto;
} }
.fields { .fields {
display: grid; display: grid;
column-gap: var(--spacing-l); column-gap: var(--spacing-l);
row-gap: var(--spacing-s); row-gap: var(--spacing-s);
align-items: center; align-items: center;
grid-template-columns: 1fr 120px 1fr auto auto;
}
.fields.with-bindings {
grid-template-columns: minmax(150px, 1fr) 170px 120px minmax(150px, 1fr) 16px 16px; grid-template-columns: minmax(150px, 1fr) 170px 120px minmax(150px, 1fr) 16px 16px;
} }
.controls {
display: contents;
}
.container.mobile .fields {
grid-template-columns: 1fr;
}
.container.mobile .controls {
display: flex;
flex-direction: row;
justify-content: flex-start;
align-items: center;
padding: var(--spacing-s) 0;
gap: var(--spacing-s);
}
.filter-label { .filter-label {
margin-bottom: var(--spacing-s); margin-bottom: var(--spacing-s);
} }
.bottom { .behaviour-filters {
display: flex; display: grid;
justify-content: space-between; column-gap: var(--spacing-l);
row-gap: var(--spacing-s);
align-items: center; align-items: center;
grid-template-columns: minmax(150px, 1fr) 170px 120px minmax(150px, 1fr) 16px 16px;
} }
</style> </style>

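The behaviour selects above persist their state as marker objects inside the filters array itself, which is why fieldFilters has to strip them back out. A minimal sketch of that convention, with the Filter type pared down for illustration:

type Filter = {
  field?: string
  operator?: string
  onEmptyFilter?: string
}

function setMatchAny(filters: Filter[], matchAny: boolean): Filter[] {
  // Strip any existing marker, then re-add it only for "match any".
  const rest = filters.filter(f => f.operator !== "allOr")
  return matchAny ? [...rest, { operator: "allOr" }] : rest
}

function setOnEmptyFilter(filters: Filter[], value: string): Filter[] {
  const rest = filters.filter(f => !f.onEmptyFilter)
  return [...rest, { onEmptyFilter: value }]
}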
View File

@ -1,9 +1,9 @@
<script> <script>
import { Select, Multiselect } from "@budibase/bbui" import { Select, Multiselect } from "@budibase/bbui"
import { fetchData } from "@budibase/frontend-core" import { fetchData } from "@budibase/frontend-core"
import { createAPIClient } from "../api"
import { API } from "api" export let API = createAPIClient()
export let value = null export let value = null
export let disabled export let disabled
export let multiselect = false export let multiselect = false

View File

@ -61,14 +61,6 @@
} }
} }
const deleteAttachments = async fileList => {
try {
return await API.deleteBuilderAttachments(fileList)
} catch (error) {
return []
}
}
onMount(() => { onMount(() => {
api = { api = {
focus: () => open(), focus: () => open(),
@ -101,7 +93,6 @@
on:change={e => onChange(e.detail)} on:change={e => onChange(e.detail)}
maximum={maximum || schema.constraints?.length?.maximum} maximum={maximum || schema.constraints?.length?.maximum}
{processFiles} {processFiles}
{deleteAttachments}
{handleFileTooLarge} {handleFileTooLarge}
/> />
</div> </div>

View File

@ -6,3 +6,4 @@ export { default as UserAvatars } from "./UserAvatars.svelte"
export { default as Updating } from "./Updating.svelte" export { default as Updating } from "./Updating.svelte"
export { Grid } from "./grid" export { Grid } from "./grid"
export { default as ClientAppSkeleton } from "./ClientAppSkeleton.svelte" export { default as ClientAppSkeleton } from "./ClientAppSkeleton.svelte"
export { default as FilterBuilder } from "./FilterBuilder.svelte"

View File

@ -348,8 +348,7 @@ export default class DataFetch {
* Determine the feature flag for this datasource definition * Determine the feature flag for this datasource definition
* @param definition * @param definition
*/ */
// eslint-disable-next-line no-unused-vars determineFeatureFlags(_definition) {
determineFeatureFlags(definition) {
return { return {
supportsSearch: false, supportsSearch: false,
supportsSort: false, supportsSort: false,

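The rename above relies on the convention that underscore-prefixed parameters are exempt from the unused-vars lint rule, which is what lets the per-line eslint-disable comment go. A minimal sketch, assuming that convention (types simplified for illustration):

function determineFeatureFlags(_definition: unknown) {
  return {
    supportsSearch: false,
    supportsSort: false,
  }
}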
View File

@ -1,3 +1,5 @@
const extractFileNameRegex = /filename[^;=\n]*=((['"]).*?\2|[^;\n]*)/
export function downloadText(filename, text) { export function downloadText(filename, text) {
if (typeof text === "object") { if (typeof text === "object") {
text = JSON.stringify(text) text = JSON.stringify(text)
@ -17,9 +19,7 @@ export async function downloadStream(streamResponse) {
const contentDisposition = streamResponse.headers.get("Content-Disposition") const contentDisposition = streamResponse.headers.get("Content-Disposition")
const matches = /filename[^;=\n]*=((['"]).*?\2|[^;\n]*)/.exec( const matches = extractFileNameRegex.exec(contentDisposition)
contentDisposition
)
const filename = matches[1].replace(/['"]/g, "") const filename = matches[1].replace(/['"]/g, "")
@ -34,3 +34,33 @@ export async function downloadStream(streamResponse) {
URL.revokeObjectURL(blobUrl) URL.revokeObjectURL(blobUrl)
} }
export async function downloadFile(url, body) {
const response = await fetch(url, {
method: "POST",
headers: {
"Content-Type": "application/json",
},
body: JSON.stringify(body),
})
if (!response.ok) {
return false
} else {
const contentDisposition = response.headers.get("Content-Disposition")
const matches = extractFileNameRegex.exec(contentDisposition)
const filename = matches[1].replace(/['"]/g, "")
const url = URL.createObjectURL(await response.blob())
const link = document.createElement("a")
link.href = url
link.download = filename
link.click()
URL.revokeObjectURL(url)
return true
}
}

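A quick worked check of extractFileNameRegex against a typical header value (the header string is illustrative):

const extractFileNameRegex = /filename[^;=\n]*=((['"]).*?\2|[^;\n]*)/

const header = 'attachment; filename="report.zip"'
const matches = extractFileNameRegex.exec(header)
// Group 1 captures the (possibly quoted) filename; both downloadStream and
// downloadFile then strip the quotes.
const filename = matches?.[1].replace(/['"]/g, "") // -> "report.zip"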
@ -1 +1 @@
Subproject commit ef186d00241f96037f9fd34d7a3826041977ab3a Subproject commit 06b1064f7e2f7cac5d4bef2ee999796a2a1f0f2c

View File

@ -50,10 +50,10 @@
"@apidevtools/swagger-parser": "10.0.3", "@apidevtools/swagger-parser": "10.0.3",
"@budibase/backend-core": "0.0.0", "@budibase/backend-core": "0.0.0",
"@budibase/client": "0.0.0", "@budibase/client": "0.0.0",
"@budibase/frontend-core": "0.0.0",
"@budibase/pro": "0.0.0", "@budibase/pro": "0.0.0",
"@budibase/shared-core": "0.0.0", "@budibase/shared-core": "0.0.0",
"@budibase/string-templates": "0.0.0", "@budibase/string-templates": "0.0.0",
"@budibase/frontend-core": "0.0.0",
"@budibase/types": "0.0.0", "@budibase/types": "0.0.0",
"@bull-board/api": "5.10.2", "@bull-board/api": "5.10.2",
"@bull-board/koa": "5.10.2", "@bull-board/koa": "5.10.2",
@ -63,6 +63,7 @@
"@socket.io/redis-adapter": "^8.2.1", "@socket.io/redis-adapter": "^8.2.1",
"airtable": "0.10.1", "airtable": "0.10.1",
"arangojs": "7.2.0", "arangojs": "7.2.0",
"archiver": "7.0.1",
"aws-sdk": "2.1030.0", "aws-sdk": "2.1030.0",
"bcrypt": "5.1.0", "bcrypt": "5.1.0",
"bcryptjs": "2.4.3", "bcryptjs": "2.4.3",
@ -124,6 +125,7 @@
"@babel/preset-env": "7.16.11", "@babel/preset-env": "7.16.11",
"@swc/core": "1.3.71", "@swc/core": "1.3.71",
"@swc/jest": "0.2.27", "@swc/jest": "0.2.27",
"@types/archiver": "6.0.2",
"@types/global-agent": "2.1.1", "@types/global-agent": "2.1.1",
"@types/google-spreadsheet": "3.1.5", "@types/google-spreadsheet": "3.1.5",
"@types/jest": "29.5.5", "@types/jest": "29.5.5",

View File

@ -4,6 +4,7 @@ services:
# user: sa # user: sa
# database: master # database: master
mssql: mssql:
# platform: linux/amd64
image: bb/mssql image: bb/mssql
build: build:
context: . context: .

View File

@ -320,6 +320,7 @@ async function performAppCreate(ctx: UserCtx<CreateAppRequest, App>) {
"theme", "theme",
"customTheme", "customTheme",
"icon", "icon",
"snippets",
] ]
keys.forEach(key => { keys.forEach(key => {
if (existing[key]) { if (existing[key]) {

View File

@ -36,7 +36,6 @@ import { getDatasourceAndQuery } from "../../../sdk/app/rows/utils"
import { processObjectSync } from "@budibase/string-templates" import { processObjectSync } from "@budibase/string-templates"
import { cloneDeep } from "lodash/fp" import { cloneDeep } from "lodash/fp"
import { db as dbCore } from "@budibase/backend-core" import { db as dbCore } from "@budibase/backend-core"
import AliasTables from "./alias"
import sdk from "../../../sdk" import sdk from "../../../sdk"
import env from "../../../environment" import env from "../../../environment"
@ -120,6 +119,9 @@ async function removeManyToManyRelationships(
endpoint: getEndpoint(tableId, Operation.DELETE), endpoint: getEndpoint(tableId, Operation.DELETE),
body: { [colName]: null }, body: { [colName]: null },
filters, filters,
meta: {
table,
},
}) })
} else { } else {
return [] return []
@ -134,6 +136,9 @@ async function removeOneToManyRelationships(rowId: string, table: Table) {
return getDatasourceAndQuery({ return getDatasourceAndQuery({
endpoint: getEndpoint(tableId, Operation.UPDATE), endpoint: getEndpoint(tableId, Operation.UPDATE),
filters, filters,
meta: {
table,
},
}) })
} else { } else {
return [] return []
@ -249,6 +254,9 @@ export class ExternalRequest<T extends Operation> {
const response = await getDatasourceAndQuery({ const response = await getDatasourceAndQuery({
endpoint: getEndpoint(table._id!, Operation.READ), endpoint: getEndpoint(table._id!, Operation.READ),
filters: buildFilters(rowId, {}, table), filters: buildFilters(rowId, {}, table),
meta: {
table,
},
}) })
if (Array.isArray(response) && response.length > 0) { if (Array.isArray(response) && response.length > 0) {
return response[0] return response[0]
@ -366,36 +374,45 @@ export class ExternalRequest<T extends Operation> {
) { ) {
continue continue
} }
let tableId: string | undefined, let relatedTableId: string | undefined,
lookupField: string | undefined, lookupField: string | undefined,
fieldName: string | undefined fieldName: string | undefined
if (isManyToMany(field)) { if (isManyToMany(field)) {
tableId = field.through relatedTableId = field.through
lookupField = primaryKey lookupField = primaryKey
fieldName = field.throughTo || primaryKey fieldName = field.throughTo || primaryKey
} else if (isManyToOne(field)) { } else if (isManyToOne(field)) {
tableId = field.tableId relatedTableId = field.tableId
lookupField = field.foreignKey lookupField = field.foreignKey
fieldName = field.fieldName fieldName = field.fieldName
} }
if (!tableId || !lookupField || !fieldName) { if (!relatedTableId || !lookupField || !fieldName) {
throw new Error( throw new Error(
"Unable to lookup relationships - undefined column properties." "Unable to lookup relationships - undefined column properties."
) )
} }
const { tableName: relatedTableName } = breakExternalTableId(tableId) const { tableName: relatedTableName } =
breakExternalTableId(relatedTableId)
// @ts-ignore // @ts-ignore
const linkPrimaryKey = this.tables[relatedTableName].primary[0] const linkPrimaryKey = this.tables[relatedTableName].primary[0]
if (!lookupField || !row[lookupField]) { if (!lookupField || !row[lookupField]) {
continue continue
} }
const endpoint = getEndpoint(relatedTableId, Operation.READ)
const relatedTable = this.tables[endpoint.entityId]
if (!relatedTable) {
throw new Error("unable to find related table")
}
const response = await getDatasourceAndQuery({ const response = await getDatasourceAndQuery({
endpoint: getEndpoint(tableId, Operation.READ), endpoint: endpoint,
filters: { filters: {
equal: { equal: {
[fieldName]: row[lookupField], [fieldName]: row[lookupField],
}, },
}, },
meta: {
table: relatedTable,
},
}) })
// this is the response from knex if no rows found // this is the response from knex if no rows found
const rows: Row[] = const rows: Row[] =
@ -403,7 +420,11 @@ export class ExternalRequest<T extends Operation> {
const storeTo = isManyToMany(field) const storeTo = isManyToMany(field)
? field.throughFrom || linkPrimaryKey ? field.throughFrom || linkPrimaryKey
: fieldName : fieldName
related[storeTo] = { rows, isMany: isManyToMany(field), tableId } related[storeTo] = {
rows,
isMany: isManyToMany(field),
tableId: relatedTableId,
}
} }
return related return related
} }
@ -471,6 +492,9 @@ export class ExternalRequest<T extends Operation> {
// if we're doing many relationships then we're writing, only one response // if we're doing many relationships then we're writing, only one response
body, body,
filters: buildFilters(id, {}, linkTable), filters: buildFilters(id, {}, linkTable),
meta: {
table: linkTable,
},
}) })
) )
} else { } else {
@ -618,7 +642,7 @@ export class ExternalRequest<T extends Operation> {
if (env.SQL_ALIASING_DISABLE) { if (env.SQL_ALIASING_DISABLE) {
response = await getDatasourceAndQuery(json) response = await getDatasourceAndQuery(json)
} else { } else {
const aliasing = new AliasTables(Object.keys(this.tables)) const aliasing = new sdk.rows.AliasTables(Object.keys(this.tables))
response = await aliasing.queryWithAliasing(json) response = await aliasing.queryWithAliasing(json)
} }

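The recurring edit across this file is the new meta.table field carried on every datasource query, so the SQL layer knows the full definition of the table it is targeting. A simplified sketch of the request shape; the QueryRequest type here is a pared-down stand-in, not Budibase's real query type, and the values are illustrative:

type QueryRequest = {
  endpoint: { datasourceId: string; entityId: string; operation: string }
  filters?: Record<string, any>
  meta?: { table: { name: string; primary: string[] } }
}

const request: QueryRequest = {
  endpoint: { datasourceId: "datasource_123", entityId: "tasks", operation: "READ" },
  filters: { equal: { id: 1 } },
  // New in this diff: the target table travels with the request so the
  // query layer can resolve aliases against its schema.
  meta: { table: { name: "tasks", primary: ["id"] } },
}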
View File

@ -1,4 +1,8 @@
import stream from "stream"
import archiver from "archiver"
import { quotas } from "@budibase/pro" import { quotas } from "@budibase/pro"
import { objectStore } from "@budibase/backend-core"
import * as internal from "./internal" import * as internal from "./internal"
import * as external from "./external" import * as external from "./external"
import { isExternalTableID } from "../../../integrations/utils" import { isExternalTableID } from "../../../integrations/utils"
@ -9,10 +13,12 @@ import {
DeleteRows, DeleteRows,
ExportRowsRequest, ExportRowsRequest,
ExportRowsResponse, ExportRowsResponse,
FieldType,
GetRowResponse, GetRowResponse,
PatchRowRequest, PatchRowRequest,
PatchRowResponse, PatchRowResponse,
Row, Row,
RowAttachment,
RowSearchParams, RowSearchParams,
SearchRowRequest, SearchRowRequest,
SearchRowResponse, SearchRowResponse,
@ -251,3 +257,59 @@ export const exportRows = async (
ctx.attachment(fileName) ctx.attachment(fileName)
ctx.body = apiFileReturn(content) ctx.body = apiFileReturn(content)
} }
export async function downloadAttachment(ctx: UserCtx) {
const { columnName } = ctx.params
const tableId = utils.getTableId(ctx)
const row = await pickApi(tableId).find(ctx)
const table = await sdk.tables.getTable(tableId)
const columnSchema = table.schema[columnName]
if (!columnSchema) {
ctx.throw(400, `'${columnName}' is not a valid column`)
}
const columnType = columnSchema.type
if (
columnType !== FieldType.ATTACHMENTS &&
columnType !== FieldType.ATTACHMENT_SINGLE
) {
ctx.throw(404, `'${columnName}' is not a valid attachment column`)
}
const attachments: RowAttachment[] =
columnType === FieldType.ATTACHMENTS ? row[columnName] : [row[columnName]]
if (!attachments?.length) {
ctx.throw(404)
}
if (attachments.length === 1) {
const attachment = attachments[0]
ctx.attachment(attachment.name)
ctx.body = await objectStore.getReadStream(
objectStore.ObjectStoreBuckets.APPS,
attachment.key
)
} else {
const passThrough = new stream.PassThrough()
const archive = archiver.create("zip")
archive.pipe(passThrough)
for (const attachment of attachments) {
const attachmentStream = await objectStore.getReadStream(
objectStore.ObjectStoreBuckets.APPS,
attachment.key
)
archive.append(attachmentStream, { name: attachment.name })
}
const displayName = row[table.primaryDisplay || "_id"]
ctx.attachment(`${displayName}_${columnName}.zip`)
archive.finalize()
ctx.body = passThrough
ctx.type = "zip"
}
}

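The multi-attachment branch above streams a zip without buffering it in memory: each object-store stream is appended to an archiver instance that is piped through a PassThrough Koa can serve. A condensed sketch of that pattern (the helper name and entry shape are illustrative):

import stream from "stream"
import archiver from "archiver"

function zipStreams(
  entries: { name: string; data: NodeJS.ReadableStream }[]
): stream.PassThrough {
  const passThrough = new stream.PassThrough()
  const archive = archiver.create("zip")
  archive.pipe(passThrough)
  for (const entry of entries) {
    archive.append(entry.data, { name: entry.name })
  }
  // finalize() flushes asynchronously; consumers can start reading immediately.
  archive.finalize()
  return passThrough
}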
View File

@ -62,12 +62,12 @@ export function basicProcessing({
row, row,
table, table,
isLinked, isLinked,
internal, sqs,
}: { }: {
row: Row row: Row
table: Table table: Table
isLinked: boolean isLinked: boolean
internal?: boolean sqs?: boolean
}): Row { }): Row {
const thisRow: Row = {} const thisRow: Row = {}
// filter the row down to what is actually the row (not joined) // filter the row down to what is actually the row (not joined)
@ -84,12 +84,13 @@ export function basicProcessing({
thisRow[fieldName] = value thisRow[fieldName] = value
} }
} }
if (!internal) { if (!sqs) {
thisRow._id = generateIdForRow(row, table, isLinked) thisRow._id = generateIdForRow(row, table, isLinked)
thisRow.tableId = table._id thisRow.tableId = table._id
thisRow._rev = "rev" thisRow._rev = "rev"
} else { } else {
for (let internalColumn of CONSTANT_INTERNAL_ROW_COLS) { const columns = Object.keys(table.schema)
for (let internalColumn of [...CONSTANT_INTERNAL_ROW_COLS, ...columns]) {
thisRow[internalColumn] = extractFieldValue({ thisRow[internalColumn] = extractFieldValue({
row, row,
tableName: table._id!, tableName: table._id!,

View File

@ -51,11 +51,11 @@ export async function updateRelationshipColumns(
continue continue
} }
let linked = await basicProcessing({ let linked = basicProcessing({
row, row,
table: linkedTable, table: linkedTable,
isLinked: true, isLinked: true,
internal: opts?.sqs, sqs: opts?.sqs,
}) })
if (!linked._id) { if (!linked._id) {
continue continue

View File

@ -132,6 +132,7 @@ export async function sqlOutputProcessing(
let rowId = row._id let rowId = row._id
if (opts?.sqs) { if (opts?.sqs) {
rowId = getInternalRowId(row, table) rowId = getInternalRowId(row, table)
row._id = rowId
} else if (!rowId) { } else if (!rowId) {
rowId = generateIdForRow(row, table) rowId = generateIdForRow(row, table)
row._id = rowId row._id = rowId
@ -153,7 +154,7 @@ export async function sqlOutputProcessing(
row, row,
table, table,
isLinked: false, isLinked: false,
internal: opts?.sqs, sqs: opts?.sqs,
}), }),
table table
) )
@ -167,7 +168,8 @@ export async function sqlOutputProcessing(
tables, tables,
row, row,
finalRows, finalRows,
relationships relationships,
opts
) )
} }

View File

@ -127,13 +127,6 @@ export const uploadFile = async function (
) )
} }
export const deleteObjects = async function (ctx: Ctx) {
ctx.body = await objectStore.deleteFiles(
ObjectStoreBuckets.APPS,
ctx.request.body.keys
)
}
const requiresMigration = async (ctx: Ctx) => { const requiresMigration = async (ctx: Ctx) => {
const appId = context.getAppId() const appId = context.getAppId()
if (!appId) { if (!appId) {

View File

@ -22,6 +22,7 @@ export async function makeTableRequest(
operation, operation,
}, },
meta: { meta: {
table,
tables, tables,
}, },
table, table,

View File

@ -31,7 +31,6 @@ export async function save(
renaming?: RenameColumn renaming?: RenameColumn
) { ) {
const inputs = ctx.request.body const inputs = ctx.request.body
const adding = inputs?._add
// can't do this right now // can't do this right now
delete inputs.rows delete inputs.rows
const tableId = ctx.request.body._id const tableId = ctx.request.body._id
@ -44,7 +43,7 @@ export async function save(
const { datasource, table } = await sdk.tables.external.save( const { datasource, table } = await sdk.tables.external.save(
datasourceId!, datasourceId!,
inputs, inputs,
{ tableId, renaming, adding } { tableId, renaming }
) )
builderSocket?.emitDatasourceUpdate(ctx, datasource) builderSocket?.emitDatasourceUpdate(ctx, datasource)
return table return table

View File

@ -77,11 +77,6 @@ export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
const renaming = ctx.request.body._rename const renaming = ctx.request.body._rename
const api = pickApi({ table }) const api = pickApi({ table })
// do not pass _rename or _add if saving to CouchDB
if (api === internal) {
delete ctx.request.body._add
delete ctx.request.body._rename
}
let savedTable = await api.save(ctx, renaming) let savedTable = await api.save(ctx, renaming)
if (!table._id) { if (!table._id) {
savedTable = sdk.tables.enrichViewSchemas(savedTable) savedTable = sdk.tables.enrichViewSchemas(savedTable)

View File

@ -16,7 +16,7 @@ export async function save(
ctx: UserCtx<SaveTableRequest, SaveTableResponse>, ctx: UserCtx<SaveTableRequest, SaveTableResponse>,
renaming?: RenameColumn renaming?: RenameColumn
) { ) {
const { rows, ...rest } = ctx.request.body const { _rename, rows, ...rest } = ctx.request.body
let tableToSave: Table = { let tableToSave: Table = {
_id: generateTableID(), _id: generateTableID(),
...rest, ...rest,

View File

@ -77,6 +77,12 @@ router
authorized(PermissionType.TABLE, PermissionLevel.WRITE), authorized(PermissionType.TABLE, PermissionLevel.WRITE),
rowController.exportRows rowController.exportRows
) )
.get(
"/api/:sourceId/rows/:rowId/attachment/:columnName",
paramSubResource("sourceId", "rowId"),
authorized(PermissionType.TABLE, PermissionLevel.READ),
rowController.downloadAttachment
)
router.post( router.post(
"/api/v2/views/:viewId/search", "/api/v2/views/:viewId/search",

View File

@ -32,11 +32,6 @@ router
.get("/builder/:file*", controller.serveBuilder) .get("/builder/:file*", controller.serveBuilder)
.get("/api/assets/client", controller.serveClientLibrary) .get("/api/assets/client", controller.serveClientLibrary)
.post("/api/attachments/process", authorized(BUILDER), controller.uploadFile) .post("/api/attachments/process", authorized(BUILDER), controller.uploadFile)
.post(
"/api/attachments/delete",
authorized(BUILDER),
controller.deleteObjects
)
.post("/api/beta/:feature", controller.toggleBetaUiFeature) .post("/api/beta/:feature", controller.toggleBetaUiFeature)
.post( .post(
"/api/attachments/:tableId/upload", "/api/attachments/:tableId/upload",
@ -44,12 +39,6 @@ router
authorized(PermissionType.TABLE, PermissionLevel.WRITE), authorized(PermissionType.TABLE, PermissionLevel.WRITE),
controller.uploadFile controller.uploadFile
) )
.post(
"/api/attachments/:tableId/delete",
paramResource("tableId"),
authorized(PermissionType.TABLE, PermissionLevel.WRITE),
controller.deleteObjects
)
.get("/app/preview", authorized(BUILDER), controller.serveBuilderPreview) .get("/app/preview", authorized(BUILDER), controller.serveBuilderPreview)
.get("/app/:appUrl/:path*", controller.serveApp) .get("/app/:appUrl/:path*", controller.serveApp)
.get("/:appId/:path*", controller.serveApp) .get("/:appId/:path*", controller.serveApp)

View File

@ -6,7 +6,19 @@ import sdk from "../../../sdk"
import tk from "timekeeper" import tk from "timekeeper"
import { mocks } from "@budibase/backend-core/tests" import { mocks } from "@budibase/backend-core/tests"
import { QueryPreview, SourceName } from "@budibase/types" import {
Datasource,
FieldSchema,
FieldSubtype,
FieldType,
QueryPreview,
RelationshipType,
SourceName,
Table,
TableSchema,
} from "@budibase/types"
import { DatabaseName, getDatasource } from "../../../integrations/tests/utils"
import { tableForDatasource } from "../../../tests/utilities/structures"
tk.freeze(mocks.date.MOCK_DATE) tk.freeze(mocks.date.MOCK_DATE)
@ -223,4 +235,152 @@ describe("/datasources", () => {
}) })
}) })
}) })
describe.each([
[DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)],
[DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
[DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)],
[DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
])("fetch schema (%s)", (_, dsProvider) => {
beforeAll(async () => {
datasource = await config.api.datasource.create(await dsProvider)
})
it("fetching schema will not drop tables or columns", async () => {
const datasourceId = datasource!._id!
const simpleTable = await config.api.table.save(
tableForDatasource(datasource, {
name: "simple",
schema: {
name: {
name: "name",
type: FieldType.STRING,
},
},
})
)
type SupportedSqlTypes =
| FieldType.STRING
| FieldType.BARCODEQR
| FieldType.LONGFORM
| FieldType.OPTIONS
| FieldType.DATETIME
| FieldType.NUMBER
| FieldType.BOOLEAN
| FieldType.FORMULA
| FieldType.BIGINT
| FieldType.BB_REFERENCE
| FieldType.LINK
| FieldType.ARRAY
const fullSchema: {
[type in SupportedSqlTypes]: FieldSchema & { type: type }
} = {
[FieldType.STRING]: {
name: "string",
type: FieldType.STRING,
constraints: {
presence: true,
},
},
[FieldType.LONGFORM]: {
name: "longform",
type: FieldType.LONGFORM,
},
[FieldType.OPTIONS]: {
name: "options",
type: FieldType.OPTIONS,
constraints: {
presence: { allowEmpty: false },
},
},
[FieldType.NUMBER]: {
name: "number",
type: FieldType.NUMBER,
},
[FieldType.BOOLEAN]: {
name: "boolean",
type: FieldType.BOOLEAN,
},
[FieldType.ARRAY]: {
name: "array",
type: FieldType.ARRAY,
},
[FieldType.DATETIME]: {
name: "datetime",
type: FieldType.DATETIME,
dateOnly: true,
timeOnly: false,
},
[FieldType.LINK]: {
name: "link",
type: FieldType.LINK,
tableId: simpleTable._id!,
relationshipType: RelationshipType.ONE_TO_MANY,
fieldName: "link",
},
[FieldType.FORMULA]: {
name: "formula",
type: FieldType.FORMULA,
formula: "any formula",
},
[FieldType.BARCODEQR]: {
name: "barcodeqr",
type: FieldType.BARCODEQR,
},
[FieldType.BIGINT]: {
name: "bigint",
type: FieldType.BIGINT,
},
[FieldType.BB_REFERENCE]: {
name: "bb_reference",
type: FieldType.BB_REFERENCE,
subtype: FieldSubtype.USERS,
},
}
await config.api.table.save(
tableForDatasource(datasource, {
name: "full",
schema: fullSchema,
})
)
const persisted = await config.api.datasource.get(datasourceId)
await config.api.datasource.fetchSchema(datasourceId)
const updated = await config.api.datasource.get(datasourceId)
const expected: Datasource = {
...persisted,
entities:
persisted?.entities &&
Object.entries(persisted.entities).reduce<Record<string, Table>>(
(acc, [tableName, table]) => {
acc[tableName] = {
...table,
primaryDisplay: expect.not.stringMatching(
new RegExp(`^${table.primaryDisplay || ""}$`)
),
schema: Object.entries(table.schema).reduce<TableSchema>(
(acc, [fieldName, field]) => {
acc[fieldName] = expect.objectContaining({
...field,
})
return acc
},
{}
),
}
return acc
},
{}
),
_rev: expect.any(String),
}
expect(updated).toEqual(expected)
})
})
}) })

View File

@ -4,6 +4,7 @@ import {
Query, Query,
QueryPreview, QueryPreview,
SourceName, SourceName,
TableSourceType,
} from "@budibase/types" } from "@budibase/types"
import * as setup from "../utilities" import * as setup from "../utilities"
import { import {
@ -740,12 +741,25 @@ describe.each(
}) })
describe("query through datasource", () => { describe("query through datasource", () => {
it("should be able to query a pg datasource", async () => { it("should be able to query the datasource", async () => {
const entityId = "test_table"
await config.api.datasource.update({
...datasource,
entities: {
[entityId]: {
name: entityId,
schema: {},
type: "table",
sourceId: datasource._id!,
sourceType: TableSourceType.EXTERNAL,
},
},
})
const res = await config.api.datasource.query({ const res = await config.api.datasource.query({
endpoint: { endpoint: {
datasourceId: datasource._id!, datasourceId: datasource._id!,
operation: Operation.READ, operation: Operation.READ,
entityId: "test_table", entityId,
}, },
resource: { resource: {
fields: ["id", "name"], fields: ["id", "name"],

View File

@ -8,6 +8,8 @@ import {
FieldType, FieldType,
RowSearchParams, RowSearchParams,
SearchFilters, SearchFilters,
SortOrder,
SortType,
Table, Table,
TableSchema, TableSchema,
} from "@budibase/types" } from "@budibase/types"
@ -24,6 +26,7 @@ describe.each([
[DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)], [DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
])("/api/:sourceId/search (%s)", (name, dsProvider) => { ])("/api/:sourceId/search (%s)", (name, dsProvider) => {
const isSqs = name === "internal-sqs" const isSqs = name === "internal-sqs"
const isInternal = name === "internal"
const config = setup.getConfig() const config = setup.getConfig()
let envCleanup: (() => void) | undefined let envCleanup: (() => void) | undefined
@ -62,7 +65,32 @@ describe.each([
class SearchAssertion { class SearchAssertion {
constructor(private readonly query: RowSearchParams) {} constructor(private readonly query: RowSearchParams) {}
async toFind(expectedRows: any[]) { // Asserts that the query returns rows matching exactly the set of rows
// passed in. The order of the rows matters. Rows returned in an order
// different to the one passed in will cause the assertion to fail. Extra
// rows returned by the query will also cause the assertion to fail.
async toMatchExactly(expectedRows: any[]) {
const { rows: foundRows } = await config.api.row.search(table._id!, {
...this.query,
tableId: table._id!,
})
// eslint-disable-next-line jest/no-standalone-expect
expect(foundRows).toHaveLength(expectedRows.length)
// eslint-disable-next-line jest/no-standalone-expect
expect(foundRows).toEqual(
expectedRows.map((expectedRow: any) =>
expect.objectContaining(
foundRows.find(foundRow => _.isMatch(foundRow, expectedRow))
)
)
)
}
// Asserts that the query returns rows matching exactly the set of rows
// passed in. The order of the rows is not important, but extra rows will
// cause the assertion to fail.
async toContainExactly(expectedRows: any[]) {
const { rows: foundRows } = await config.api.row.search(table._id!, { const { rows: foundRows } = await config.api.row.search(table._id!, {
...this.query, ...this.query,
tableId: table._id!, tableId: table._id!,
@ -82,8 +110,39 @@ describe.each([
) )
} }
// Asserts that the query returns rows matching the set of rows passed in.
// The order of the rows is not important. Extra rows will not cause the
// assertion to fail.
async toContain(expectedRows: any[]) {
const { rows: foundRows } = await config.api.row.search(table._id!, {
...this.query,
tableId: table._id!,
})
// eslint-disable-next-line jest/no-standalone-expect
expect(foundRows).toEqual(
expect.arrayContaining(
expectedRows.map((expectedRow: any) =>
expect.objectContaining(
foundRows.find(foundRow => _.isMatch(foundRow, expectedRow))
)
)
)
)
}
async toFindNothing() { async toFindNothing() {
await this.toFind([]) await this.toContainExactly([])
}
async toHaveLength(length: number) {
const { rows: foundRows } = await config.api.row.search(table._id!, {
...this.query,
tableId: table._id!,
})
// eslint-disable-next-line jest/no-standalone-expect
expect(foundRows).toHaveLength(length)
} }
} }
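A few illustrative calls showing how the new assertions differ, assuming the seeded rows { name: "foo" } and { name: "bar" } from these tests and the expectQuery/expectSearch helpers defined in this file:

async function assertionExamples() {
  // Order ignored, but extra or missing rows fail:
  await expectQuery({}).toContainExactly([{ name: "bar" }, { name: "foo" }])
  // Order enforced as well:
  await expectSearch({
    query: {},
    sort: "name",
    sortOrder: SortOrder.ASCENDING,
  }).toMatchExactly([{ name: "bar" }, { name: "foo" }])
  // Subset match; extra rows are tolerated:
  await expectQuery({}).toContain([{ name: "foo" }])
}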
@ -105,28 +164,33 @@ describe.each([
describe("misc", () => { describe("misc", () => {
it("should return all if no query is passed", () => it("should return all if no query is passed", () =>
expectSearch({} as RowSearchParams).toFind([ expectSearch({} as RowSearchParams).toContainExactly([
{ name: "foo" }, { name: "foo" },
{ name: "bar" }, { name: "bar" },
])) ]))
it("should return all if empty query is passed", () => it("should return all if empty query is passed", () =>
expectQuery({}).toFind([{ name: "foo" }, { name: "bar" }])) expectQuery({}).toContainExactly([{ name: "foo" }, { name: "bar" }]))
it("should return all if onEmptyFilter is RETURN_ALL", () => it("should return all if onEmptyFilter is RETURN_ALL", () =>
expectQuery({ expectQuery({
onEmptyFilter: EmptyFilterOption.RETURN_ALL, onEmptyFilter: EmptyFilterOption.RETURN_ALL,
}).toFind([{ name: "foo" }, { name: "bar" }])) }).toContainExactly([{ name: "foo" }, { name: "bar" }]))
it("should return nothing if onEmptyFilter is RETURN_NONE", () => it("should return nothing if onEmptyFilter is RETURN_NONE", () =>
expectQuery({ expectQuery({
onEmptyFilter: EmptyFilterOption.RETURN_NONE, onEmptyFilter: EmptyFilterOption.RETURN_NONE,
}).toFindNothing()) }).toFindNothing())
it("should respect limit", () =>
expectSearch({ limit: 1, paginate: true, query: {} }).toHaveLength(1))
}) })
describe("equal", () => { describe("equal", () => {
it("successfully finds a row", () => it("successfully finds a row", () =>
expectQuery({ equal: { name: "foo" } }).toFind([{ name: "foo" }])) expectQuery({ equal: { name: "foo" } }).toContainExactly([
{ name: "foo" },
]))
it("fails to find nonexistent row", () => it("fails to find nonexistent row", () =>
expectQuery({ equal: { name: "none" } }).toFindNothing()) expectQuery({ equal: { name: "none" } }).toFindNothing())
@ -134,15 +198,21 @@ describe.each([
describe("notEqual", () => { describe("notEqual", () => {
it("successfully finds a row", () => it("successfully finds a row", () =>
expectQuery({ notEqual: { name: "foo" } }).toFind([{ name: "bar" }])) expectQuery({ notEqual: { name: "foo" } }).toContainExactly([
{ name: "bar" },
]))
it("fails to find nonexistent row", () => it("fails to find nonexistent row", () =>
expectQuery({ notEqual: { name: "bar" } }).toFind([{ name: "foo" }])) expectQuery({ notEqual: { name: "bar" } }).toContainExactly([
{ name: "foo" },
]))
}) })
describe("oneOf", () => { describe("oneOf", () => {
it("successfully finds a row", () => it("successfully finds a row", () =>
expectQuery({ oneOf: { name: ["foo"] } }).toFind([{ name: "foo" }])) expectQuery({ oneOf: { name: ["foo"] } }).toContainExactly([
{ name: "foo" },
]))
it("fails to find nonexistent row", () => it("fails to find nonexistent row", () =>
expectQuery({ oneOf: { name: ["none"] } }).toFindNothing()) expectQuery({ oneOf: { name: ["none"] } }).toFindNothing())
@ -150,11 +220,69 @@ describe.each([
describe("fuzzy", () => { describe("fuzzy", () => {
it("successfully finds a row", () => it("successfully finds a row", () =>
expectQuery({ fuzzy: { name: "oo" } }).toFind([{ name: "foo" }])) expectQuery({ fuzzy: { name: "oo" } }).toContainExactly([
{ name: "foo" },
]))
it("fails to find nonexistent row", () => it("fails to find nonexistent row", () =>
expectQuery({ fuzzy: { name: "none" } }).toFindNothing()) expectQuery({ fuzzy: { name: "none" } }).toFindNothing())
}) })
describe("range", () => {
it("successfully finds multiple rows", () =>
expectQuery({
range: { name: { low: "a", high: "z" } },
}).toContainExactly([{ name: "bar" }, { name: "foo" }]))
it("successfully finds a row with a high bound", () =>
expectQuery({
range: { name: { low: "a", high: "c" } },
}).toContainExactly([{ name: "bar" }]))
it("successfully finds a row with a low bound", () =>
expectQuery({
range: { name: { low: "f", high: "z" } },
}).toContainExactly([{ name: "foo" }]))
it("successfully finds no rows", () =>
expectQuery({
range: { name: { low: "g", high: "h" } },
}).toFindNothing())
})
describe("sort", () => {
it("sorts ascending", () =>
expectSearch({
query: {},
sort: "name",
sortOrder: SortOrder.ASCENDING,
}).toMatchExactly([{ name: "bar" }, { name: "foo" }]))
it("sorts descending", () =>
expectSearch({
query: {},
sort: "name",
sortOrder: SortOrder.DESCENDING,
}).toMatchExactly([{ name: "foo" }, { name: "bar" }]))
describe("sortType STRING", () => {
it("sorts ascending", () =>
expectSearch({
query: {},
sort: "name",
sortType: SortType.STRING,
sortOrder: SortOrder.ASCENDING,
}).toMatchExactly([{ name: "bar" }, { name: "foo" }]))
it("sorts descending", () =>
expectSearch({
query: {},
sort: "name",
sortType: SortType.STRING,
sortOrder: SortOrder.DESCENDING,
}).toMatchExactly([{ name: "foo" }, { name: "bar" }]))
})
})
})
describe("numbers", () => {
@@ -167,7 +295,7 @@ describe.each([
describe("equal", () => {
it("successfully finds a row", () =>
-expectQuery({ equal: { age: 1 } }).toFind([{ age: 1 }]))
+expectQuery({ equal: { age: 1 } }).toContainExactly([{ age: 1 }]))
it("fails to find nonexistent row", () =>
expectQuery({ equal: { age: 2 } }).toFindNothing())
@@ -175,15 +303,15 @@ describe.each([
describe("notEqual", () => {
it("successfully finds a row", () =>
-expectQuery({ notEqual: { age: 1 } }).toFind([{ age: 10 }]))
+expectQuery({ notEqual: { age: 1 } }).toContainExactly([{ age: 10 }]))
it("fails to find nonexistent row", () =>
-expectQuery({ notEqual: { age: 10 } }).toFind([{ age: 1 }]))
+expectQuery({ notEqual: { age: 10 } }).toContainExactly([{ age: 1 }]))
})
describe("oneOf", () => {
it("successfully finds a row", () =>
-expectQuery({ oneOf: { age: [1] } }).toFind([{ age: 1 }]))
+expectQuery({ oneOf: { age: [1] } }).toContainExactly([{ age: 1 }]))
it("fails to find nonexistent row", () =>
expectQuery({ oneOf: { age: [2] } }).toFindNothing())
@@ -193,17 +321,70 @@ describe.each([
it("successfully finds a row", () =>
expectQuery({
range: { age: { low: 1, high: 5 } },
-}).toFind([{ age: 1 }]))
+}).toContainExactly([{ age: 1 }]))
it("successfully finds multiple rows", () =>
expectQuery({
range: { age: { low: 1, high: 10 } },
-}).toFind([{ age: 1 }, { age: 10 }]))
+}).toContainExactly([{ age: 1 }, { age: 10 }]))
it("successfully finds a row with a high bound", () =>
expectQuery({
range: { age: { low: 5, high: 10 } },
-}).toFind([{ age: 10 }]))
+}).toContainExactly([{ age: 10 }]))
it("successfully finds no rows", () =>
expectQuery({
range: { age: { low: 5, high: 9 } },
}).toFindNothing())
// We never implemented half-open ranges in Lucene.
!isInternal &&
it("can search using just a low value", () =>
expectQuery({
range: { age: { low: 5 } },
}).toContainExactly([{ age: 10 }]))
// We never implemented half-open ranges in Lucene.
!isInternal &&
it("can search using just a high value", () =>
expectQuery({
range: { age: { high: 5 } },
}).toContainExactly([{ age: 1 }]))
})
describe("sort", () => {
it("sorts ascending", () =>
expectSearch({
query: {},
sort: "age",
sortOrder: SortOrder.ASCENDING,
}).toMatchExactly([{ age: 1 }, { age: 10 }]))
it("sorts descending", () =>
expectSearch({
query: {},
sort: "age",
sortOrder: SortOrder.DESCENDING,
}).toMatchExactly([{ age: 10 }, { age: 1 }]))
})
describe("sortType NUMBER", () => {
it("sorts ascending", () =>
expectSearch({
query: {},
sort: "age",
sortType: SortType.NUMBER,
sortOrder: SortOrder.ASCENDING,
}).toMatchExactly([{ age: 1 }, { age: 10 }]))
it("sorts descending", () =>
expectSearch({
query: {},
sort: "age",
sortType: SortType.NUMBER,
sortOrder: SortOrder.DESCENDING,
}).toMatchExactly([{ age: 10 }, { age: 1 }]))
})
})
@@ -211,6 +392,7 @@ describe.each([
const JAN_1ST = "2020-01-01T00:00:00.000Z"
const JAN_2ND = "2020-01-02T00:00:00.000Z"
const JAN_5TH = "2020-01-05T00:00:00.000Z"
const JAN_9TH = "2020-01-09T00:00:00.000Z"
const JAN_10TH = "2020-01-10T00:00:00.000Z"
beforeAll(async () => {
@@ -223,7 +405,9 @@ describe.each([
describe("equal", () => {
it("successfully finds a row", () =>
-expectQuery({ equal: { dob: JAN_1ST } }).toFind([{ dob: JAN_1ST }]))
+expectQuery({ equal: { dob: JAN_1ST } }).toContainExactly([
+{ dob: JAN_1ST },
+]))
it("fails to find nonexistent row", () =>
expectQuery({ equal: { dob: JAN_2ND } }).toFindNothing())
@@ -231,15 +415,21 @@ describe.each([
describe("notEqual", () => {
it("successfully finds a row", () =>
-expectQuery({ notEqual: { dob: JAN_1ST } }).toFind([{ dob: JAN_10TH }]))
+expectQuery({ notEqual: { dob: JAN_1ST } }).toContainExactly([
+{ dob: JAN_10TH },
+]))
it("fails to find nonexistent row", () =>
-expectQuery({ notEqual: { dob: JAN_10TH } }).toFind([{ dob: JAN_1ST }]))
+expectQuery({ notEqual: { dob: JAN_10TH } }).toContainExactly([
+{ dob: JAN_1ST },
+]))
})
describe("oneOf", () => {
it("successfully finds a row", () =>
-expectQuery({ oneOf: { dob: [JAN_1ST] } }).toFind([{ dob: JAN_1ST }]))
+expectQuery({ oneOf: { dob: [JAN_1ST] } }).toContainExactly([
+{ dob: JAN_1ST },
+]))
it("fails to find nonexistent row", () =>
expectQuery({ oneOf: { dob: [JAN_2ND] } }).toFindNothing())
@@ -249,17 +439,240 @@ describe.each([
it("successfully finds a row", () =>
expectQuery({
range: { dob: { low: JAN_1ST, high: JAN_5TH } },
-}).toFind([{ dob: JAN_1ST }]))
+}).toContainExactly([{ dob: JAN_1ST }]))
it("successfully finds multiple rows", () =>
expectQuery({
range: { dob: { low: JAN_1ST, high: JAN_10TH } },
-}).toFind([{ dob: JAN_1ST }, { dob: JAN_10TH }]))
+}).toContainExactly([{ dob: JAN_1ST }, { dob: JAN_10TH }]))
it("successfully finds a row with a high bound", () =>
expectQuery({
range: { dob: { low: JAN_5TH, high: JAN_10TH } },
-}).toFind([{ dob: JAN_10TH }]))
+}).toContainExactly([{ dob: JAN_10TH }]))
it("successfully finds no rows", () =>
expectQuery({
range: { dob: { low: JAN_5TH, high: JAN_9TH } },
}).toFindNothing())
// We never implemented half-open ranges in Lucene.
!isInternal &&
it("can search using just a low value", () =>
expectQuery({
range: { dob: { low: JAN_5TH } },
}).toContainExactly([{ dob: JAN_10TH }]))
// We never implemented half-open ranges in Lucene.
!isInternal &&
it("can search using just a high value", () =>
expectQuery({
range: { dob: { high: JAN_5TH } },
}).toContainExactly([{ dob: JAN_1ST }]))
})
describe("sort", () => {
it("sorts ascending", () =>
expectSearch({
query: {},
sort: "dob",
sortOrder: SortOrder.ASCENDING,
}).toMatchExactly([{ dob: JAN_1ST }, { dob: JAN_10TH }]))
it("sorts descending", () =>
expectSearch({
query: {},
sort: "dob",
sortOrder: SortOrder.DESCENDING,
}).toMatchExactly([{ dob: JAN_10TH }, { dob: JAN_1ST }]))
describe("sortType STRING", () => {
it("sorts ascending", () =>
expectSearch({
query: {},
sort: "dob",
sortType: SortType.STRING,
sortOrder: SortOrder.ASCENDING,
}).toMatchExactly([{ dob: JAN_1ST }, { dob: JAN_10TH }]))
it("sorts descending", () =>
expectSearch({
query: {},
sort: "dob",
sortType: SortType.STRING,
sortOrder: SortOrder.DESCENDING,
}).toMatchExactly([{ dob: JAN_10TH }, { dob: JAN_1ST }]))
})
})
})
describe("array of strings", () => {
beforeAll(async () => {
await createTable({
numbers: {
name: "numbers",
type: FieldType.ARRAY,
constraints: { inclusion: ["one", "two", "three"] },
},
})
await createRows([{ numbers: ["one", "two"] }, { numbers: ["three"] }])
})
describe("contains", () => {
it("successfully finds a row", () =>
expectQuery({ contains: { numbers: ["one"] } }).toContainExactly([
{ numbers: ["one", "two"] },
]))
it("fails to find nonexistent row", () =>
expectQuery({ contains: { numbers: ["none"] } }).toFindNothing())
it("fails to find row containing all", () =>
expectQuery({
contains: { numbers: ["one", "two", "three"] },
}).toFindNothing())
it("finds all with empty list", () =>
expectQuery({ contains: { numbers: [] } }).toContainExactly([
{ numbers: ["one", "two"] },
{ numbers: ["three"] },
]))
})
describe("notContains", () => {
it("successfully finds a row", () =>
expectQuery({ notContains: { numbers: ["one"] } }).toContainExactly([
{ numbers: ["three"] },
]))
it("fails to find nonexistent row", () =>
expectQuery({
notContains: { numbers: ["one", "two", "three"] },
}).toContainExactly([
{ numbers: ["one", "two"] },
{ numbers: ["three"] },
]))
it("finds all with empty list", () =>
expectQuery({ notContains: { numbers: [] } }).toContainExactly([
{ numbers: ["one", "two"] },
{ numbers: ["three"] },
]))
})
describe("containsAny", () => {
it("successfully finds rows", () =>
expectQuery({
containsAny: { numbers: ["one", "two", "three"] },
}).toContainExactly([
{ numbers: ["one", "two"] },
{ numbers: ["three"] },
]))
it("fails to find nonexistent row", () =>
expectQuery({ containsAny: { numbers: ["none"] } }).toFindNothing())
it("finds all with empty list", () =>
expectQuery({ containsAny: { numbers: [] } }).toContainExactly([
{ numbers: ["one", "two"] },
{ numbers: ["three"] },
]))
})
})
describe("bigints", () => {
const SMALL = "1"
const MEDIUM = "10000000"
// Our bigints are int64s in most datasources.
const BIG = "9223372036854775807"
beforeAll(async () => {
await createTable({
num: { name: "num", type: FieldType.BIGINT },
})
await createRows([{ num: SMALL }, { num: MEDIUM }, { num: BIG }])
})
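
A note on the fixtures above: the bigint values are deliberately strings, because JavaScript numbers lose precision past Number.MAX_SAFE_INTEGER (2^53 - 1), so an int64 like 9223372036854775807 cannot survive a round-trip through a plain number. A quick standalone illustration, independent of these test helpers:

const BIG = "9223372036854775807"
console.log(Number(BIG))            // 9223372036854776000, precision already lost
console.log(BigInt(BIG).toString()) // "9223372036854775807", exact
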
describe("equal", () => {
it("successfully finds a row", () =>
expectQuery({ equal: { num: SMALL } }).toContainExactly([
{ num: SMALL },
]))
it("successfully finds a big value", () =>
expectQuery({ equal: { num: BIG } }).toContainExactly([{ num: BIG }]))
it("fails to find nonexistent row", () =>
expectQuery({ equal: { num: "2" } }).toFindNothing())
})
describe("notEqual", () => {
it("successfully finds a row", () =>
expectQuery({ notEqual: { num: SMALL } }).toContainExactly([
{ num: MEDIUM },
{ num: BIG },
]))
it("fails to find nonexistent row", () =>
expectQuery({ notEqual: { num: 10 } }).toContainExactly([
{ num: SMALL },
{ num: MEDIUM },
{ num: BIG },
]))
})
describe("oneOf", () => {
it("successfully finds a row", () =>
expectQuery({ oneOf: { num: [SMALL] } }).toContainExactly([
{ num: SMALL },
]))
it("successfully finds all rows", () =>
expectQuery({ oneOf: { num: [SMALL, MEDIUM, BIG] } }).toContainExactly([
{ num: SMALL },
{ num: MEDIUM },
{ num: BIG },
]))
it("fails to find nonexistent row", () =>
expectQuery({ oneOf: { num: [2] } }).toFindNothing())
})
// Range searches against bigints don't seem to work at all in Lucene, and I
// couldn't figure out why. Given that we're replacing Lucene with SQS,
// we've decided not to spend time on it.
!isInternal &&
describe("range", () => {
it("successfully finds a row", () =>
expectQuery({
range: { num: { low: SMALL, high: "5" } },
}).toContainExactly([{ num: SMALL }]))
it("successfully finds multiple rows", () =>
expectQuery({
range: { num: { low: SMALL, high: MEDIUM } },
}).toContainExactly([{ num: SMALL }, { num: MEDIUM }]))
it("successfully finds a row with a high bound", () =>
expectQuery({
range: { num: { low: MEDIUM, high: BIG } },
}).toContainExactly([{ num: MEDIUM }, { num: BIG }]))
it("successfully finds no rows", () =>
expectQuery({
range: { num: { low: "5", high: "5" } },
}).toFindNothing())
it("can search using just a low value", () =>
expectQuery({
range: { num: { low: MEDIUM } },
}).toContainExactly([{ num: MEDIUM }, { num: BIG }]))
it("can search using just a high value", () =>
expectQuery({
range: { num: { high: MEDIUM } },
}).toContainExactly([{ num: SMALL }, { num: MEDIUM }]))
})
})
})
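
A note on the `!isInternal && it(...)` idiom used in several places above: Jest only registers tests that are actually declared while the file is evaluated, so gating the declaration on a boolean skips the case entirely for the Lucene-backed internal datasource. A minimal sketch of the pattern (the flag name here is illustrative):

const supportsHalfOpenRanges = !isInternal
supportsHalfOpenRanges &&
  it("only registered for external SQL datasources", () => {
    expect(1 + 1).toBe(2)
  })
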
@@ -34,7 +34,7 @@ describe.each([
[DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)],
[DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
])("/tables (%s)", (_, dsProvider) => {
-let isInternal: boolean
+const isInternal: boolean = !dsProvider
let datasource: Datasource | undefined
let config = setup.getConfig()
@@ -44,9 +44,6 @@ describe.each([
await config.init()
if (dsProvider) {
datasource = await config.api.datasource.create(await dsProvider)
-isInternal = false
-} else {
-isInternal = true
}
})
@@ -219,9 +216,6 @@ describe.each([
it("should add a new column for an internal DB table", async () => {
const saveTableRequest: SaveTableRequest = {
-_add: {
-name: "NEW_COLUMN",
-},
...basicTable(),
}
@@ -235,7 +229,6 @@ describe.each([
updatedAt: expect.stringMatching(ISO_REGEX_PATTERN),
views: {},
}
-delete expectedResponse._add
expect(response).toEqual(expectedResponse)
})
})
@@ -8,7 +8,7 @@ import {
PermissionLevel,
QuotaUsageType,
SaveTableRequest,
-SearchQueryOperators,
+SearchFilterOperator,
SortOrder,
SortType,
StaticQuotaName,
@@ -132,7 +132,7 @@ describe.each([
primaryDisplay: generator.word(),
query: [
{
-operator: SearchQueryOperators.EQUAL,
+operator: SearchFilterOperator.EQUAL,
field: "field",
value: "value",
},
@@ -236,7 +236,7 @@ describe.each([
...view,
query: [
{
-operator: SearchQueryOperators.EQUAL,
+operator: SearchFilterOperator.EQUAL,
field: "newField",
value: "thatValue",
},
@@ -263,7 +263,7 @@ describe.each([
primaryDisplay: generator.word(),
query: [
{
-operator: SearchQueryOperators.EQUAL,
+operator: SearchFilterOperator.EQUAL,
field: generator.word(),
value: generator.word(),
},
@@ -341,7 +341,7 @@ describe.each([
tableId: generator.guid(),
query: [
{
-operator: SearchQueryOperators.EQUAL,
+operator: SearchFilterOperator.EQUAL,
field: "newField",
value: "thatValue",
},
@@ -671,7 +671,7 @@ describe.each([
name: generator.guid(),
query: [
{
-operator: SearchQueryOperators.EQUAL,
+operator: SearchFilterOperator.EQUAL,
field: "two",
value: "bar2",
},
@@ -62,7 +62,6 @@ export const definition: AutomationStepSchema = {
}
export async function run({ inputs }: AutomationStepInput) {
-// eslint-disable-next-line @typescript-eslint/no-unused-vars
const { automationId, ...fieldParams } = inputs.automation
if (await features.isTriggerAutomationRunEnabled()) {
@@ -20,6 +20,7 @@ export enum FilterTypes {
NOT_EMPTY = "notEmpty",
CONTAINS = "contains",
NOT_CONTAINS = "notContains",
CONTAINS_ANY = "containsAny",
ONE_OF = "oneOf",
}
@@ -30,6 +31,7 @@ export const NoEmptyFilterStrings = [
FilterTypes.NOT_EQUAL,
FilterTypes.CONTAINS,
FilterTypes.NOT_CONTAINS,
FilterTypes.CONTAINS_ANY,
]
export const CanSwitchTypes = [
@@ -40,6 +40,7 @@ export const USER_METDATA_PREFIX = `${DocumentType.ROW}${SEPARATOR}${dbCore.InternalTable.USER_METADATA}${SEPARATOR}`
export const LINK_USER_METADATA_PREFIX = `${DocumentType.LINK}${SEPARATOR}${dbCore.InternalTable.USER_METADATA}${SEPARATOR}`
export const TABLE_ROW_PREFIX = `${DocumentType.ROW}${SEPARATOR}${DocumentType.TABLE}`
export const AUTOMATION_LOG_PREFIX = `${DocumentType.AUTOMATION_LOG}${SEPARATOR}`
export const SQS_DATASOURCE_INTERNAL = "internal"
export const ViewName = dbCore.ViewName
export const InternalTables = dbCore.InternalTable
export const UNICODE_MAX = dbCore.UNICODE_MAX
@@ -28,6 +28,7 @@ const DEFAULTS = {
PLUGINS_DIR: "/plugins",
FORKED_PROCESS_NAME: "main",
JS_RUNNER_MEMORY_LIMIT: 64,
COUCH_DB_SQL_URL: "http://localhost:4006",
}
const QUERY_THREAD_TIMEOUT =
@@ -39,6 +40,7 @@ const environment = {
// important - prefer app port to generic port
PORT: process.env.APP_PORT || process.env.PORT,
COUCH_DB_URL: process.env.COUCH_DB_URL,
COUCH_DB_SQL_URL: process.env.COUCH_DB_SQL_URL || DEFAULTS.COUCH_DB_SQL_URL,
MINIO_URL: process.env.MINIO_URL,
WORKER_URL: process.env.WORKER_URL,
AWS_REGION: process.env.AWS_REGION,
@@ -1,11 +1,8 @@
import { features } from "@budibase/backend-core"
import env from "./environment"
-// eslint-disable-next-line no-unused-vars
enum AppFeature {
-// eslint-disable-next-line no-unused-vars
API = "api",
-// eslint-disable-next-line no-unused-vars
AUTOMATIONS = "automations",
}
@@ -16,7 +16,6 @@ import {
getDatasource,
rawQuery,
} from "../integrations/tests/utils"
-import { builderSocket } from "../websockets"
import { generator } from "@budibase/backend-core/tests"
// @ts-ignore
fetch.mockSearch()
@@ -233,72 +232,6 @@ describe("mysql integrations", () => {
})
describe("POST /api/tables/", () => {
-const emitDatasourceUpdateMock = jest.fn()
-it("will emit the datasource entity schema with externalType to the front-end when adding a new column", async () => {
-const addColumnToTable: TableRequest = {
-type: "table",
-sourceType: TableSourceType.EXTERNAL,
-name: uniqueTableName(),
-sourceId: datasource._id!,
-primary: ["id"],
-schema: {
-id: {
-type: FieldType.AUTO,
-name: "id",
-autocolumn: true,
-},
-new_column: {
-type: FieldType.NUMBER,
-name: "new_column",
-},
-},
-_add: {
-name: "new_column",
-},
-}
-jest
-.spyOn(builderSocket!, "emitDatasourceUpdate")
-.mockImplementation(emitDatasourceUpdateMock)
-await makeRequest("post", "/api/tables/", addColumnToTable)
-const expectedTable: TableRequest = {
-...addColumnToTable,
-schema: {
-id: {
-type: FieldType.NUMBER,
-name: "id",
-autocolumn: true,
-constraints: {
-presence: false,
-},
-externalType: "int unsigned",
-},
-new_column: {
-type: FieldType.NUMBER,
-name: "new_column",
-autocolumn: false,
-constraints: {
-presence: false,
-},
-externalType: "float(8,2)",
-},
-},
-created: true,
-_id: `${datasource._id}__${addColumnToTable.name}`,
-}
-delete expectedTable._add
-expect(emitDatasourceUpdateMock).toHaveBeenCalledTimes(1)
-const emittedDatasource: Datasource =
-emitDatasourceUpdateMock.mock.calls[0][1]
-expect(emittedDatasource.entities![expectedTable.name]).toEqual(
-expectedTable
-)
-})
it("will rename a column", async () => {
await makeRequest("post", "/api/tables/", primaryMySqlTable)
@@ -2,6 +2,7 @@ import {
QueryJson,
Datasource,
DatasourcePlusQueryResponse,
RowOperations,
} from "@budibase/types"
import { getIntegration } from "../index"
import sdk from "../../sdk"
@@ -10,6 +11,17 @@ export async function makeExternalQuery(
datasource: Datasource,
json: QueryJson
): Promise<DatasourcePlusQueryResponse> {
const entityId = json.endpoint.entityId,
tableName = json.meta.table.name,
tableId = json.meta.table._id
// case found during testing - make sure this doesn't happen again
if (
RowOperations.includes(json.endpoint.operation) &&
entityId !== tableId &&
entityId !== tableName
) {
throw new Error("Entity ID and table metadata do not align")
}
datasource = await sdk.datasources.enrich(datasource)
const Integration = await getIntegration(datasource.source)
// query is the opinionated function
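
The guard added above asserts that the endpoint's entityId agrees with the table metadata before any row operation runs; a mismatch means the QueryJson was assembled against the wrong table. A simplified standalone sketch of the check (shapes reduced for illustration; the real types come from @budibase/types):

type Endpoint = { operation: string; entityId: string }
type TableMeta = { _id?: string; name: string }

function assertEntityAligned(
  endpoint: Endpoint,
  table: TableMeta,
  rowOperations: string[]
) {
  // case found during testing: a row operation aimed at an entity that is
  // neither the table's _id nor its name should fail loudly
  if (
    rowOperations.includes(endpoint.operation) &&
    endpoint.entityId !== table._id &&
    endpoint.entityId !== table.name
  ) {
    throw new Error("Entity ID and table metadata do not align")
  }
}
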
@@ -6,6 +6,7 @@ import {
SqlClient,
isValidFilter,
getNativeSql,
SqlStatements,
} from "../utils"
import SqlTableQueryBuilder from "./sqlTable"
import {
@@ -22,6 +23,8 @@ import {
SortDirection,
SqlQueryBinding,
Table,
TableSourceType,
INTERNAL_TABLE_SOURCE_ID,
} from "@budibase/types"
import environment from "../../environment"
@@ -135,6 +138,18 @@ function generateSelectStatement(
})
}
function getTableName(table?: Table): string | undefined {
// SQS uses the table ID rather than the table name
if (
table?.sourceType === TableSourceType.INTERNAL ||
table?.sourceId === INTERNAL_TABLE_SOURCE_ID
) {
return table?._id
} else {
return table?.name
}
}
class InternalBuilder {
private readonly client: string
@@ -146,10 +161,20 @@ class InternalBuilder {
addFilters(
query: Knex.QueryBuilder,
filters: SearchFilters | undefined,
-tableName: string,
+table: Table,
opts: { aliases?: Record<string, string>; relationship?: boolean }
): Knex.QueryBuilder {
-function getTableName(name: string) {
+if (!filters) {
return query
}
filters = parseFilters(filters)
// if all or specified in filters, then everything is an or
const allOr = filters.allOr
const sqlStatements = new SqlStatements(this.client, table, { allOr })
const tableName =
this.client === SqlClient.SQL_LITE ? table._id! : table.name
function getTableAlias(name: string) {
const alias = opts.aliases?.[name]
return alias || name
}
@@ -161,11 +186,11 @@ class InternalBuilder {
const updatedKey = dbCore.removeKeyNumbering(key)
const isRelationshipField = updatedKey.includes(".")
if (!opts.relationship && !isRelationshipField) {
-fn(`${getTableName(tableName)}.${updatedKey}`, value)
+fn(`${getTableAlias(tableName)}.${updatedKey}`, value)
}
if (opts.relationship && isRelationshipField) {
const [filterTableName, property] = updatedKey.split(".")
-fn(`${getTableName(filterTableName)}.${property}`, value)
+fn(`${getTableAlias(filterTableName)}.${property}`, value)
}
}
}
@@ -233,18 +258,17 @@ class InternalBuilder {
(statement ? andOr : "") +
`LOWER(${likeKey(this.client, key)}) LIKE ?`
}
if (statement === "") {
return
}
// @ts-ignore
query = query[rawFnc](`${not}(${statement})`, value)
})
}
}
-if (!filters) {
-return query
-}
-filters = parseFilters(filters)
-// if all or specified in filters, then everything is an or
-const allOr = filters.allOr
if (filters.oneOf) {
iterate(filters.oneOf, (key, array) => {
const fnc = allOr ? "orWhereIn" : "whereIn"
@@ -287,17 +311,11 @@ class InternalBuilder {
const lowValid = isValidFilter(value.low),
highValid = isValidFilter(value.high)
if (lowValid && highValid) {
-// Use a between operator if we have 2 valid range values
-const fnc = allOr ? "orWhereBetween" : "whereBetween"
-query = query[fnc](key, [value.low, value.high])
+query = sqlStatements.between(query, key, value.low, value.high)
} else if (lowValid) {
-// Use just a single greater than operator if we only have a low
-const fnc = allOr ? "orWhere" : "where"
-query = query[fnc](key, ">", value.low)
+query = sqlStatements.lte(query, key, value.low)
} else if (highValid) {
-// Use just a single less than operator if we only have a high
-const fnc = allOr ? "orWhere" : "where"
-query = query[fnc](key, "<", value.high)
+query = sqlStatements.gte(query, key, value.high)
}
})
}
@@ -340,10 +358,11 @@ class InternalBuilder {
addSorting(query: Knex.QueryBuilder, json: QueryJson): Knex.QueryBuilder {
let { sort, paginate } = json
-const table = json.meta?.table
+const table = json.meta.table
const tableName = getTableName(table)
const aliases = json.tableAliases
const aliased =
-table?.name && aliases?.[table.name] ? aliases[table.name] : table?.name
+tableName && aliases?.[tableName] ? aliases[tableName] : table?.name
if (sort && Object.keys(sort || {}).length > 0) {
for (let [key, value] of Object.entries(sort)) {
const direction =
@@ -453,14 +472,13 @@ class InternalBuilder {
): Knex.QueryBuilder {
const tableName = endpoint.entityId
const tableAlias = aliases?.[tableName]
-let table: string | Record<string, string> = tableName
-if (tableAlias) {
-table = { [tableAlias]: tableName }
-}
-let query = knex(table)
-if (endpoint.schema) {
-query = query.withSchema(endpoint.schema)
-}
+const query = knex(
+this.tableNameWithSchema(tableName, {
+alias: tableAlias,
+schema: endpoint.schema,
+})
+)
return query
}
@@ -527,7 +545,7 @@ class InternalBuilder {
if (foundOffset) {
query = query.offset(foundOffset)
}
-query = this.addFilters(query, filters, tableName, {
+query = this.addFilters(query, filters, json.meta.table, {
aliases: tableAliases,
})
// add sorting to pre-query
@@ -548,7 +566,7 @@ class InternalBuilder {
endpoint.schema,
tableAliases
)
-return this.addFilters(query, filters, tableName, {
+return this.addFilters(query, filters, json.meta.table, {
relationship: true,
aliases: tableAliases,
})
@@ -558,7 +576,7 @@ class InternalBuilder {
const { endpoint, body, filters, tableAliases } = json
let query = this.knexWithAlias(knex, endpoint, tableAliases)
const parsedBody = parseBody(body)
-query = this.addFilters(query, filters, endpoint.entityId, {
+query = this.addFilters(query, filters, json.meta.table, {
aliases: tableAliases,
})
// mysql can't use returning
@@ -572,7 +590,7 @@ class InternalBuilder {
delete(knex: Knex, json: QueryJson, opts: QueryOptions): Knex.QueryBuilder {
const { endpoint, filters, tableAliases } = json
let query = this.knexWithAlias(knex, endpoint, tableAliases)
-query = this.addFilters(query, filters, endpoint.entityId, {
+query = this.addFilters(query, filters, json.meta.table, {
aliases: tableAliases,
})
// mysql can't use returning
@@ -664,7 +682,7 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
// when creating if an ID has been inserted need to make sure
// the id filter is enriched with it before trying to retrieve the row
checkLookupKeys(id: any, json: QueryJson) {
-if (!id || !json.meta?.table || !json.meta.table.primary) {
+if (!id || !json.meta.table || !json.meta.table.primary) {
return json
}
const primaryKey = json.meta.table.primary?.[0]
@@ -724,12 +742,13 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
results: Record<string, any>[],
aliases?: Record<string, string>
): Record<string, any>[] {
const tableName = getTableName(table)
for (const [name, field] of Object.entries(table.schema)) {
if (!this._isJsonColumn(field)) {
continue
}
-const tableName = aliases?.[table.name] || table.name
-const fullName = `${tableName}.${name}`
+const aliasedTableName = (tableName && aliases?.[tableName]) || tableName
+const fullName = `${aliasedTableName}.${name}`
for (let row of results) {
if (typeof row[fullName] === "string") {
row[fullName] = JSON.parse(row[fullName])
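
The getTableName helper introduced in this file encodes the SQS naming split: internal tables are addressed by their document _id, while external tables keep their real SQL name. A reduced mirror of the logic (field names as in the diff; the real check compares against the TableSourceType.INTERNAL and INTERNAL_TABLE_SOURCE_ID values from @budibase/types, simplified to strings here as an assumption):

function tableNameFor(table?: {
  _id?: string
  name: string
  sourceType?: string
  sourceId?: string
}): string | undefined {
  // SQS uses the table ID rather than the table name
  if (table?.sourceType === "internal" || table?.sourceId === "bb_internal") {
    return table?._id
  }
  return table?.name
}
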
@@ -1,19 +1,20 @@
import { Knex, knex } from "knex"
import {
-RelationshipType,
FieldSubtype,
FieldType,
NumberFieldMetadata,
Operation,
QueryJson,
RelationshipType,
RenameColumn,
-Table,
-FieldType,
SqlQuery,
Table,
TableSourceType,
} from "@budibase/types"
import { breakExternalTableId, getNativeSql, SqlClient } from "../utils"
-import { utils } from "@budibase/shared-core"
import SchemaBuilder = Knex.SchemaBuilder
import CreateTableBuilder = Knex.CreateTableBuilder
import { utils } from "@budibase/shared-core"
function isIgnoredType(type: FieldType) {
const ignored = [FieldType.LINK, FieldType.FORMULA]
@@ -105,13 +106,13 @@ function generateSchema(
column.relationshipType !== RelationshipType.MANY_TO_MANY
) {
if (!column.foreignKey || !column.tableId) {
-throw "Invalid relationship schema"
+throw new Error("Invalid relationship schema")
}
const { tableName } = breakExternalTableId(column.tableId)
// @ts-ignore
const relatedTable = tables[tableName]
if (!relatedTable) {
-throw "Referenced table doesn't exist"
+throw new Error("Referenced table doesn't exist")
}
const relatedPrimary = relatedTable.primary[0]
const externalType = relatedTable.schema[relatedPrimary].externalType
@@ -209,15 +210,19 @@ class SqlTableQueryBuilder {
let query: Knex.SchemaBuilder
if (!json.table || !json.meta || !json.meta.tables) {
-throw "Cannot execute without table being specified"
+throw new Error("Cannot execute without table being specified")
}
if (json.table.sourceType === TableSourceType.INTERNAL) {
throw new Error("Cannot perform table actions for SQS.")
}
switch (this._operation(json)) {
case Operation.CREATE_TABLE:
query = buildCreateTable(client, json.table, json.meta.tables)
break
case Operation.UPDATE_TABLE:
if (!json.meta || !json.meta.table) {
-throw "Must specify old table for update"
+throw new Error("Must specify old table for update")
}
// renameColumn does not work for MySQL, so return a raw query
if (this.sqlClient === SqlClient.MY_SQL && json.meta.renamed) {
@@ -264,7 +269,7 @@ class SqlTableQueryBuilder {
query = buildDeleteTable(client, json.table)
break
default:
-throw "Table operation is of unknown type"
+throw new Error("Table operation is of unknown type")
}
return getNativeSql(query)
}
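
The throw-site changes in this file are behavioural, not just stylistic: a thrown string carries no stack trace and fails instanceof Error checks, so upstream handlers and loggers lose context. For example:

try {
  throw "Table operation is of unknown type" // plain string
} catch (err) {
  console.log(err instanceof Error) // false, and no stack available
}
try {
  throw new Error("Table operation is of unknown type")
} catch (err) {
  console.log(err instanceof Error, (err as Error).stack !== undefined) // true true
}
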
@@ -168,8 +168,7 @@ class GoogleSheetsIntegration implements DatasourcePlus {
return ""
}
-// eslint-disable-next-line @typescript-eslint/no-unused-vars
-getStringConcat(parts: string[]) {
+getStringConcat(_parts: string[]) {
return ""
}
@@ -1,14 +1,27 @@
import { SqlClient } from "../utils"
import Sql from "../base/sql"
import {
FieldType,
Operation,
QueryJson,
-TableSourceType,
Table,
-FieldType,
+TableSourceType,
} from "@budibase/types"
const TABLE_NAME = "test"
const TABLE: Table = {
type: "table",
sourceType: TableSourceType.EXTERNAL,
sourceId: "SOURCE_ID",
schema: {
id: {
name: "id",
type: FieldType.NUMBER,
},
},
name: TABLE_NAME,
primary: ["id"],
}
function endpoint(table: any, operation: any) {
return {
@@ -25,6 +38,10 @@ function generateReadJson({
sort,
paginate,
}: any = {}): QueryJson {
const tableObj = { ...TABLE }
if (table) {
tableObj.name = table
}
return {
endpoint: endpoint(table || TABLE_NAME, "READ"),
resource: {
@@ -34,14 +51,7 @@ function generateReadJson({
sort: sort || {},
paginate: paginate || {},
meta: {
-table: {
-type: "table",
-sourceType: TableSourceType.EXTERNAL,
-sourceId: "SOURCE_ID",
-schema: {},
-name: table || TABLE_NAME,
-primary: ["id"],
-} as any,
+table: tableObj,
},
}
}
@@ -49,6 +59,9 @@ function generateReadJson({
function generateCreateJson(table = TABLE_NAME, body = {}): QueryJson {
return {
endpoint: endpoint(table, "CREATE"),
meta: {
table: TABLE,
},
body,
}
}
@@ -58,7 +71,15 @@ function generateUpdateJson({
body = {},
filters = {},
meta = {},
}: {
table: string
body?: any
filters?: any
meta?: any
-}: any): QueryJson {
+}): QueryJson {
if (!meta.table) {
meta.table = TABLE
}
return {
endpoint: endpoint(table, "UPDATE"),
filters,
@@ -70,6 +91,9 @@ function generateUpdateJson({
function generateDeleteJson(table = TABLE_NAME, filters = {}): QueryJson {
return {
endpoint: endpoint(table, "DELETE"),
meta: {
table: TABLE,
},
filters,
}
}
@@ -102,6 +126,9 @@ function generateRelationshipJson(config: { schema?: string } = {}): QueryJson {
},
],
extra: { idFilter: {} },
meta: {
table: TABLE,
},
}
}
@@ -136,6 +163,9 @@ function generateManyRelationshipJson(config: { schema?: string } = {}) {
},
],
extra: { idFilter: {} },
meta: {
table: TABLE,
},
}
}
@@ -319,7 +349,7 @@ describe("SQL query builder", () => {
)
expect(query).toEqual({
bindings: [date, limit],
-sql: `select * from (select * from "${TABLE_NAME}" where "${TABLE_NAME}"."property" > $1 limit $2) as "${TABLE_NAME}"`,
+sql: `select * from (select * from "${TABLE_NAME}" where "${TABLE_NAME}"."property" >= $1 limit $2) as "${TABLE_NAME}"`,
})
})
@@ -338,7 +368,7 @@ describe("SQL query builder", () => {
)
expect(query).toEqual({
bindings: [date, limit],
-sql: `select * from (select * from "${TABLE_NAME}" where "${TABLE_NAME}"."property" < $1 limit $2) as "${TABLE_NAME}"`,
+sql: `select * from (select * from "${TABLE_NAME}" where "${TABLE_NAME}"."property" <= $1 limit $2) as "${TABLE_NAME}"`,
})
})
@@ -572,7 +602,7 @@ describe("SQL query builder", () => {
)
expect(query).toEqual({
bindings: ["2000-01-01 00:00:00", 500],
-sql: `select * from (select * from "${TABLE_NAME}" where "${TABLE_NAME}"."dob" > $1 limit $2) as "${TABLE_NAME}"`,
+sql: `select * from (select * from "${TABLE_NAME}" where "${TABLE_NAME}"."dob" >= $1 limit $2) as "${TABLE_NAME}"`,
})
})
@@ -591,7 +621,7 @@ describe("SQL query builder", () => {
)
expect(query).toEqual({
bindings: ["2010-01-01 00:00:00", 500],
-sql: `select * from (select * from "${TABLE_NAME}" where "${TABLE_NAME}"."dob" < $1 limit $2) as "${TABLE_NAME}"`,
+sql: `select * from (select * from "${TABLE_NAME}" where "${TABLE_NAME}"."dob" <= $1 limit $2) as "${TABLE_NAME}"`,
})
})
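
The expectation changes above (`>` becoming `>=`, `<` becoming `<=`) show that single-bounded range filters are now inclusive, consistent with the inclusive BETWEEN already used when both bounds are present. A runnable sketch of the corresponding knex calls (assuming a Postgres client, no connection needed just to compile SQL):

import knex from "knex"

const db = knex({ client: "pg" })
console.log(db("test").where("property", ">=", 5).toString())
console.log(db("test").where("property", "<=", 10).toString())
console.log(db("test").whereBetween("property", [5, 10]).toString())
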
@@ -4,12 +4,26 @@ import {
QueryJson,
SourceName,
SqlQuery,
Table,
TableSourceType,
} from "@budibase/types"
import { join } from "path"
import Sql from "../base/sql"
import { SqlClient } from "../utils"
-import AliasTables from "../../api/controllers/row/alias"
import { generator } from "@budibase/backend-core/tests"
import sdk from "../../sdk"
// this doesn't exist strictly
const TABLE: Table = {
type: "table",
sourceType: TableSourceType.EXTERNAL,
sourceId: "SOURCE_ID",
schema: {},
name: "tableName",
primary: ["id"],
}
const AliasTables = sdk.rows.AliasTables
function multiline(sql: string) {
return sql.replace(/\n/g, "").replace(/ +/g, " ")
@@ -103,7 +117,8 @@ describe("Captures of real examples", () => {
let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson)
const filters = queryJson.filters
const notEqualsValue = Object.values(filters?.notEqual!)[0]
-const rangeValue = Object.values(filters?.range!)[0]
+const rangeValue: { high?: string | number; low?: string | number } =
+Object.values(filters?.range!)[0]
const equalValue = Object.values(filters?.equal!)[0]
expect(query).toEqual({
@@ -220,6 +235,9 @@ describe("Captures of real examples", () => {
resource: {
fields,
},
meta: {
table: TABLE,
},
}
}
@@ -0,0 +1,2 @@
export * from "./utils"
export { SqlStatements } from "./sqlStatements"
@@ -0,0 +1,80 @@
import { FieldType, Table, FieldSchema } from "@budibase/types"
import { SqlClient } from "./utils"
import { Knex } from "knex"
export class SqlStatements {
client: string
table: Table
allOr: boolean | undefined
constructor(
client: string,
table: Table,
{ allOr }: { allOr?: boolean } = {}
) {
this.client = client
this.table = table
this.allOr = allOr
}
getField(key: string): FieldSchema | undefined {
const fieldName = key.split(".")[1]
return this.table.schema[fieldName]
}
between(
query: Knex.QueryBuilder,
key: string,
low: number | string,
high: number | string
) {
// Use a between operator if we have 2 valid range values
const field = this.getField(key)
if (
field?.type === FieldType.BIGINT &&
this.client === SqlClient.SQL_LITE
) {
query = query.whereRaw(
`CAST(${key} AS INTEGER) BETWEEN CAST(? AS INTEGER) AND CAST(? AS INTEGER)`,
[low, high]
)
} else {
const fnc = this.allOr ? "orWhereBetween" : "whereBetween"
query = query[fnc](key, [low, high])
}
return query
}
lte(query: Knex.QueryBuilder, key: string, low: number | string) {
// Use just a single greater than operator if we only have a low
const field = this.getField(key)
if (
field?.type === FieldType.BIGINT &&
this.client === SqlClient.SQL_LITE
) {
query = query.whereRaw(`CAST(${key} AS INTEGER) >= CAST(? AS INTEGER)`, [
low,
])
} else {
const fnc = this.allOr ? "orWhere" : "where"
query = query[fnc](key, ">=", low)
}
return query
}
gte(query: Knex.QueryBuilder, key: string, high: number | string) {
const field = this.getField(key)
// Use just a single less than operator if we only have a high
if (
field?.type === FieldType.BIGINT &&
this.client === SqlClient.SQL_LITE
) {
query = query.whereRaw(`CAST(${key} AS INTEGER) <= CAST(? AS INTEGER)`, [
high,
])
} else {
const fnc = this.allOr ? "orWhere" : "where"
query = query[fnc](key, "<=", high)
}
return query
}
}
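
A usage sketch for the class above: the BIGINT branch exists because SQLite stores these values in a way that would otherwise compare as text, i.e. lexicographically, so both sides are cast to INTEGER to restore numeric comparison. Assumptions in this sketch: the key is "tableId.fieldName" as getField expects, and the table shape is trimmed to the one field involved.

const table: any = {
  _id: "ta_users",
  name: "users",
  schema: { num: { name: "num", type: FieldType.BIGINT } },
}
const statements = new SqlStatements(SqlClient.SQL_LITE, table, { allOr: false })
// Given a Knex.QueryBuilder `query` for the table, this emits
//   CAST(ta_users.num AS INTEGER) BETWEEN CAST(? AS INTEGER) AND CAST(? AS INTEGER)
// on SQLite bigint fields, and falls back to knex's whereBetween otherwise:
// query = statements.between(query, "ta_users.num", "1", "10000000")
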
@@ -4,18 +4,40 @@ import {
Datasource,
FieldType,
TableSourceType,
FieldSchema,
} from "@budibase/types"
-import { DocumentType, SEPARATOR } from "../db/utils"
-import { InvalidColumns, DEFAULT_BB_DATASOURCE_ID } from "../constants"
-import { helpers } from "@budibase/shared-core"
-import env from "../environment"
+import { DocumentType, SEPARATOR } from "../../db/utils"
+import { InvalidColumns, DEFAULT_BB_DATASOURCE_ID } from "../../constants"
+import { helpers, utils } from "@budibase/shared-core"
+import env from "../../environment"
import { Knex } from "knex"
const DOUBLE_SEPARATOR = `${SEPARATOR}${SEPARATOR}`
const ROW_ID_REGEX = /^\[.*]$/g
const ENCODED_SPACE = encodeURIComponent(" ")
-const SQL_NUMBER_TYPE_MAP = {
+type PrimitiveTypes =
| FieldType.STRING
| FieldType.NUMBER
| FieldType.BOOLEAN
| FieldType.DATETIME
| FieldType.JSON
| FieldType.BIGINT
| FieldType.OPTIONS
function isPrimitiveType(type: FieldType): type is PrimitiveTypes {
return [
FieldType.STRING,
FieldType.NUMBER,
FieldType.BOOLEAN,
FieldType.DATETIME,
FieldType.JSON,
FieldType.BIGINT,
FieldType.OPTIONS,
].includes(type)
}
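
The "type is PrimitiveTypes" predicate above is what lets TypeScript narrow a general FieldType down to the primitive subset, which in turn allows the SQL_*_TYPE_MAP constants below to be typed as Record<string, PrimitiveTypes>. The pattern in isolation:

type Fruit = "apple" | "banana" | "cherry"
type NonCitrus = "apple" | "banana"

function isNonCitrus(f: Fruit): f is NonCitrus {
  return ["apple", "banana"].includes(f)
}

const f = "apple" as Fruit
if (isNonCitrus(f)) {
  const narrowed: NonCitrus = f // compiles only because of the type guard
}
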
const SQL_NUMBER_TYPE_MAP: Record<string, PrimitiveTypes> = {
integer: FieldType.NUMBER,
int: FieldType.NUMBER,
decimal: FieldType.NUMBER,
@@ -35,7 +57,7 @@ const SQL_NUMBER_TYPE_MAP = {
smallmoney: FieldType.NUMBER,
}
-const SQL_DATE_TYPE_MAP = {
+const SQL_DATE_TYPE_MAP: Record<string, PrimitiveTypes> = {
timestamp: FieldType.DATETIME,
time: FieldType.DATETIME,
datetime: FieldType.DATETIME,
@@ -46,7 +68,7 @@ const SQL_DATE_TYPE_MAP = {
const SQL_DATE_ONLY_TYPES = ["date"]
const SQL_TIME_ONLY_TYPES = ["time"]
-const SQL_STRING_TYPE_MAP = {
+const SQL_STRING_TYPE_MAP: Record<string, PrimitiveTypes> = {
varchar: FieldType.STRING,
char: FieldType.STRING,
nchar: FieldType.STRING,
@@ -58,22 +80,22 @@ const SQL_STRING_TYPE_MAP = {
text: FieldType.STRING,
}
-const SQL_BOOLEAN_TYPE_MAP = {
+const SQL_BOOLEAN_TYPE_MAP: Record<string, PrimitiveTypes> = {
boolean: FieldType.BOOLEAN,
bit: FieldType.BOOLEAN,
tinyint: FieldType.BOOLEAN,
}
-const SQL_OPTIONS_TYPE_MAP = {
+const SQL_OPTIONS_TYPE_MAP: Record<string, PrimitiveTypes> = {
"user-defined": FieldType.OPTIONS,
}
-const SQL_MISC_TYPE_MAP = {
+const SQL_MISC_TYPE_MAP: Record<string, PrimitiveTypes> = {
json: FieldType.JSON,
bigint: FieldType.BIGINT,
}
-const SQL_TYPE_MAP = {
+const SQL_TYPE_MAP: Record<string, PrimitiveTypes> = {
...SQL_NUMBER_TYPE_MAP,
...SQL_DATE_TYPE_MAP,
...SQL_STRING_TYPE_MAP,
@@ -239,14 +261,14 @@ export function generateColumnDefinition(config: {
constraints.inclusion = options
}
-const schema: any = {
+const schema: FieldSchema = {
type: foundType,
externalType,
autocolumn,
name,
constraints,
}
-if (foundType === FieldType.DATETIME) {
+if (schema.type === FieldType.DATETIME) {
schema.dateOnly = SQL_DATE_ONLY_TYPES.includes(lowerCaseType)
schema.timeOnly = SQL_TIME_ONLY_TYPES.includes(lowerCaseType)
}
@@ -274,59 +296,6 @@ export function isIsoDateString(str: string) {
return d.toISOString() === trimmedValue
}
-/**
- * This function will determine whether a column is a relationship and whether it
- * is currently valid. The reason for the validity check is that tables can be deleted
- * outside of Budibase control and if this is the case it will break Budibase relationships.
- * The tableIds is a list passed down from the main finalise tables function, which is
- * based on the tables that have just been fetched. This will only really be used on subsequent
- * fetches to the first one - if the user is periodically refreshing Budibase knowledge of tables.
- * @param column The column to check, to see if it is a valid relationship.
- * @param tableIds The IDs of the tables which currently exist.
- */
-export function shouldCopyRelationship(
-column: { type: string; tableId?: string },
-tableIds: string[]
-) {
-return (
-column.type === FieldType.LINK &&
-column.tableId &&
-tableIds.includes(column.tableId)
-)
-}
-/**
- * Similar function to the shouldCopyRelationship function, but instead this looks for options and boolean
- * types. It is possible to switch a string -> options and a number -> boolean (and vice versus) need to make
- * sure that these get copied over when tables are fetched. Also checks whether they are still valid, if a
- * column has changed type in the external database then copying it over may not be possible.
- * @param column The column to check for options or boolean type.
- * @param fetchedColumn The fetched column to check for the type in the external database.
- */
-export function shouldCopySpecialColumn(
-column: { type: string },
-fetchedColumn: { type: string } | undefined
-) {
-const isFormula = column.type === FieldType.FORMULA
-const specialTypes = [
-FieldType.OPTIONS,
-FieldType.LONGFORM,
-FieldType.ARRAY,
-FieldType.FORMULA,
-FieldType.BB_REFERENCE,
-]
-// column has been deleted, remove - formulas will never exist, always copy
-if (!isFormula && column && !fetchedColumn) {
-return false
-}
-const fetchedIsNumber =
-!fetchedColumn || fetchedColumn.type === FieldType.NUMBER
-return (
-specialTypes.indexOf(column.type as FieldType) !== -1 ||
-(fetchedIsNumber && column.type === FieldType.BOOLEAN)
-)
-}
/**
 * Looks for columns which need to be copied over into the new table definitions, like relationships,
 * options types and views.
@@ -348,6 +317,9 @@ function copyExistingPropsOver(
if (entities[tableName]?.created) {
table.created = entities[tableName]?.created
}
if (entities[tableName]?.constrained) {
table.constrained = entities[tableName]?.constrained
}
table.views = entities[tableName].views
@@ -356,12 +328,73 @@ function copyExistingPropsOver(
if (!Object.prototype.hasOwnProperty.call(existingTableSchema, key)) {
continue
}
const column = existingTableSchema[key]
-if (
-shouldCopyRelationship(column, tableIds) ||
-shouldCopySpecialColumn(column, table.schema[key])
-) {
-table.schema[key] = existingTableSchema[key]
+const existingColumnType = column?.type
+const updatedColumnType = table.schema[key]?.type
+const keepIfType = (...validTypes: PrimitiveTypes[]) => {
return (
isPrimitiveType(updatedColumnType) &&
table.schema[key] &&
validTypes.includes(updatedColumnType)
)
}
let shouldKeepSchema = false
switch (existingColumnType) {
case FieldType.FORMULA:
case FieldType.AUTO:
case FieldType.INTERNAL:
shouldKeepSchema = true
break
case FieldType.LINK:
shouldKeepSchema =
existingColumnType === FieldType.LINK &&
tableIds.includes(column.tableId)
break
case FieldType.STRING:
case FieldType.OPTIONS:
case FieldType.LONGFORM:
case FieldType.BARCODEQR:
shouldKeepSchema = keepIfType(FieldType.STRING)
break
case FieldType.NUMBER:
case FieldType.BOOLEAN:
shouldKeepSchema = keepIfType(FieldType.BOOLEAN, FieldType.NUMBER)
break
case FieldType.ARRAY:
case FieldType.ATTACHMENTS:
case FieldType.ATTACHMENT_SINGLE:
case FieldType.JSON:
case FieldType.BB_REFERENCE:
shouldKeepSchema = keepIfType(FieldType.JSON, FieldType.STRING)
break
case FieldType.DATETIME:
shouldKeepSchema = keepIfType(FieldType.DATETIME, FieldType.STRING)
break
case FieldType.BIGINT:
shouldKeepSchema = keepIfType(FieldType.BIGINT, FieldType.NUMBER)
break
default:
utils.unreachable(existingColumnType)
}
if (shouldKeepSchema) {
table.schema[key] = {
...existingTableSchema[key],
externalType:
existingTableSchema[key].externalType ||
table.schema[key]?.externalType,
}
}
}
}
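
The switch above replaces the old shouldCopyRelationship/shouldCopySpecialColumn heuristics with an explicit compatibility table: an existing, possibly user-enriched column definition survives a schema refresh only if the freshly fetched column's primitive type is one it can legitimately sit on top of (OPTIONS over STRING, BOOLEAN over NUMBER, and so on). Stripped of the Budibase enums, the shape of the rule is:

// Illustrative only; the real code switches over FieldType values.
function keepExisting(existing: string, fetched: string | undefined): boolean {
  const keepIfType = (...valid: string[]) =>
    fetched !== undefined && valid.includes(fetched)
  switch (existing) {
    case "options":
    case "longform":
      return keepIfType("string")
    case "boolean":
      return keepIfType("boolean", "number")
    case "bigint":
      return keepIfType("bigint", "number")
    default:
      return false
  }
}
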
@@ -35,7 +35,6 @@ export function init() {
// Because we can't pass functions into an Isolate, we remove them from
// the passed context and rely on the withHelpers() method to add them
// back in.
-// eslint-disable-next-line @typescript-eslint/no-unused-vars
const { helpers, snippets, ...rest } = ctx
return vm.withContext(rest, () => vm.execute(js))
} catch (error: any) {
@@ -348,8 +348,7 @@ const preSaveAction: Partial<Record<SourceName, any>> = {
 * Make sure all datasource entities have a display name selected
 */
export function setDefaultDisplayColumns(datasource: Datasource) {
-for (let entity of Object.values(datasource.entities || {})) {
+for (const entity of Object.values(datasource.entities || {})) {
if (entity.primaryDisplay) {
continue
}
@ -3,6 +3,7 @@ import * as rows from "./rows"
import * as search from "./search" import * as search from "./search"
import * as utils from "./utils" import * as utils from "./utils"
import * as external from "./external" import * as external from "./external"
import AliasTables from "./sqlAlias"
export default { export default {
...attachments, ...attachments,
@ -10,4 +11,5 @@ export default {
...search, ...search,
utils, utils,
external, external,
AliasTables,
} }
@ -13,6 +13,8 @@ import * as sqs from "./search/sqs"
import env from "../../../environment" import env from "../../../environment"
import { ExportRowsParams, ExportRowsResult } from "./search/types" import { ExportRowsParams, ExportRowsResult } from "./search/types"
import { dataFilters } from "@budibase/shared-core" import { dataFilters } from "@budibase/shared-core"
import sdk from "../../index"
import { searchInputMapping } from "./search/utils"
export { isValidFilter } from "../../../integrations/utils" export { isValidFilter } from "../../../integrations/utils"
@ -29,6 +31,10 @@ function pickApi(tableId: any) {
return internal return internal
} }
function isEmptyArray(value: any) {
return Array.isArray(value) && value.length === 0
}
// don't do a pure falsy check, as 0 is included
// https://github.com/Budibase/budibase/issues/10118
export function removeEmptyFilters(filters: SearchFilters) {
@@ -47,7 +53,7 @@ export function removeEmptyFilters(filters: SearchFilters) {
for (let [key, value] of Object.entries(
filters[filterType] as object
)) {
-if (value == null || value === "") {
+if (value == null || value === "" || isEmptyArray(value)) {
// @ts-ignore
delete filters[filterField][key]
}
@@ -72,12 +78,15 @@ export async function search(
}
}
const table = await sdk.tables.getTable(options.tableId)
options = searchInputMapping(table, options)
if (isExternalTable) {
-return external.search(options)
+return external.search(options, table)
} else if (env.SQS_SEARCH_ENABLE) {
-return sqs.search(options)
+return sqs.search(options, table)
} else {
-return internal.search(options)
+return internal.search(options, table)
}
}
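
With isEmptyArray folded into the emptiness check above, a filter such as containsAny: { tags: [] } is now stripped before querying rather than matching nothing, while 0 is still treated as a real filter value per the linked issue. The pruning rule in isolation:

function isEmptyFilterValue(value: unknown): boolean {
  return (
    value == null || value === "" || (Array.isArray(value) && value.length === 0)
  )
}

console.log(isEmptyFilterValue([]))      // true, dropped
console.log(isEmptyFilterValue(["one"])) // false, kept
console.log(isEmptyFilterValue(0))       // false, 0 is a legitimate value
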
@@ -8,6 +8,7 @@ import {
   SearchFilters,
   RowSearchParams,
   SearchResponse,
+  Table,
 } from "@budibase/types"
 import * as exporters from "../../../../api/controllers/view/exporters"
 import { handleRequest } from "../../../../api/controllers/row/external"
@@ -18,13 +19,13 @@ import {
 import { utils } from "@budibase/shared-core"
 import { ExportRowsParams, ExportRowsResult } from "./types"
 import { HTTPError, db } from "@budibase/backend-core"
-import { searchInputMapping } from "./utils"
 import pick from "lodash/pick"
 import { outputProcessing } from "../../../../utilities/rowProcessor"
 import sdk from "../../../"

 export async function search(
-  options: RowSearchParams
+  options: RowSearchParams,
+  table: Table
 ): Promise<SearchResponse<Row>> {
   const { tableId } = options
   const { paginate, query, ...params } = options
@@ -68,8 +69,6 @@ export async function search(
   }
   try {
-    const table = await sdk.tables.getTable(tableId)
-    options = searchInputMapping(table, options)
     let rows = await handleRequest(Operation.READ, tableId, {
       filters: query,
       sort,
@@ -150,11 +149,15 @@ export async function exportRows(
   }
   const datasource = await sdk.datasources.get(datasourceId!)
+  const table = await sdk.tables.getTable(tableId)
   if (!datasource || !datasource.entities) {
     throw new HTTPError("Datasource has not been configured for plus API.", 400)
   }
-  let result = await search({ tableId, query: requestQuery, sort, sortOrder })
+  let result = await search(
+    { tableId, query: requestQuery, sort, sortOrder },
+    table
+  )
   let rows: Row[] = []
   let headers
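Note: external search() no longer fetches its own table or applies searchInputMapping — both are now the caller's responsibility — so exportRows resolves the table alongside the datasource and threads it through. A hypothetical call site, assuming sdk.tables.getTable as used above:

const table = await sdk.tables.getTable(options.tableId)
const { rows } = await search(options, table)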

View File

@@ -1,6 +1,6 @@
 import { context, db, HTTPError } from "@budibase/backend-core"
 import env from "../../../../environment"
-import { fullSearch, paginatedSearch, searchInputMapping } from "./utils"
+import { fullSearch, paginatedSearch } from "./utils"
 import { getRowParams, InternalTables } from "../../../../db/utils"
 import {
   Database,
@@ -33,7 +33,8 @@ import pick from "lodash/pick"
 import { breakRowIdField } from "../../../../integrations/utils"

 export async function search(
-  options: RowSearchParams
+  options: RowSearchParams,
+  table: Table
 ): Promise<SearchResponse<Row>> {
   const { tableId } = options
@@ -51,8 +52,6 @@ export async function search(
     query: {},
   }
-  let table = await sdk.tables.getTable(tableId)
-  options = searchInputMapping(table, options)
   if (params.sort && !params.sortType) {
     const schema = table.schema
     const sortField = schema[params.sort]
@@ -122,12 +121,15 @@ export async function exportRows(
     result = await outputProcessing<Row[]>(table, response)
   } else if (query) {
-    let searchResponse = await search({
+    let searchResponse = await search(
+      {
         tableId,
         query,
         sort,
         sortOrder,
-    })
+      },
+      table
+    )
     result = searchResponse.rows
   }
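Note: the Lucene-backed internal search gets the same treatment as the external one, leaving all three backends with an identical shape. A sketch of the shared signature (the type alias name is invented here):

import { Row, RowSearchParams, SearchResponse, Table } from "@budibase/types"

type RowSearchImpl = (
  options: RowSearchParams,
  table: Table
) => Promise<SearchResponse<Row>>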

View File

@@ -20,7 +20,12 @@ import {
 } from "../../../../api/controllers/row/utils"
 import sdk from "../../../index"
 import { context } from "@budibase/backend-core"
-import { CONSTANT_INTERNAL_ROW_COLS } from "../../../../db/utils"
+import {
+  CONSTANT_INTERNAL_ROW_COLS,
+  SQS_DATASOURCE_INTERNAL,
+} from "../../../../db/utils"
+import AliasTables from "../sqlAlias"
+import { outputProcessing } from "../../../../utilities/rowProcessor"

 function buildInternalFieldList(
   table: Table,
@@ -31,13 +36,14 @@ function buildInternalFieldList(
   fieldList = fieldList.concat(
     CONSTANT_INTERNAL_ROW_COLS.map(col => `${table._id}.${col}`)
   )
-  if (opts.relationships) {
   for (let col of Object.values(table.schema)) {
-    if (col.type === FieldType.LINK) {
+    const isRelationship = col.type === FieldType.LINK
+    if (!opts.relationships && isRelationship) {
+      continue
+    }
+    if (isRelationship) {
       const linkCol = col as RelationshipFieldMetadata
-      const relatedTable = tables.find(
-        table => table._id === linkCol.tableId
-      )!
+      const relatedTable = tables.find(table => table._id === linkCol.tableId)!
       fieldList = fieldList.concat(
         buildInternalFieldList(relatedTable, tables, { relationships: false })
       )
@@ -45,7 +51,6 @@ function buildInternalFieldList(
       fieldList.push(`${table._id}.${col.name}`)
     }
   }
-  }
   return fieldList
 }
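Note: moving the relationship check inside the loop fixes the field list — previously opts.relationships === false skipped the whole loop, so the table's own columns were dropped too. Related tables are recursed into with relationships: false, so expansion stops one level deep. An illustrative call (the table objects and resulting ids are invented):

const fields = buildInternalFieldList(usersTable, allTables, {
  relationships: true,
})
// e.g. ["ta_users._id", "ta_users.name", "ta_teams._id", "ta_teams.teamName"]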
@@ -94,14 +99,14 @@ function buildTableMap(tables: Table[]) {
 }

 export async function search(
-  options: RowSearchParams
+  options: RowSearchParams,
+  table: Table
 ): Promise<SearchResponse<Row>> {
-  const { tableId, paginate, query, ...params } = options
+  const { paginate, query, ...params } = options
   const builder = new SqlQueryBuilder(SqlClient.SQL_LITE)
   const allTables = await sdk.tables.getAllInternalTables()
   const allTablesMap = buildTableMap(allTables)
-  const table = allTables.find(table => table._id === tableId)
   if (!table) {
     throw new Error("Unable to find table")
   }
@@ -111,7 +116,7 @@ export async function search(
   const request: QueryJson = {
     endpoint: {
       // not important, we query ourselves
-      datasourceId: "internal",
+      datasourceId: SQS_DATASOURCE_INTERNAL,
       entityId: table._id!,
       operation: Operation.READ,
     },
@@ -132,7 +137,7 @@ export async function search(
     type: "row",
   }
-  if (params.sort && !params.sortType) {
+  if (params.sort) {
     const sortField = table.schema[params.sort]
     const sortType =
       sortField.type === FieldType.NUMBER ? SortType.NUMBER : SortType.STRING
@@ -154,7 +159,9 @@ export async function search(
     }
   }
   try {
-    const query = builder._query(request, {
+    const alias = new AliasTables(allTables.map(table => table.name))
+    const rows = await alias.queryWithAliasing(request, async json => {
+      const query = builder._query(json, {
        disableReturning: true,
      })
@@ -170,10 +177,11 @@ export async function search(
      sql = sql.replace(/`doc2`.`rowId`/g, "`doc2.rowId`")
      const db = context.getAppDB()
-     const rows = await db.sql<Row>(sql, bindings)
+     return await db.sql<Row>(sql, bindings)
+    })
-    return {
-      rows: await sqlOutputProcessing(
+    // process from the format of tableId.column to expected format
+    const processed = await sqlOutputProcessing(
       rows,
       table!,
       allTablesMap,
@@ -181,10 +189,17 @@ export async function search(
       {
         sqs: true,
       }
-      ),
+    )
+    return {
+      // final row processing for response
+      rows: await outputProcessing<Row[]>(table, processed, {
+        preserveLinks: true,
+        squash: true,
+      }),
     }
   } catch (err: any) {
     const msg = typeof err === "string" ? err : err.message
-    throw new Error(`Unable to search by SQL - ${msg}`)
+    throw new Error(`Unable to search by SQL - ${msg}`, { cause: err })
   }
 }
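Note: the SQS search now funnels its query through AliasTables.queryWithAliasing, handing it a callback that compiles and executes the SQL itself, then post-processes rows in two passes — sqlOutputProcessing to unpack "tableId.column" keys, then outputProcessing to squash rows and preserve links. A condensed sketch of that control flow (the { sql, bindings } return shape of builder._query is an assumption based on the surrounding hunk):

const alias = new AliasTables(allTables.map(table => table.name))
const rows = await alias.queryWithAliasing(request, async json => {
  // aliased QueryJson in, raw rows out; aliases are reversed on return
  const { sql, bindings } = builder._query(json, { disableReturning: true })
  return context.getAppDB().sql<Row>(sql, bindings)
})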

View File

@@ -112,7 +112,7 @@ describe("external search", () => {
       tableId,
       query: {},
     }
-    const result = await search(searchParams)
+    const result = await search(searchParams, config.table!)
     expect(result.rows).toHaveLength(10)
     expect(result.rows).toEqual(
@@ -130,7 +130,7 @@ describe("external search", () => {
       query: {},
       fields: ["name", "age"],
     }
-    const result = await search(searchParams)
+    const result = await search(searchParams, config.table!)
     expect(result.rows).toHaveLength(10)
     expect(result.rows).toEqual(
@@ -157,7 +157,7 @@ describe("external search", () => {
         },
       },
     }
-    const result = await search(searchParams)
+    const result = await search(searchParams, config.table!)
     expect(result.rows).toHaveLength(3)
     expect(result.rows.map(row => row.id)).toEqual([1, 4, 8])

View File

@@ -81,7 +81,7 @@ describe("internal", () => {
       tableId,
       query: {},
     }
-    const result = await search(searchParams)
+    const result = await search(searchParams, config.table!)
     expect(result.rows).toHaveLength(10)
     expect(result.rows).toEqual(
@@ -99,7 +99,7 @@ describe("internal", () => {
       query: {},
       fields: ["name", "age"],
     }
-    const result = await search(searchParams)
+    const result = await search(searchParams, config.table!)
     expect(result.rows).toHaveLength(10)
     expect(result.rows).toEqual(

View File

@@ -160,7 +160,7 @@ describe("internal search", () => {
     const response = await search.paginatedSearch(
       {
         contains: {
-          column: "a",
+          column: ["a"],
           colArr: [1, 2, 3],
         },
       },
@@ -168,7 +168,7 @@ describe("internal search", () => {
     )
     checkLucene(
       response,
-      `(*:* AND column:a AND colArr:(1 AND 2 AND 3))`,
+      `(*:* AND column:(a) AND colArr:(1 AND 2 AND 3))`,
       PARAMS
     )
   })
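Note: contains-style filters now take an array of values per column, and the generated Lucene term wraps those values in parentheses accordingly. An illustrative filter matching the updated expectation (field names as in the test):

const filters: SearchFilters = {
  contains: {
    column: ["a"], // column must contain "a"
    colArr: [1, 2, 3], // colArr must contain 1 AND 2 AND 3
  },
}
// expected Lucene: (*:* AND column:(a) AND colArr:(1 AND 2 AND 3))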

View File

@@ -6,11 +6,12 @@ import {
   Row,
   SearchFilters,
 } from "@budibase/types"
-import { getSQLClient } from "../../../sdk/app/rows/utils"
+import { getSQLClient } from "./utils"
 import { cloneDeep } from "lodash"
-import sdk from "../../../sdk"
+import datasources from "../datasources"
 import { makeExternalQuery } from "../../../integrations/base/query"
 import { SqlClient } from "../../../integrations/utils"
+import { SQS_DATASOURCE_INTERNAL } from "../../../db/utils"

 const WRITE_OPERATIONS: Operation[] = [
   Operation.CREATE,
@@ -156,12 +157,19 @@ export default class AliasTables {
   }

   async queryWithAliasing(
-    json: QueryJson
+    json: QueryJson,
+    queryFn?: (json: QueryJson) => Promise<DatasourcePlusQueryResponse>
   ): Promise<DatasourcePlusQueryResponse> {
     const datasourceId = json.endpoint.datasourceId
-    const datasource = await sdk.datasources.get(datasourceId)
+    const isSqs = datasourceId === SQS_DATASOURCE_INTERNAL
+    let aliasingEnabled: boolean, datasource: Datasource | undefined
+    if (isSqs) {
+      aliasingEnabled = true
+    } else {
+      datasource = await datasources.get(datasourceId)
+      aliasingEnabled = this.isAliasingEnabled(json, datasource)
+    }
-    const aliasingEnabled = this.isAliasingEnabled(json, datasource)
     if (aliasingEnabled) {
       json = cloneDeep(json)
       // run through the query json to update anywhere a table may be used
@@ -207,7 +215,15 @@ export default class AliasTables {
       }
       json.tableAliases = invertedTableAliases
     }
-    const response = await makeExternalQuery(datasource, json)
+    let response: DatasourcePlusQueryResponse
+    if (datasource && !isSqs) {
+      response = await makeExternalQuery(datasource, json)
+    } else if (queryFn) {
+      response = await queryFn(json)
+    } else {
+      throw new Error("No supplied method to perform aliased query")
+    }
     if (Array.isArray(response) && aliasingEnabled) {
       return this.reverse(response)
     } else {
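Note: queryWithAliasing now supports two execution paths — external datasources still go through makeExternalQuery, while the internal SQS datasource must supply its own executor via the new optional queryFn callback. A usage sketch (runQuery is an invented stand-in for a caller-provided executor):

// external datasource: behaves as before
await aliaser.queryWithAliasing(externalJson)
// SQS: the caller provides the executor
await aliaser.queryWithAliasing(sqsJson, json => runQuery(json))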

View File

@@ -52,6 +52,12 @@ export async function getDatasourceAndQuery(
 ): Promise<DatasourcePlusQueryResponse> {
   const datasourceId = json.endpoint.datasourceId
   const datasource = await sdk.datasources.get(datasourceId)
+  const table = datasource.entities?.[json.endpoint.entityId]
+  if (!json.meta && table) {
+    json.meta = {
+      table,
+    }
+  }
   return makeExternalQuery(datasource, json)
 }
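Note: table metadata is now attached to the query before dispatch, so downstream integrations can read the schema from json.meta without another lookup. The logic, restated outside the diff:

const table = datasource.entities?.[json.endpoint.entityId]
if (!json.meta && table) {
  json.meta = { table } // only populated when absent, so explicit callers win
}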

View File

@@ -3,7 +3,6 @@ import {
   Operation,
   RelationshipType,
   RenameColumn,
-  AddColumn,
   Table,
   TableRequest,
   ViewV2,
@@ -33,7 +32,7 @@ import * as viewSdk from "../../views"
 export async function save(
   datasourceId: string,
   update: Table,
-  opts?: { tableId?: string; renaming?: RenameColumn; adding?: AddColumn }
+  opts?: { tableId?: string; renaming?: RenameColumn }
 ) {
   let tableToSave: TableRequest = {
     ...update,
@@ -52,6 +51,12 @@ export async function save(
     !oldTable &&
     (tableToSave.primary == null || tableToSave.primary.length === 0)
   ) {
+    if (tableToSave.schema.id) {
+      throw new Error(
+        "External tables with no `primary` column set will define an `id` column, but we found an `id` column in the supplied schema. Either set a `primary` column or remove the `id` column."
+      )
+    }
     tableToSave.primary = ["id"]
     tableToSave.schema.id = {
       type: FieldType.NUMBER,
@@ -179,14 +184,7 @@ export async function save(
   // remove the rename prop
   delete tableToSave._rename
-  // if adding a new column, we need to rebuild the schema for that table to get the 'externalType' of the column
-  if (opts?.adding) {
-    datasource.entities[tableToSave.name] = (
-      await datasourceSdk.buildFilteredSchema(datasource, [tableToSave.name])
-    ).tables[tableToSave.name]
-  } else {
   datasource.entities[tableToSave.name] = tableToSave
-  }
   // store it into couch now for budibase reference
   await db.put(populateExternalTableSchemas(datasource))
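Note: saving a new external table with no primary key auto-generates a numeric id column, so a user-supplied id column would silently collide with it; the new guard turns that into an explicit error. An illustrative trigger (the table literal is invented):

// throws: no `primary` set, but the schema already defines `id`
await save(datasourceId, {
  ...table,
  primary: [],
  schema: { ...table.schema, id: { type: FieldType.NUMBER, name: "id" } },
})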

View File

@@ -1,8 +1,19 @@
 import { context, SQLITE_DESIGN_DOC_ID } from "@budibase/backend-core"
-import { FieldType, SQLiteDefinition, SQLiteType, Table } from "@budibase/types"
+import {
+  FieldType,
+  RelationshipFieldMetadata,
+  SQLiteDefinition,
+  SQLiteTable,
+  SQLiteTables,
+  SQLiteType,
+  Table,
+} from "@budibase/types"
 import { cloneDeep } from "lodash"
 import tablesSdk from "../"
-import { CONSTANT_INTERNAL_ROW_COLS } from "../../../../db/utils"
+import {
+  CONSTANT_INTERNAL_ROW_COLS,
+  generateJunctionTableID,
+} from "../../../../db/utils"

 const BASIC_SQLITE_DOC: SQLiteDefinition = {
   _id: SQLITE_DESIGN_DOC_ID,
@@ -31,14 +42,47 @@ const FieldTypeMap: Record<FieldType, SQLiteType> = {
   [FieldType.ATTACHMENT_SINGLE]: SQLiteType.BLOB,
   [FieldType.ARRAY]: SQLiteType.BLOB,
   [FieldType.LINK]: SQLiteType.BLOB,
-  [FieldType.BIGINT]: SQLiteType.REAL,
+  [FieldType.BIGINT]: SQLiteType.TEXT,
   // TODO: consider the difference between multi-user and single user types (subtyping)
   [FieldType.BB_REFERENCE]: SQLiteType.TEXT,
 }

-function mapTable(table: Table): { [key: string]: SQLiteType } {
+function buildRelationshipDefinitions(
+  table: Table,
+  relationshipColumn: RelationshipFieldMetadata
+): {
+  tableId: string
+  definition: SQLiteTable
+} {
+  const tableId = table._id!,
+    relatedTableId = relationshipColumn.tableId
+  return {
+    tableId: generateJunctionTableID(tableId, relatedTableId),
+    definition: {
+      ["doc1.rowId"]: SQLiteType.TEXT,
+      ["doc1.tableId"]: SQLiteType.TEXT,
+      ["doc1.fieldName"]: SQLiteType.TEXT,
+      ["doc2.rowId"]: SQLiteType.TEXT,
+      ["doc2.tableId"]: SQLiteType.TEXT,
+      ["doc2.fieldName"]: SQLiteType.TEXT,
+      tableId: SQLiteType.TEXT,
+    },
+  }
+}

+// this can generate relationship tables as part of the mapping
+function mapTable(table: Table): SQLiteTables {
+  const tables: SQLiteTables = {}
   const fields: Record<string, SQLiteType> = {}
   for (let [key, column] of Object.entries(table.schema)) {
+    // relationships should be handled differently
+    if (column.type === FieldType.LINK) {
+      const { tableId, definition } = buildRelationshipDefinitions(
+        table,
+        column
+      )
+      tables[tableId] = { fields: definition }
+    }
     if (!FieldTypeMap[column.type]) {
       throw new Error(`Unable to map type "${column.type}" to SQLite type`)
     }
@@ -49,10 +93,12 @@ function mapTable(table: Table): { [key: string]: SQLiteType } {
   CONSTANT_INTERNAL_ROW_COLS.forEach(col => {
     constantMap[col] = SQLiteType.TEXT
   })
-  return {
+  const thisTable: SQLiteTable = {
     ...constantMap,
     ...fields,
   }
+  tables[table._id!] = { fields: thisTable }
+  return tables
 }

 // nothing exists, need to iterate though existing tables
@@ -60,8 +106,9 @@ async function buildBaseDefinition(): Promise<SQLiteDefinition> {
   const tables = await tablesSdk.getAllInternalTables()
   const definition = cloneDeep(BASIC_SQLITE_DOC)
   for (let table of tables) {
-    definition.sql.tables[table._id!] = {
-      fields: mapTable(table),
+    definition.sql.tables = {
+      ...definition.sql.tables,
+      ...mapTable(table),
     }
   }
   return definition
@@ -75,8 +122,9 @@ export async function addTableToSqlite(table: Table) {
   } catch (err) {
     definition = await buildBaseDefinition()
   }
-  definition.sql.tables[table._id!] = {
-    fields: mapTable(table),
+  definition.sql.tables = {
+    ...definition.sql.tables,
+    ...mapTable(table),
   }
   await db.put(definition)
 }
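Note: mapTable now returns a map of tables rather than a single field map, because a relationship column contributes a junction-table definition alongside the table itself. BIGINT also switches from REAL to TEXT: SQLite's REAL is a double, whose 53-bit mantissa cannot represent 64-bit integers exactly. An illustrative result for a table with one link column (ids invented; shape per the SQLiteTables usage above):

const result = mapTable(usersTable)
// {
//   "jt_ta_users_ta_teams": { fields: { "doc1.rowId": SQLiteType.TEXT /* … */ } },
//   "ta_users": { fields: { _id: SQLiteType.TEXT, name: SQLiteType.TEXT /* … */ } },
// }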

Some files were not shown because too many files have changed in this diff.