diff --git a/charts/budibase/README.md b/charts/budibase/README.md
index b803da18a4..207992087d 100644
--- a/charts/budibase/README.md
+++ b/charts/budibase/README.md
@@ -152,6 +152,8 @@ $ helm install --create-namespace --namespace budibase budibase . -f values.yaml
| services.apps.autoscaling.targetCPUUtilizationPercentage | int | `80` | Target CPU utilization percentage for the apps service. Note that for autoscaling to work, you will need to have metrics-server configured, and resources set for the apps pods. |
| services.apps.extraContainers | list | `[]` | Additional containers to be added to the apps pod. |
| services.apps.extraEnv | list | `[]` | Extra environment variables to set for apps pods. Takes a list of name=value pairs. |
+| services.apps.extraVolumeMounts | list | `[]` | Additional volumeMounts to the main apps container. |
+| services.apps.extraVolumes | list | `[]` | Additional volumes to the apps pod. |
| services.apps.httpLogging | int | `1` | Whether or not to log HTTP requests to the apps service. |
| services.apps.livenessProbe | object | HTTP health checks. | Liveness probe configuration for apps pods. You shouldn't need to change this, but if you want to you can find more information here: |
| services.apps.logLevel | string | `"info"` | The log level for the apps service. |
@@ -166,6 +168,8 @@ $ helm install --create-namespace --namespace budibase budibase . -f values.yaml
| services.automationWorkers.enabled | bool | `true` | Whether or not to enable the automation worker service. If you disable this, automations will be processed by the apps service. |
| services.automationWorkers.extraContainers | list | `[]` | Additional containers to be added to the automationWorkers pod. |
| services.automationWorkers.extraEnv | list | `[]` | Extra environment variables to set for automation worker pods. Takes a list of name=value pairs. |
+| services.automationWorkers.extraVolumeMounts | list | `[]` | Additional volumeMounts to the main automationWorkers container. |
+| services.automationWorkers.extraVolumes | list | `[]` | Additional volumes to the automationWorkers pod. |
| services.automationWorkers.livenessProbe | object | HTTP health checks. | Liveness probe configuration for automation worker pods. You shouldn't need to change this, but if you want to you can find more information here: |
| services.automationWorkers.logLevel | string | `"info"` | The log level for the automation worker service. |
| services.automationWorkers.readinessProbe | object | HTTP health checks. | Readiness probe configuration for automation worker pods. You shouldn't need to change this, but if you want to you can find more information here: |
@@ -185,6 +189,8 @@ $ helm install --create-namespace --namespace budibase budibase . -f values.yaml
| services.objectStore.cloudfront.privateKey64 | string | `""` | Base64 encoded private key for the above public key. |
| services.objectStore.cloudfront.publicKeyId | string | `""` | ID of public key stored in cloudfront. |
| services.objectStore.extraContainers | list | `[]` | Additional containers to be added to the objectStore pod. |
+| services.objectStore.extraVolumeMounts | list | `[]` | Additional volumeMounts to the main objectStore container. |
+| services.objectStore.extraVolumes | list | `[]` | Additional volumes to the objectStore pod. |
| services.objectStore.minio | bool | `true` | Set to false if using another object store, such as S3. You will need to set `services.objectStore.url` to point to your bucket if you do this. |
| services.objectStore.region | string | `""` | AWS_REGION if using S3 |
| services.objectStore.resources | object | `{}` | The resources to use for Minio pods. See for more information on how to set these. |
@@ -197,6 +203,8 @@ $ helm install --create-namespace --namespace budibase budibase . -f values.yaml
| services.proxy.autoscaling.minReplicas | int | `1` | |
| services.proxy.autoscaling.targetCPUUtilizationPercentage | int | `80` | Target CPU utilization percentage for the proxy service. Note that for autoscaling to work, you will need to have metrics-server configured, and resources set for the proxy pods. |
| services.proxy.extraContainers | list | `[]` | |
+| services.proxy.extraVolumeMounts | list | `[]` | Additional volumeMounts to the main proxy container. |
+| services.proxy.extraVolumes | list | `[]` | Additional volumes to the proxy pod. |
| services.proxy.livenessProbe | object | HTTP health checks. | Liveness probe configuration for proxy pods. You shouldn't need to change this, but if you want to you can find more information here: |
| services.proxy.readinessProbe | object | HTTP health checks. | Readiness probe configuration for proxy pods. You shouldn't need to change this, but if you want to you can find more information here: |
| services.proxy.replicaCount | int | `1` | The number of proxy replicas to run. |
@@ -204,6 +212,9 @@ $ helm install --create-namespace --namespace budibase budibase . -f values.yaml
| services.proxy.startupProbe | object | HTTP health checks. | Startup probe configuration for proxy pods. You shouldn't need to change this, but if you want to you can find more information here: |
| services.redis.enabled | bool | `true` | Whether or not to deploy a Redis pod into your cluster. |
| services.redis.extraContainers | list | `[]` | Additional containers to be added to the redis pod. |
+| services.redis.extraVolumeMounts | list | `[]` | Additional volumeMounts to the main redis container. |
+| services.redis.extraVolumes | list | `[]` | Additional volumes to the redis pod. |
+| services.redis.image | string | `"redis"` | The Redis image to use. |
| services.redis.password | string | `"budibase"` | The password to use when connecting to Redis. It's recommended that you change this from the default if you're running Redis in-cluster. |
| services.redis.port | int | `6379` | Port to expose Redis on. |
| services.redis.resources | object | `{}` | The resources to use for Redis pods. See for more information on how to set these. |
@@ -216,6 +227,8 @@ $ helm install --create-namespace --namespace budibase budibase . -f values.yaml
| services.worker.autoscaling.targetCPUUtilizationPercentage | int | `80` | Target CPU utilization percentage for the worker service. Note that for autoscaling to work, you will need to have metrics-server configured, and resources set for the worker pods. |
| services.worker.extraContainers | list | `[]` | Additional containers to be added to the worker pod. |
| services.worker.extraEnv | list | `[]` | Extra environment variables to set for worker pods. Takes a list of name=value pairs. |
+| services.worker.extraVolumeMounts | list | `[]` | Additional volumeMounts to the main worker container. |
+| services.worker.extraVolumes | list | `[]` | Additional volumes to the worker pod. |
| services.worker.httpLogging | int | `1` | Whether or not to log HTTP requests to the worker service. |
| services.worker.livenessProbe | object | HTTP health checks. | Liveness probe configuration for worker pods. You shouldn't need to change this, but if you want to you can find more information here: |
| services.worker.logLevel | string | `"info"` | The log level for the worker service. |
diff --git a/charts/budibase/templates/app-service-deployment.yaml b/charts/budibase/templates/app-service-deployment.yaml
index 2fd8506e30..b380908dd1 100644
--- a/charts/budibase/templates/app-service-deployment.yaml
+++ b/charts/budibase/templates/app-service-deployment.yaml
@@ -235,6 +235,10 @@ spec:
args:
{{- toYaml .Values.services.apps.args | nindent 10 }}
{{ end }}
+ {{ if .Values.services.apps.extraVolumeMounts }}
+ volumeMounts:
+ {{- toYaml .Values.services.apps.extraVolumeMounts | nindent 10 }}
+ {{- end }}
{{- if .Values.services.apps.extraContainers }}
{{- toYaml .Values.services.apps.extraContainers | nindent 6 }}
{{- end }}
@@ -261,4 +265,8 @@ spec:
- name: ndots
value: {{ .Values.services.apps.ndots | quote }}
{{ end }}
+ {{ if .Values.services.apps.extraVolumes }}
+ volumes:
+ {{- toYaml .Values.services.apps.extraVolumes | nindent 6 }}
+ {{- end }}
status: {}
diff --git a/charts/budibase/templates/automation-worker-service-deployment.yaml b/charts/budibase/templates/automation-worker-service-deployment.yaml
index 53d5fcc860..51fa9ee4bb 100644
--- a/charts/budibase/templates/automation-worker-service-deployment.yaml
+++ b/charts/budibase/templates/automation-worker-service-deployment.yaml
@@ -235,6 +235,10 @@ spec:
args:
{{- toYaml .Values.services.automationWorkers.args | nindent 10 }}
{{ end }}
+ {{ if .Values.services.automationWorkers.extraVolumeMounts }}
+ volumeMounts:
+ {{- toYaml .Values.services.automationWorkers.extraVolumeMounts | nindent 10 }}
+ {{ end }}
{{- if .Values.services.automationWorkers.extraContainers }}
{{- toYaml .Values.services.automationWorkers.extraContainers | nindent 6 }}
{{- end }}
@@ -261,5 +265,9 @@ spec:
- name: ndots
value: {{ .Values.services.automationWorkers.ndots | quote }}
{{ end }}
+ {{ if .Values.services.automationWorkers.extraVolumes }}
+ volumes:
+ {{- toYaml .Values.services.automationWorkers.extraVolumes | nindent 8 }}
+ {{ end }}
status: {}
{{- end }}
\ No newline at end of file
diff --git a/charts/budibase/templates/minio-service-deployment.yaml b/charts/budibase/templates/minio-service-deployment.yaml
index ade1d37cd2..901ead2b46 100644
--- a/charts/budibase/templates/minio-service-deployment.yaml
+++ b/charts/budibase/templates/minio-service-deployment.yaml
@@ -54,6 +54,9 @@ spec:
volumeMounts:
- mountPath: /data
name: minio-data
+ {{ if .Values.services.objectStore.extraVolumeMounts }}
+ {{- toYaml .Values.services.objectStore.extraVolumeMounts | nindent 8 }}
+ {{- end }}
{{- if .Values.services.objectStore.extraContainers }}
{{- toYaml .Values.services.objectStore.extraContainers | nindent 6 }}
{{- end }}
@@ -78,5 +81,8 @@ spec:
- name: minio-data
persistentVolumeClaim:
claimName: minio-data
+ {{ if .Values.services.objectStore.extraVolumes }}
+ {{- toYaml .Values.services.objectStore.extraVolumes | nindent 6 }}
+ {{- end }}
status: {}
{{- end }}
diff --git a/charts/budibase/templates/proxy-service-deployment.yaml b/charts/budibase/templates/proxy-service-deployment.yaml
index 462c6a0749..d5ea696431 100644
--- a/charts/budibase/templates/proxy-service-deployment.yaml
+++ b/charts/budibase/templates/proxy-service-deployment.yaml
@@ -82,6 +82,10 @@ spec:
resources:
{{- toYaml . | nindent 10 }}
{{ end }}
+ {{ if .Values.services.proxy.extraVolumeMounts }}
+ volumeMounts:
+ {{- toYaml .Values.services.proxy.extraVolumeMounts | nindent 8 }}
+ {{- end }}
{{- if .Values.services.proxy.extraContainers }}
{{- toYaml .Values.services.proxy.extraContainers | nindent 6 }}
{{- end }}
@@ -110,7 +114,10 @@ spec:
args:
{{- toYaml .Values.services.proxy.args | nindent 8 }}
{{ end }}
+ {{ if .Values.services.proxy.extraVolumes }}
volumes:
+ {{- toYaml .Values.services.proxy.extraVolumes | nindent 6 }}
+ {{ end }}
{{ if .Values.services.proxy.ndots }}
dnsConfig:
options:
diff --git a/charts/budibase/templates/redis-service-deployment.yaml b/charts/budibase/templates/redis-service-deployment.yaml
index 1a003d3814..9ad12e0167 100644
--- a/charts/budibase/templates/redis-service-deployment.yaml
+++ b/charts/budibase/templates/redis-service-deployment.yaml
@@ -22,7 +22,7 @@ spec:
- redis-server
- --requirepass
- {{ .Values.services.redis.password }}
- image: redis
+ image: {{ .Values.services.redis.image }}
imagePullPolicy: ""
name: redis-service
ports:
@@ -34,6 +34,9 @@ spec:
volumeMounts:
- mountPath: /data
name: redis-data
+ {{ if .Values.services.redis.extraVolumeMounts }}
+ {{- toYaml .Values.services.redis.extraVolumeMounts | nindent 8 }}
+ {{- end }}
{{- if .Values.services.redis.extraContainers }}
{{- toYaml .Values.services.redis.extraContainers | nindent 6 }}
{{- end }}
@@ -58,6 +61,9 @@ spec:
- name: redis-data
persistentVolumeClaim:
claimName: redis-data
+ {{ if .Values.services.redis.extraVolumes }}
+ {{- toYaml .Values.services.redis.extraVolumes | nindent 6 }}
+ {{- end }}
status: {}
{{- end }}
diff --git a/charts/budibase/templates/worker-service-deployment.yaml b/charts/budibase/templates/worker-service-deployment.yaml
index cc27bf429e..e37b2bc0e4 100644
--- a/charts/budibase/templates/worker-service-deployment.yaml
+++ b/charts/budibase/templates/worker-service-deployment.yaml
@@ -221,6 +221,10 @@ spec:
args:
{{- toYaml .Values.services.worker.args | nindent 10 }}
{{ end }}
+ {{ if .Values.services.worker.extraVolumeMounts }}
+ volumeMounts:
+ {{- toYaml .Values.services.worker.extraVolumeMounts | nindent 10 }}
+ {{- end }}
{{- if .Values.services.worker.extraContainers }}
{{- toYaml .Values.services.worker.extraContainers | nindent 6 }}
{{- end }}
@@ -247,4 +251,8 @@ spec:
- name: ndots
value: {{ .Values.services.worker.ndots | quote }}
{{ end }}
+ {{ if .Values.services.worker.extraVolumes }}
+ volumes:
+ {{- toYaml .Values.services.worker.extraVolumes | nindent 6 }}
+ {{- end }}
status: {}
diff --git a/charts/budibase/values.yaml b/charts/budibase/values.yaml
index dfbbca6cad..9ace768625 100644
--- a/charts/budibase/values.yaml
+++ b/charts/budibase/values.yaml
@@ -211,6 +211,16 @@ services:
# - name: my-sidecar
# image: myimage:latest
+ # -- Additional volumeMounts to the main proxy container.
+ extraVolumeMounts: []
+ # - name: my-volume
+ # mountPath: /path/to/mount
+
+ # -- Additional volumes to the proxy pod.
+ extraVolumes: []
+ # - name: my-volume
+ # emptyDir: {}
+
apps:
# @ignore (you shouldn't need to change this)
port: 4002
@@ -283,6 +293,16 @@ services:
# - name: my-sidecar
# image: myimage:latest
+ # -- Additional volumeMounts to the main apps container.
+ extraVolumeMounts: []
+ # - name: my-volume
+ # mountPath: /path/to/mount
+
+ # -- Additional volumes to the apps pod.
+ extraVolumes: []
+ # - name: my-volume
+ # emptyDir: {}
+
automationWorkers:
# -- Whether or not to enable the automation worker service. If you disable this,
# automations will be processed by the apps service.
@@ -359,6 +379,16 @@ services:
# - name: my-sidecar
# image: myimage:latest
+ # -- Additional volumeMounts to the main automationWorkers container.
+ extraVolumeMounts: []
+ # - name: my-volume
+ # mountPath: /path/to/mount
+
+ # -- Additional volumes to the automationWorkers pod.
+ extraVolumes: []
+ # - name: my-volume
+ # emptyDir: {}
+
worker:
# @ignore (you shouldn't need to change this)
port: 4003
@@ -431,6 +461,16 @@ services:
# - name: my-sidecar
# image: myimage:latest
+ # -- Additional volumeMounts to the main worker container.
+ extraVolumeMounts: []
+ # - name: my-volume
+ # mountPath: /path/to/mount
+
+ # -- Additional volumes to the worker pod.
+ extraVolumes: []
+ # - name: my-volume
+ # emptyDir: {}
+
couchdb:
# -- Whether or not to spin up a CouchDB instance in your cluster. True by
# default, and the configuration for the CouchDB instance is under the
@@ -456,6 +496,8 @@ services:
resources: {}
redis:
+ # -- The Redis image to use.
+ image: redis
# -- Whether or not to deploy a Redis pod into your cluster.
enabled: true
# -- Port to expose Redis on.
@@ -484,6 +526,16 @@ services:
# - name: my-sidecar
# image: myimage:latest
+ # -- Additional volumeMounts to the main redis container.
+ extraVolumeMounts: []
+ # - name: my-volume
+ # mountPath: /path/to/mount
+
+ # -- Additional volumes to the redis pod.
+ extraVolumes: []
+ # - name: my-volume
+ # emptyDir: {}
+
objectStore:
# -- Set to false if using another object store, such as S3. You will need
# to set `services.objectStore.url` to point to your bucket if you do this.
@@ -530,6 +582,16 @@ services:
# - name: my-sidecar
# image: myimage:latest
+ # -- Additional volumeMounts to the main objectStore container.
+ extraVolumeMounts: []
+ # - name: my-volume
+ # mountPath: /path/to/mount
+
+ # -- Additional volumes to the objectStore pod.
+ extraVolumes: []
+ # - name: my-volume
+ # emptyDir: {}
+
# Override values in couchDB subchart. We're only specifying the values we're changing.
# If you want to see all of the available values, see:
# https://github.com/apache/couchdb-helm/tree/couchdb-4.3.0/couchdb
diff --git a/hosting/.env b/hosting/.env
index 8a0756c0e3..173d409d04 100644
--- a/hosting/.env
+++ b/hosting/.env
@@ -17,6 +17,7 @@ APP_PORT=4002
WORKER_PORT=4003
MINIO_PORT=4004
COUCH_DB_PORT=4005
+COUCH_DB_SQS_PORT=4006
REDIS_PORT=6379
WATCHTOWER_PORT=6161
BUDIBASE_ENVIRONMENT=PRODUCTION
@@ -28,4 +29,4 @@ BB_ADMIN_USER_PASSWORD=
# A path that is watched for plugin bundles. Any bundles found are imported automatically/
PLUGINS_DIR=
-ROLLING_LOG_MAX_SIZE=
\ No newline at end of file
+ROLLING_LOG_MAX_SIZE=
diff --git a/lerna.json b/lerna.json
index 78a3aa13e9..a2be7be7b4 100644
--- a/lerna.json
+++ b/lerna.json
@@ -1,5 +1,5 @@
{
- "version": "2.23.4",
+ "version": "2.23.6",
"npmClient": "yarn",
"packages": [
"packages/*",
diff --git a/package.json b/package.json
index 2816247939..e520b7c2cf 100644
--- a/package.json
+++ b/package.json
@@ -56,6 +56,7 @@
"dev:noserver": "yarn run kill-builder && lerna run --stream dev:stack:up --ignore @budibase/account-portal-server && lerna run --stream dev --ignore @budibase/backend-core --ignore @budibase/server --ignore @budibase/worker --ignore=@budibase/account-portal-ui --ignore @budibase/account-portal-server",
"dev:server": "yarn run kill-server && lerna run --stream dev --scope @budibase/worker --scope @budibase/server",
"dev:accountportal": "yarn kill-accountportal && lerna run dev --stream --scope @budibase/account-portal-ui --scope @budibase/account-portal-server",
+ "dev:camunda": "./scripts/deploy-camunda.sh",
"dev:all": "yarn run kill-all && lerna run --stream dev",
"dev:built": "yarn run kill-all && cd packages/server && yarn dev:stack:up && cd ../../ && lerna run --stream dev:built",
"dev:docker": "yarn build --scope @budibase/server --scope @budibase/worker && docker-compose -f hosting/docker-compose.build.yaml -f hosting/docker-compose.dev.yaml --env-file hosting/.env up --build --scale proxy-service=0",
diff --git a/packages/account-portal b/packages/account-portal
index a0ee9cad8c..328c84234d 160000
--- a/packages/account-portal
+++ b/packages/account-portal
@@ -1 +1 @@
-Subproject commit a0ee9cad8cefb8f9f40228705711be174f018fa9
+Subproject commit 328c84234d11d97d840f0eb2c72665b04ba9e4f8
diff --git a/packages/backend-core/src/environment.ts b/packages/backend-core/src/environment.ts
index 2da2a77d67..8dbc904643 100644
--- a/packages/backend-core/src/environment.ts
+++ b/packages/backend-core/src/environment.ts
@@ -107,7 +107,7 @@ const environment = {
ENCRYPTION_KEY: process.env.ENCRYPTION_KEY,
API_ENCRYPTION_KEY: getAPIEncryptionKey(),
COUCH_DB_URL: process.env.COUCH_DB_URL || "http://localhost:4005",
- COUCH_DB_SQL_URL: process.env.COUCH_DB_SQL_URL || "http://localhost:4984",
+ COUCH_DB_SQL_URL: process.env.COUCH_DB_SQL_URL || "http://localhost:4006",
COUCH_DB_USERNAME: process.env.COUCH_DB_USER,
COUCH_DB_PASSWORD: process.env.COUCH_DB_PASSWORD,
GOOGLE_CLIENT_ID: process.env.GOOGLE_CLIENT_ID,
diff --git a/packages/builder/src/components/common/Dropzone.svelte b/packages/builder/src/components/common/Dropzone.svelte
index daa6ad1807..a864e1d028 100644
--- a/packages/builder/src/components/common/Dropzone.svelte
+++ b/packages/builder/src/components/common/Dropzone.svelte
@@ -27,14 +27,6 @@
return []
}
}
-
- async function deleteAttachments(fileList) {
- try {
- return await API.deleteBuilderAttachments(fileList)
- } catch (error) {
- return []
- }
- }
diff --git a/packages/client/src/components/app/NavItem.svelte b/packages/client/src/components/app/NavItem.svelte
index 360ea4f8bb..fcdda57ace 100644
--- a/packages/client/src/components/app/NavItem.svelte
+++ b/packages/client/src/components/app/NavItem.svelte
@@ -157,6 +157,11 @@
width: 100%;
}
+ /* Use normal theme colors for links when using a top nav */
+ .dropdown:not(.left) .sublinks a {
+ color: var(--spectrum-alias-text-color);
+ }
+
/* Left dropdowns */
.dropdown.left .sublinks-wrapper {
display: none;
diff --git a/packages/client/src/components/app/forms/AttachmentField.svelte b/packages/client/src/components/app/forms/AttachmentField.svelte
index 644630810d..3489fd809c 100644
--- a/packages/client/src/components/app/forms/AttachmentField.svelte
+++ b/packages/client/src/components/app/forms/AttachmentField.svelte
@@ -58,17 +58,6 @@
}
}
- const deleteAttachments = async fileList => {
- try {
- return await API.deleteAttachments({
- keys: fileList,
- tableId: formContext?.dataSource?.tableId,
- })
- } catch (error) {
- return []
- }
- }
-
const handleChange = e => {
const value = fieldApiMapper.set(e.detail)
const changed = fieldApi.setValue(value)
@@ -98,7 +87,6 @@
error={fieldState.error}
on:change={handleChange}
{processFiles}
- {deleteAttachments}
{handleFileTooLarge}
{handleTooManyFiles}
{maximum}
diff --git a/packages/frontend-core/src/api/attachments.js b/packages/frontend-core/src/api/attachments.js
index cff466c86f..72f280d99d 100644
--- a/packages/frontend-core/src/api/attachments.js
+++ b/packages/frontend-core/src/api/attachments.js
@@ -61,34 +61,6 @@ export const buildAttachmentEndpoints = API => {
})
return { publicUrl }
},
-
- /**
- * Deletes attachments from the bucket.
- * @param keys the attachments to delete
- * @param tableId the associated table ID
- */
- deleteAttachments: async ({ keys, tableId }) => {
- return await API.post({
- url: `/api/attachments/${tableId}/delete`,
- body: {
- keys,
- },
- })
- },
-
- /**
- * Deletes attachments from the builder bucket.
- * @param keys the attachments to delete
- */
- deleteBuilderAttachments: async keys => {
- return await API.post({
- url: `/api/attachments/delete`,
- body: {
- keys,
- },
- })
- },
-
/**
* Download an attachment from a row given its column name.
* @param datasourceId the ID of the datasource to download from
diff --git a/packages/frontend-core/src/components/grid/cells/AttachmentCell.svelte b/packages/frontend-core/src/components/grid/cells/AttachmentCell.svelte
index 3a1f165b6e..e7dc51e5d5 100644
--- a/packages/frontend-core/src/components/grid/cells/AttachmentCell.svelte
+++ b/packages/frontend-core/src/components/grid/cells/AttachmentCell.svelte
@@ -61,14 +61,6 @@
}
}
- const deleteAttachments = async fileList => {
- try {
- return await API.deleteBuilderAttachments(fileList)
- } catch (error) {
- return []
- }
- }
-
onMount(() => {
api = {
focus: () => open(),
@@ -101,7 +93,6 @@
on:change={e => onChange(e.detail)}
maximum={maximum || schema.constraints?.length?.maximum}
{processFiles}
- {deleteAttachments}
{handleFileTooLarge}
/>
diff --git a/packages/pro b/packages/pro
index ef186d0024..c68183402b 160000
--- a/packages/pro
+++ b/packages/pro
@@ -1 +1 @@
-Subproject commit ef186d00241f96037f9fd34d7a3826041977ab3a
+Subproject commit c68183402b8fb17248572006531d5293ffc8a9ac
diff --git a/packages/server/package.json b/packages/server/package.json
index ad03033e67..76402785d7 100644
--- a/packages/server/package.json
+++ b/packages/server/package.json
@@ -125,7 +125,7 @@
"@babel/preset-env": "7.16.11",
"@swc/core": "1.3.71",
"@swc/jest": "0.2.27",
- "@types/archiver": "^6.0.2",
+ "@types/archiver": "6.0.2",
"@types/global-agent": "2.1.1",
"@types/google-spreadsheet": "3.1.5",
"@types/jest": "29.5.5",
diff --git a/packages/server/src/api/controllers/application.ts b/packages/server/src/api/controllers/application.ts
index ceef421fab..6acdfcd465 100644
--- a/packages/server/src/api/controllers/application.ts
+++ b/packages/server/src/api/controllers/application.ts
@@ -320,6 +320,7 @@ async function performAppCreate(ctx: UserCtx) {
"theme",
"customTheme",
"icon",
+ "snippets",
]
keys.forEach(key => {
if (existing[key]) {
diff --git a/packages/server/src/api/controllers/row/ExternalRequest.ts b/packages/server/src/api/controllers/row/ExternalRequest.ts
index 7fc0333de1..3dd3f9b8e7 100644
--- a/packages/server/src/api/controllers/row/ExternalRequest.ts
+++ b/packages/server/src/api/controllers/row/ExternalRequest.ts
@@ -36,7 +36,6 @@ import { getDatasourceAndQuery } from "../../../sdk/app/rows/utils"
import { processObjectSync } from "@budibase/string-templates"
import { cloneDeep } from "lodash/fp"
import { db as dbCore } from "@budibase/backend-core"
-import AliasTables from "./alias"
import sdk from "../../../sdk"
import env from "../../../environment"
@@ -120,6 +119,9 @@ async function removeManyToManyRelationships(
endpoint: getEndpoint(tableId, Operation.DELETE),
body: { [colName]: null },
filters,
+ meta: {
+ table,
+ },
})
} else {
return []
@@ -134,6 +136,9 @@ async function removeOneToManyRelationships(rowId: string, table: Table) {
return getDatasourceAndQuery({
endpoint: getEndpoint(tableId, Operation.UPDATE),
filters,
+ meta: {
+ table,
+ },
})
} else {
return []
@@ -249,6 +254,9 @@ export class ExternalRequest {
const response = await getDatasourceAndQuery({
endpoint: getEndpoint(table._id!, Operation.READ),
filters: buildFilters(rowId, {}, table),
+ meta: {
+ table,
+ },
})
if (Array.isArray(response) && response.length > 0) {
return response[0]
@@ -396,6 +404,9 @@ export class ExternalRequest {
[fieldName]: row[lookupField],
},
},
+ meta: {
+ table,
+ },
})
// this is the response from knex if no rows found
const rows: Row[] =
@@ -426,6 +437,7 @@ export class ExternalRequest {
// if we're creating (in a through table) need to wipe the existing ones first
const promises = []
const related = await this.lookupRelations(mainTableId, row)
+ const table = this.getTable(mainTableId)!
for (let relationship of relationships) {
const { key, tableId, isUpdate, id, ...rest } = relationship
const body: { [key: string]: any } = processObjectSync(rest, row, {})
@@ -471,6 +483,9 @@ export class ExternalRequest {
// if we're doing many relationships then we're writing, only one response
body,
filters: buildFilters(id, {}, linkTable),
+ meta: {
+ table,
+ },
})
)
} else {
@@ -618,7 +633,7 @@ export class ExternalRequest {
if (env.SQL_ALIASING_DISABLE) {
response = await getDatasourceAndQuery(json)
} else {
- const aliasing = new AliasTables(Object.keys(this.tables))
+ const aliasing = new sdk.rows.AliasTables(Object.keys(this.tables))
response = await aliasing.queryWithAliasing(json)
}
diff --git a/packages/server/src/api/controllers/row/utils/basic.ts b/packages/server/src/api/controllers/row/utils/basic.ts
index 1fc84de9c7..6255e13c1c 100644
--- a/packages/server/src/api/controllers/row/utils/basic.ts
+++ b/packages/server/src/api/controllers/row/utils/basic.ts
@@ -62,12 +62,12 @@ export function basicProcessing({
row,
table,
isLinked,
- internal,
+ sqs,
}: {
row: Row
table: Table
isLinked: boolean
- internal?: boolean
+ sqs?: boolean
}): Row {
const thisRow: Row = {}
// filter the row down to what is actually the row (not joined)
@@ -84,12 +84,13 @@ export function basicProcessing({
thisRow[fieldName] = value
}
}
- if (!internal) {
+ if (!sqs) {
thisRow._id = generateIdForRow(row, table, isLinked)
thisRow.tableId = table._id
thisRow._rev = "rev"
} else {
- for (let internalColumn of CONSTANT_INTERNAL_ROW_COLS) {
+ const columns = Object.keys(table.schema)
+ for (let internalColumn of [...CONSTANT_INTERNAL_ROW_COLS, ...columns]) {
thisRow[internalColumn] = extractFieldValue({
row,
tableName: table._id!,
diff --git a/packages/server/src/api/controllers/row/utils/sqlUtils.ts b/packages/server/src/api/controllers/row/utils/sqlUtils.ts
index 6f9837e0ab..372b8394ff 100644
--- a/packages/server/src/api/controllers/row/utils/sqlUtils.ts
+++ b/packages/server/src/api/controllers/row/utils/sqlUtils.ts
@@ -51,11 +51,11 @@ export async function updateRelationshipColumns(
continue
}
- let linked = await basicProcessing({
+ let linked = basicProcessing({
row,
table: linkedTable,
isLinked: true,
- internal: opts?.sqs,
+ sqs: opts?.sqs,
})
if (!linked._id) {
continue
diff --git a/packages/server/src/api/controllers/row/utils/utils.ts b/packages/server/src/api/controllers/row/utils/utils.ts
index f387a468cf..bf9ede6fe3 100644
--- a/packages/server/src/api/controllers/row/utils/utils.ts
+++ b/packages/server/src/api/controllers/row/utils/utils.ts
@@ -132,6 +132,7 @@ export async function sqlOutputProcessing(
let rowId = row._id
if (opts?.sqs) {
rowId = getInternalRowId(row, table)
+ row._id = rowId
} else if (!rowId) {
rowId = generateIdForRow(row, table)
row._id = rowId
@@ -153,7 +154,7 @@ export async function sqlOutputProcessing(
row,
table,
isLinked: false,
- internal: opts?.sqs,
+ sqs: opts?.sqs,
}),
table
)
@@ -167,7 +168,8 @@ export async function sqlOutputProcessing(
tables,
row,
finalRows,
- relationships
+ relationships,
+ opts
)
}
diff --git a/packages/server/src/api/controllers/static/index.ts b/packages/server/src/api/controllers/static/index.ts
index d767ca9e98..7cc08e1b5c 100644
--- a/packages/server/src/api/controllers/static/index.ts
+++ b/packages/server/src/api/controllers/static/index.ts
@@ -127,13 +127,6 @@ export const uploadFile = async function (
)
}
-export const deleteObjects = async function (ctx: Ctx) {
- ctx.body = await objectStore.deleteFiles(
- ObjectStoreBuckets.APPS,
- ctx.request.body.keys
- )
-}
-
const requiresMigration = async (ctx: Ctx) => {
const appId = context.getAppId()
if (!appId) {
diff --git a/packages/server/src/api/controllers/table/ExternalRequest.ts b/packages/server/src/api/controllers/table/ExternalRequest.ts
index 65cead3a1d..1e57ea3294 100644
--- a/packages/server/src/api/controllers/table/ExternalRequest.ts
+++ b/packages/server/src/api/controllers/table/ExternalRequest.ts
@@ -22,6 +22,7 @@ export async function makeTableRequest(
operation,
},
meta: {
+ table,
tables,
},
table,
diff --git a/packages/server/src/api/routes/static.ts b/packages/server/src/api/routes/static.ts
index a5c421b2e6..f331609923 100644
--- a/packages/server/src/api/routes/static.ts
+++ b/packages/server/src/api/routes/static.ts
@@ -32,11 +32,6 @@ router
.get("/builder/:file*", controller.serveBuilder)
.get("/api/assets/client", controller.serveClientLibrary)
.post("/api/attachments/process", authorized(BUILDER), controller.uploadFile)
- .post(
- "/api/attachments/delete",
- authorized(BUILDER),
- controller.deleteObjects
- )
.post("/api/beta/:feature", controller.toggleBetaUiFeature)
.post(
"/api/attachments/:tableId/upload",
@@ -44,12 +39,6 @@ router
authorized(PermissionType.TABLE, PermissionLevel.WRITE),
controller.uploadFile
)
- .post(
- "/api/attachments/:tableId/delete",
- paramResource("tableId"),
- authorized(PermissionType.TABLE, PermissionLevel.WRITE),
- controller.deleteObjects
- )
.get("/app/preview", authorized(BUILDER), controller.serveBuilderPreview)
.get("/app/:appUrl/:path*", controller.serveApp)
.get("/:appId/:path*", controller.serveApp)
diff --git a/packages/server/src/api/routes/tests/search.spec.ts b/packages/server/src/api/routes/tests/search.spec.ts
index fdf1ed7603..5b71ec9044 100644
--- a/packages/server/src/api/routes/tests/search.spec.ts
+++ b/packages/server/src/api/routes/tests/search.spec.ts
@@ -8,6 +8,8 @@ import {
FieldType,
RowSearchParams,
SearchFilters,
+ SortOrder,
+ SortType,
Table,
TableSchema,
} from "@budibase/types"
@@ -62,7 +64,32 @@ describe.each([
class SearchAssertion {
constructor(private readonly query: RowSearchParams) {}
- async toFind(expectedRows: any[]) {
+ // Asserts that the query returns rows matching exactly the set of rows
+ // passed in. The order of the rows matters. Rows returned in an order
+ // different to the one passed in will cause the assertion to fail. Extra
+ // rows returned by the query will also cause the assertion to fail.
+ async toMatchExactly(expectedRows: any[]) {
+ const { rows: foundRows } = await config.api.row.search(table._id!, {
+ ...this.query,
+ tableId: table._id!,
+ })
+
+ // eslint-disable-next-line jest/no-standalone-expect
+ expect(foundRows).toHaveLength(expectedRows.length)
+ // eslint-disable-next-line jest/no-standalone-expect
+ expect(foundRows).toEqual(
+ expectedRows.map((expectedRow: any) =>
+ expect.objectContaining(
+ foundRows.find(foundRow => _.isMatch(foundRow, expectedRow))
+ )
+ )
+ )
+ }
+
+ // Asserts that the query returns rows matching exactly the set of rows
+ // passed in. The order of the rows is not important, but extra rows will
+ // cause the assertion to fail.
+ async toContainExactly(expectedRows: any[]) {
const { rows: foundRows } = await config.api.row.search(table._id!, {
...this.query,
tableId: table._id!,
@@ -82,8 +109,39 @@ describe.each([
)
}
+ // Asserts that the query returns rows matching the set of rows passed in.
+ // The order of the rows is not important. Extra rows will not cause the
+ // assertion to fail.
+ async toContain(expectedRows: any[]) {
+ const { rows: foundRows } = await config.api.row.search(table._id!, {
+ ...this.query,
+ tableId: table._id!,
+ })
+
+ // eslint-disable-next-line jest/no-standalone-expect
+ expect(foundRows).toEqual(
+ expect.arrayContaining(
+ expectedRows.map((expectedRow: any) =>
+ expect.objectContaining(
+ foundRows.find(foundRow => _.isMatch(foundRow, expectedRow))
+ )
+ )
+ )
+ )
+ }
+
async toFindNothing() {
- await this.toFind([])
+ await this.toContainExactly([])
+ }
+
+ async toHaveLength(length: number) {
+ const { rows: foundRows } = await config.api.row.search(table._id!, {
+ ...this.query,
+ tableId: table._id!,
+ })
+
+ // eslint-disable-next-line jest/no-standalone-expect
+ expect(foundRows).toHaveLength(length)
}
}
@@ -105,28 +163,33 @@ describe.each([
describe("misc", () => {
it("should return all if no query is passed", () =>
- expectSearch({} as RowSearchParams).toFind([
+ expectSearch({} as RowSearchParams).toContainExactly([
{ name: "foo" },
{ name: "bar" },
]))
it("should return all if empty query is passed", () =>
- expectQuery({}).toFind([{ name: "foo" }, { name: "bar" }]))
+ expectQuery({}).toContainExactly([{ name: "foo" }, { name: "bar" }]))
it("should return all if onEmptyFilter is RETURN_ALL", () =>
expectQuery({
onEmptyFilter: EmptyFilterOption.RETURN_ALL,
- }).toFind([{ name: "foo" }, { name: "bar" }]))
+ }).toContainExactly([{ name: "foo" }, { name: "bar" }]))
it("should return nothing if onEmptyFilter is RETURN_NONE", () =>
expectQuery({
onEmptyFilter: EmptyFilterOption.RETURN_NONE,
}).toFindNothing())
+
+ it("should respect limit", () =>
+ expectSearch({ limit: 1, paginate: true, query: {} }).toHaveLength(1))
})
describe("equal", () => {
it("successfully finds a row", () =>
- expectQuery({ equal: { name: "foo" } }).toFind([{ name: "foo" }]))
+ expectQuery({ equal: { name: "foo" } }).toContainExactly([
+ { name: "foo" },
+ ]))
it("fails to find nonexistent row", () =>
expectQuery({ equal: { name: "none" } }).toFindNothing())
@@ -134,15 +197,21 @@ describe.each([
describe("notEqual", () => {
it("successfully finds a row", () =>
- expectQuery({ notEqual: { name: "foo" } }).toFind([{ name: "bar" }]))
+ expectQuery({ notEqual: { name: "foo" } }).toContainExactly([
+ { name: "bar" },
+ ]))
it("fails to find nonexistent row", () =>
- expectQuery({ notEqual: { name: "bar" } }).toFind([{ name: "foo" }]))
+ expectQuery({ notEqual: { name: "bar" } }).toContainExactly([
+ { name: "foo" },
+ ]))
})
describe("oneOf", () => {
it("successfully finds a row", () =>
- expectQuery({ oneOf: { name: ["foo"] } }).toFind([{ name: "foo" }]))
+ expectQuery({ oneOf: { name: ["foo"] } }).toContainExactly([
+ { name: "foo" },
+ ]))
it("fails to find nonexistent row", () =>
expectQuery({ oneOf: { name: ["none"] } }).toFindNothing())
@@ -150,11 +219,69 @@ describe.each([
describe("fuzzy", () => {
it("successfully finds a row", () =>
- expectQuery({ fuzzy: { name: "oo" } }).toFind([{ name: "foo" }]))
+ expectQuery({ fuzzy: { name: "oo" } }).toContainExactly([
+ { name: "foo" },
+ ]))
it("fails to find nonexistent row", () =>
expectQuery({ fuzzy: { name: "none" } }).toFindNothing())
})
+
+ describe("range", () => {
+ it("successfully finds multiple rows", () =>
+ expectQuery({
+ range: { name: { low: "a", high: "z" } },
+ }).toContainExactly([{ name: "bar" }, { name: "foo" }]))
+
+ it("successfully finds a row with a high bound", () =>
+ expectQuery({
+ range: { name: { low: "a", high: "c" } },
+ }).toContainExactly([{ name: "bar" }]))
+
+ it("successfully finds a row with a low bound", () =>
+ expectQuery({
+ range: { name: { low: "f", high: "z" } },
+ }).toContainExactly([{ name: "foo" }]))
+
+ it("successfully finds no rows", () =>
+ expectQuery({
+ range: { name: { low: "g", high: "h" } },
+ }).toFindNothing())
+ })
+
+ describe("sort", () => {
+ it("sorts ascending", () =>
+ expectSearch({
+ query: {},
+ sort: "name",
+ sortOrder: SortOrder.ASCENDING,
+ }).toMatchExactly([{ name: "bar" }, { name: "foo" }]))
+
+ it("sorts descending", () =>
+ expectSearch({
+ query: {},
+ sort: "name",
+ sortOrder: SortOrder.DESCENDING,
+ }).toMatchExactly([{ name: "foo" }, { name: "bar" }]))
+
+ describe("sortType STRING", () => {
+ it("sorts ascending", () =>
+ expectSearch({
+ query: {},
+ sort: "name",
+ sortType: SortType.STRING,
+ sortOrder: SortOrder.ASCENDING,
+ }).toMatchExactly([{ name: "bar" }, { name: "foo" }]))
+
+ it("sorts descending", () =>
+ expectSearch({
+ query: {},
+ sort: "name",
+ sortType: SortType.STRING,
+ sortOrder: SortOrder.DESCENDING,
+ }).toMatchExactly([{ name: "foo" }, { name: "bar" }]))
+ })
+ })
})
describe("numbers", () => {
@@ -167,7 +294,7 @@ describe.each([
describe("equal", () => {
it("successfully finds a row", () =>
- expectQuery({ equal: { age: 1 } }).toFind([{ age: 1 }]))
+ expectQuery({ equal: { age: 1 } }).toContainExactly([{ age: 1 }]))
it("fails to find nonexistent row", () =>
expectQuery({ equal: { age: 2 } }).toFindNothing())
@@ -175,15 +302,15 @@ describe.each([
describe("notEqual", () => {
it("successfully finds a row", () =>
- expectQuery({ notEqual: { age: 1 } }).toFind([{ age: 10 }]))
+ expectQuery({ notEqual: { age: 1 } }).toContainExactly([{ age: 10 }]))
it("fails to find nonexistent row", () =>
- expectQuery({ notEqual: { age: 10 } }).toFind([{ age: 1 }]))
+ expectQuery({ notEqual: { age: 10 } }).toContainExactly([{ age: 1 }]))
})
describe("oneOf", () => {
it("successfully finds a row", () =>
- expectQuery({ oneOf: { age: [1] } }).toFind([{ age: 1 }]))
+ expectQuery({ oneOf: { age: [1] } }).toContainExactly([{ age: 1 }]))
it("fails to find nonexistent row", () =>
expectQuery({ oneOf: { age: [2] } }).toFindNothing())
@@ -193,17 +320,56 @@ describe.each([
it("successfully finds a row", () =>
expectQuery({
range: { age: { low: 1, high: 5 } },
- }).toFind([{ age: 1 }]))
+ }).toContainExactly([{ age: 1 }]))
it("successfully finds multiple rows", () =>
expectQuery({
range: { age: { low: 1, high: 10 } },
- }).toFind([{ age: 1 }, { age: 10 }]))
+ }).toContainExactly([{ age: 1 }, { age: 10 }]))
it("successfully finds a row with a high bound", () =>
expectQuery({
range: { age: { low: 5, high: 10 } },
- }).toFind([{ age: 10 }]))
+ }).toContainExactly([{ age: 10 }]))
+
+ it("successfully finds no rows", () =>
+ expectQuery({
+ range: { age: { low: 5, high: 9 } },
+ }).toFindNothing())
+ })
+
+ describe("sort", () => {
+ it("sorts ascending", () =>
+ expectSearch({
+ query: {},
+ sort: "age",
+ sortOrder: SortOrder.ASCENDING,
+ }).toMatchExactly([{ age: 1 }, { age: 10 }]))
+
+ it("sorts descending", () =>
+ expectSearch({
+ query: {},
+ sort: "age",
+ sortOrder: SortOrder.DESCENDING,
+ }).toMatchExactly([{ age: 10 }, { age: 1 }]))
+ })
+
+ describe("sortType NUMBER", () => {
+ it("sorts ascending", () =>
+ expectSearch({
+ query: {},
+ sort: "age",
+ sortType: SortType.NUMBER,
+ sortOrder: SortOrder.ASCENDING,
+ }).toMatchExactly([{ age: 1 }, { age: 10 }]))
+
+ it("sorts descending", () =>
+ expectSearch({
+ query: {},
+ sort: "age",
+ sortType: SortType.NUMBER,
+ sortOrder: SortOrder.DESCENDING,
+ }).toMatchExactly([{ age: 10 }, { age: 1 }]))
})
})
@@ -211,6 +377,7 @@ describe.each([
const JAN_1ST = "2020-01-01T00:00:00.000Z"
const JAN_2ND = "2020-01-02T00:00:00.000Z"
const JAN_5TH = "2020-01-05T00:00:00.000Z"
+ const JAN_9TH = "2020-01-09T00:00:00.000Z"
const JAN_10TH = "2020-01-10T00:00:00.000Z"
beforeAll(async () => {
@@ -223,7 +390,9 @@ describe.each([
describe("equal", () => {
it("successfully finds a row", () =>
- expectQuery({ equal: { dob: JAN_1ST } }).toFind([{ dob: JAN_1ST }]))
+ expectQuery({ equal: { dob: JAN_1ST } }).toContainExactly([
+ { dob: JAN_1ST },
+ ]))
it("fails to find nonexistent row", () =>
expectQuery({ equal: { dob: JAN_2ND } }).toFindNothing())
@@ -231,15 +400,21 @@ describe.each([
describe("notEqual", () => {
it("successfully finds a row", () =>
- expectQuery({ notEqual: { dob: JAN_1ST } }).toFind([{ dob: JAN_10TH }]))
+ expectQuery({ notEqual: { dob: JAN_1ST } }).toContainExactly([
+ { dob: JAN_10TH },
+ ]))
it("fails to find nonexistent row", () =>
- expectQuery({ notEqual: { dob: JAN_10TH } }).toFind([{ dob: JAN_1ST }]))
+ expectQuery({ notEqual: { dob: JAN_10TH } }).toContainExactly([
+ { dob: JAN_1ST },
+ ]))
})
describe("oneOf", () => {
it("successfully finds a row", () =>
- expectQuery({ oneOf: { dob: [JAN_1ST] } }).toFind([{ dob: JAN_1ST }]))
+ expectQuery({ oneOf: { dob: [JAN_1ST] } }).toContainExactly([
+ { dob: JAN_1ST },
+ ]))
it("fails to find nonexistent row", () =>
expectQuery({ oneOf: { dob: [JAN_2ND] } }).toFindNothing())
@@ -249,17 +424,130 @@ describe.each([
it("successfully finds a row", () =>
expectQuery({
range: { dob: { low: JAN_1ST, high: JAN_5TH } },
- }).toFind([{ dob: JAN_1ST }]))
+ }).toContainExactly([{ dob: JAN_1ST }]))
it("successfully finds multiple rows", () =>
expectQuery({
range: { dob: { low: JAN_1ST, high: JAN_10TH } },
- }).toFind([{ dob: JAN_1ST }, { dob: JAN_10TH }]))
+ }).toContainExactly([{ dob: JAN_1ST }, { dob: JAN_10TH }]))
it("successfully finds a row with a high bound", () =>
expectQuery({
range: { dob: { low: JAN_5TH, high: JAN_10TH } },
- }).toFind([{ dob: JAN_10TH }]))
+ }).toContainExactly([{ dob: JAN_10TH }]))
+
+ it("successfully finds no rows", () =>
+ expectQuery({
+ range: { dob: { low: JAN_5TH, high: JAN_9TH } },
+ }).toFindNothing())
+ })
+
+ describe("sort", () => {
+ it("sorts ascending", () =>
+ expectSearch({
+ query: {},
+ sort: "dob",
+ sortOrder: SortOrder.ASCENDING,
+ }).toMatchExactly([{ dob: JAN_1ST }, { dob: JAN_10TH }]))
+
+ it("sorts descending", () =>
+ expectSearch({
+ query: {},
+ sort: "dob",
+ sortOrder: SortOrder.DESCENDING,
+ }).toMatchExactly([{ dob: JAN_10TH }, { dob: JAN_1ST }]))
+
+ describe("sortType STRING", () => {
+ it("sorts ascending", () =>
+ expectSearch({
+ query: {},
+ sort: "dob",
+ sortType: SortType.STRING,
+ sortOrder: SortOrder.ASCENDING,
+ }).toMatchExactly([{ dob: JAN_1ST }, { dob: JAN_10TH }]))
+
+ it("sorts descending", () =>
+ expectSearch({
+ query: {},
+ sort: "dob",
+ sortType: SortType.STRING,
+ sortOrder: SortOrder.DESCENDING,
+ }).toMatchExactly([{ dob: JAN_10TH }, { dob: JAN_1ST }]))
+ })
+ })
+ })
+
+ describe("array of strings", () => {
+ beforeAll(async () => {
+ await createTable({
+ numbers: {
+ name: "numbers",
+ type: FieldType.ARRAY,
+ constraints: { inclusion: ["one", "two", "three"] },
+ },
+ })
+ await createRows([{ numbers: ["one", "two"] }, { numbers: ["three"] }])
+ })
+
+ describe("contains", () => {
+ it("successfully finds a row", () =>
+ expectQuery({ contains: { numbers: ["one"] } }).toContainExactly([
+ { numbers: ["one", "two"] },
+ ]))
+
+ it("fails to find nonexistent row", () =>
+ expectQuery({ contains: { numbers: ["none"] } }).toFindNothing())
+
+ it("fails to find row containing all", () =>
+ expectQuery({
+ contains: { numbers: ["one", "two", "three"] },
+ }).toFindNothing())
+
+ it("finds all with empty list", () =>
+ expectQuery({ contains: { numbers: [] } }).toContainExactly([
+ { numbers: ["one", "two"] },
+ { numbers: ["three"] },
+ ]))
+ })
+
+ describe("notContains", () => {
+ it("successfully finds a row", () =>
+ expectQuery({ notContains: { numbers: ["one"] } }).toContainExactly([
+ { numbers: ["three"] },
+ ]))
+
+ it("fails to find nonexistent row", () =>
+ expectQuery({
+ notContains: { numbers: ["one", "two", "three"] },
+ }).toContainExactly([
+ { numbers: ["one", "two"] },
+ { numbers: ["three"] },
+ ]))
+
+ it("finds all with empty list", () =>
+ expectQuery({ notContains: { numbers: [] } }).toContainExactly([
+ { numbers: ["one", "two"] },
+ { numbers: ["three"] },
+ ]))
+ })
+
+ describe("containsAny", () => {
+ it("successfully finds rows", () =>
+ expectQuery({
+ containsAny: { numbers: ["one", "two", "three"] },
+ }).toContainExactly([
+ { numbers: ["one", "two"] },
+ { numbers: ["three"] },
+ ]))
+
+ it("fails to find nonexistent row", () =>
+ expectQuery({ containsAny: { numbers: ["none"] } }).toFindNothing())
+
+ it("finds all with empty list", () =>
+ expectQuery({ containsAny: { numbers: [] } }).toContainExactly([
+ { numbers: ["one", "two"] },
+ { numbers: ["three"] },
+ ]))
})
})
})
diff --git a/packages/server/src/constants/index.ts b/packages/server/src/constants/index.ts
index 42a1b53224..37c275c8a3 100644
--- a/packages/server/src/constants/index.ts
+++ b/packages/server/src/constants/index.ts
@@ -20,6 +20,7 @@ export enum FilterTypes {
NOT_EMPTY = "notEmpty",
CONTAINS = "contains",
NOT_CONTAINS = "notContains",
+ CONTAINS_ANY = "containsAny",
ONE_OF = "oneOf",
}
@@ -30,6 +31,7 @@ export const NoEmptyFilterStrings = [
FilterTypes.NOT_EQUAL,
FilterTypes.CONTAINS,
FilterTypes.NOT_CONTAINS,
+ FilterTypes.CONTAINS_ANY,
]
export const CanSwitchTypes = [
diff --git a/packages/server/src/db/utils.ts b/packages/server/src/db/utils.ts
index b1c02b1764..ce8d0accbb 100644
--- a/packages/server/src/db/utils.ts
+++ b/packages/server/src/db/utils.ts
@@ -40,6 +40,7 @@ export const USER_METDATA_PREFIX = `${DocumentType.ROW}${SEPARATOR}${dbCore.Inte
export const LINK_USER_METADATA_PREFIX = `${DocumentType.LINK}${SEPARATOR}${dbCore.InternalTable.USER_METADATA}${SEPARATOR}`
export const TABLE_ROW_PREFIX = `${DocumentType.ROW}${SEPARATOR}${DocumentType.TABLE}`
export const AUTOMATION_LOG_PREFIX = `${DocumentType.AUTOMATION_LOG}${SEPARATOR}`
+export const SQS_DATASOURCE_INTERNAL = "internal"
export const ViewName = dbCore.ViewName
export const InternalTables = dbCore.InternalTable
export const UNICODE_MAX = dbCore.UNICODE_MAX
diff --git a/packages/server/src/environment.ts b/packages/server/src/environment.ts
index f8adcbe0ee..d9d299d5fa 100644
--- a/packages/server/src/environment.ts
+++ b/packages/server/src/environment.ts
@@ -28,6 +28,7 @@ const DEFAULTS = {
PLUGINS_DIR: "/plugins",
FORKED_PROCESS_NAME: "main",
JS_RUNNER_MEMORY_LIMIT: 64,
+ COUCH_DB_SQL_URL: "http://localhost:4006",
}
const QUERY_THREAD_TIMEOUT =
@@ -39,6 +40,7 @@ const environment = {
// important - prefer app port to generic port
PORT: process.env.APP_PORT || process.env.PORT,
COUCH_DB_URL: process.env.COUCH_DB_URL,
+ COUCH_DB_SQL_URL: process.env.COUCH_DB_SQL_URL || DEFAULTS.COUCH_DB_SQL_URL,
MINIO_URL: process.env.MINIO_URL,
WORKER_URL: process.env.WORKER_URL,
AWS_REGION: process.env.AWS_REGION,
diff --git a/packages/server/src/integrations/base/sql.ts b/packages/server/src/integrations/base/sql.ts
index f5828f9419..59684422e7 100644
--- a/packages/server/src/integrations/base/sql.ts
+++ b/packages/server/src/integrations/base/sql.ts
@@ -22,6 +22,8 @@ import {
SortDirection,
SqlQueryBinding,
Table,
+ TableSourceType,
+ INTERNAL_TABLE_SOURCE_ID,
} from "@budibase/types"
import environment from "../../environment"
@@ -135,6 +137,18 @@ function generateSelectStatement(
})
}
+function getTableName(table?: Table): string | undefined {
+ // SQS uses the table ID rather than the table name
+ if (
+ table?.sourceType === TableSourceType.INTERNAL ||
+ table?.sourceId === INTERNAL_TABLE_SOURCE_ID
+ ) {
+ return table?._id
+ } else {
+ return table?.name
+ }
+}
+
class InternalBuilder {
private readonly client: string
@@ -149,7 +163,7 @@ class InternalBuilder {
tableName: string,
opts: { aliases?: Record<string, string>; relationship?: boolean }
): Knex.QueryBuilder {
- function getTableName(name: string) {
+ function getTableAlias(name: string) {
const alias = opts.aliases?.[name]
return alias || name
}
@@ -161,11 +175,11 @@ class InternalBuilder {
const updatedKey = dbCore.removeKeyNumbering(key)
const isRelationshipField = updatedKey.includes(".")
if (!opts.relationship && !isRelationshipField) {
- fn(`${getTableName(tableName)}.${updatedKey}`, value)
+ fn(`${getTableAlias(tableName)}.${updatedKey}`, value)
}
if (opts.relationship && isRelationshipField) {
const [filterTableName, property] = updatedKey.split(".")
- fn(`${getTableName(filterTableName)}.${property}`, value)
+ fn(`${getTableAlias(filterTableName)}.${property}`, value)
}
}
}
@@ -233,6 +247,11 @@ class InternalBuilder {
(statement ? andOr : "") +
`LOWER(${likeKey(this.client, key)}) LIKE ?`
}
+
+ if (statement === "") {
+ return
+ }
+
// @ts-ignore
query = query[rawFnc](`${not}(${statement})`, value)
})
@@ -341,9 +360,10 @@ class InternalBuilder {
addSorting(query: Knex.QueryBuilder, json: QueryJson): Knex.QueryBuilder {
let { sort, paginate } = json
const table = json.meta?.table
+ const tableName = getTableName(table)
const aliases = json.tableAliases
const aliased =
- table?.name && aliases?.[table.name] ? aliases[table.name] : table?.name
+ tableName && aliases?.[tableName] ? aliases[tableName] : table?.name
if (sort && Object.keys(sort || {}).length > 0) {
for (let [key, value] of Object.entries(sort)) {
const direction =
@@ -724,12 +744,13 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
    results: Record<string, any>[],
    aliases?: Record<string, string>
  ): Record<string, any>[] {
+ const tableName = getTableName(table)
for (const [name, field] of Object.entries(table.schema)) {
if (!this._isJsonColumn(field)) {
continue
}
- const tableName = aliases?.[table.name] || table.name
- const fullName = `${tableName}.${name}`
+ const aliasedTableName = (tableName && aliases?.[tableName]) || tableName
+ const fullName = `${aliasedTableName}.${name}`
for (let row of results) {
if (typeof row[fullName] === "string") {
row[fullName] = JSON.parse(row[fullName])
diff --git a/packages/server/src/integrations/base/sqlTable.ts b/packages/server/src/integrations/base/sqlTable.ts
index 4ff336421f..3c55d75b8b 100644
--- a/packages/server/src/integrations/base/sqlTable.ts
+++ b/packages/server/src/integrations/base/sqlTable.ts
@@ -1,19 +1,20 @@
import { Knex, knex } from "knex"
import {
- RelationshipType,
FieldSubtype,
+ FieldType,
NumberFieldMetadata,
Operation,
QueryJson,
+ RelationshipType,
RenameColumn,
- Table,
- FieldType,
SqlQuery,
+ Table,
+ TableSourceType,
} from "@budibase/types"
import { breakExternalTableId, getNativeSql, SqlClient } from "../utils"
+import { utils } from "@budibase/shared-core"
import SchemaBuilder = Knex.SchemaBuilder
import CreateTableBuilder = Knex.CreateTableBuilder
-import { utils } from "@budibase/shared-core"
function isIgnoredType(type: FieldType) {
const ignored = [FieldType.LINK, FieldType.FORMULA]
@@ -105,13 +106,13 @@ function generateSchema(
column.relationshipType !== RelationshipType.MANY_TO_MANY
) {
if (!column.foreignKey || !column.tableId) {
- throw "Invalid relationship schema"
+ throw new Error("Invalid relationship schema")
}
const { tableName } = breakExternalTableId(column.tableId)
// @ts-ignore
const relatedTable = tables[tableName]
if (!relatedTable) {
- throw "Referenced table doesn't exist"
+ throw new Error("Referenced table doesn't exist")
}
const relatedPrimary = relatedTable.primary[0]
const externalType = relatedTable.schema[relatedPrimary].externalType
@@ -209,15 +210,19 @@ class SqlTableQueryBuilder {
let query: Knex.SchemaBuilder
if (!json.table || !json.meta || !json.meta.tables) {
- throw "Cannot execute without table being specified"
+ throw new Error("Cannot execute without table being specified")
}
+ if (json.table.sourceType === TableSourceType.INTERNAL) {
+ throw new Error("Cannot perform table actions for SQS.")
+ }
+
switch (this._operation(json)) {
case Operation.CREATE_TABLE:
query = buildCreateTable(client, json.table, json.meta.tables)
break
case Operation.UPDATE_TABLE:
if (!json.meta || !json.meta.table) {
- throw "Must specify old table for update"
+ throw new Error("Must specify old table for update")
}
// renameColumn does not work for MySQL, so return a raw query
if (this.sqlClient === SqlClient.MY_SQL && json.meta.renamed) {
@@ -264,7 +269,7 @@ class SqlTableQueryBuilder {
query = buildDeleteTable(client, json.table)
break
default:
- throw "Table operation is of unknown type"
+ throw new Error("Table operation is of unknown type")
}
return getNativeSql(query)
}
diff --git a/packages/server/src/integrations/tests/sql.spec.ts b/packages/server/src/integrations/tests/sql.spec.ts
index dc2a06446b..4ee544cc5e 100644
--- a/packages/server/src/integrations/tests/sql.spec.ts
+++ b/packages/server/src/integrations/tests/sql.spec.ts
@@ -9,6 +9,14 @@ import {
} from "@budibase/types"
const TABLE_NAME = "test"
+const TABLE: Table = {
+ type: "table",
+ sourceType: TableSourceType.EXTERNAL,
+ sourceId: "SOURCE_ID",
+ schema: {},
+ name: TABLE_NAME,
+ primary: ["id"],
+}
function endpoint(table: any, operation: any) {
return {
@@ -25,6 +33,10 @@ function generateReadJson({
sort,
paginate,
}: any = {}): QueryJson {
+ const tableObj = { ...TABLE }
+ if (table) {
+ tableObj.name = table
+ }
return {
endpoint: endpoint(table || TABLE_NAME, "READ"),
resource: {
@@ -34,14 +46,7 @@ function generateReadJson({
sort: sort || {},
paginate: paginate || {},
meta: {
- table: {
- type: "table",
- sourceType: TableSourceType.EXTERNAL,
- sourceId: "SOURCE_ID",
- schema: {},
- name: table || TABLE_NAME,
- primary: ["id"],
- } as any,
+ table: tableObj,
},
}
}
@@ -49,6 +54,9 @@ function generateReadJson({
function generateCreateJson(table = TABLE_NAME, body = {}): QueryJson {
return {
endpoint: endpoint(table, "CREATE"),
+ meta: {
+ table: TABLE,
+ },
body,
}
}
@@ -58,7 +66,15 @@ function generateUpdateJson({
body = {},
filters = {},
meta = {},
+}: {
+ table: string
+ body?: any
+ filters?: any
+ meta?: any
}): QueryJson {
+ if (!meta.table) {
+ meta.table = table
+ }
return {
endpoint: endpoint(table, "UPDATE"),
filters,
@@ -70,6 +86,9 @@ function generateUpdateJson({
function generateDeleteJson(table = TABLE_NAME, filters = {}): QueryJson {
return {
endpoint: endpoint(table, "DELETE"),
+ meta: {
+ table: TABLE,
+ },
filters,
}
}
@@ -102,6 +121,9 @@ function generateRelationshipJson(config: { schema?: string } = {}): QueryJson {
},
],
extra: { idFilter: {} },
+ meta: {
+ table: TABLE,
+ },
}
}
diff --git a/packages/server/src/integrations/tests/sqlAlias.spec.ts b/packages/server/src/integrations/tests/sqlAlias.spec.ts
index bfca24ff7d..f4edab8dad 100644
--- a/packages/server/src/integrations/tests/sqlAlias.spec.ts
+++ b/packages/server/src/integrations/tests/sqlAlias.spec.ts
@@ -4,12 +4,26 @@ import {
QueryJson,
SourceName,
SqlQuery,
+ Table,
+ TableSourceType,
} from "@budibase/types"
import { join } from "path"
import Sql from "../base/sql"
import { SqlClient } from "../utils"
-import AliasTables from "../../api/controllers/row/alias"
import { generator } from "@budibase/backend-core/tests"
+import sdk from "../../sdk"
+
+// this table doesn't strictly exist in any datasource - it's a stand-in fixture for these tests
+const TABLE: Table = {
+ type: "table",
+ sourceType: TableSourceType.EXTERNAL,
+ sourceId: "SOURCE_ID",
+ schema: {},
+ name: "tableName",
+ primary: ["id"],
+}
+
+const AliasTables = sdk.rows.AliasTables
function multiline(sql: string) {
return sql.replace(/\n/g, "").replace(/ +/g, " ")
@@ -220,6 +234,9 @@ describe("Captures of real examples", () => {
resource: {
fields,
},
+ meta: {
+ table: TABLE,
+ },
}
}
diff --git a/packages/server/src/sdk/app/rows/index.ts b/packages/server/src/sdk/app/rows/index.ts
index ea501e93d9..c117941419 100644
--- a/packages/server/src/sdk/app/rows/index.ts
+++ b/packages/server/src/sdk/app/rows/index.ts
@@ -3,6 +3,7 @@ import * as rows from "./rows"
import * as search from "./search"
import * as utils from "./utils"
import * as external from "./external"
+import AliasTables from "./sqlAlias"
export default {
...attachments,
@@ -10,4 +11,5 @@ export default {
...search,
utils,
external,
+ AliasTables,
}
diff --git a/packages/server/src/sdk/app/rows/search.ts b/packages/server/src/sdk/app/rows/search.ts
index f681bfeb90..e347a8657d 100644
--- a/packages/server/src/sdk/app/rows/search.ts
+++ b/packages/server/src/sdk/app/rows/search.ts
@@ -13,6 +13,8 @@ import * as sqs from "./search/sqs"
import env from "../../../environment"
import { ExportRowsParams, ExportRowsResult } from "./search/types"
import { dataFilters } from "@budibase/shared-core"
+import sdk from "../../index"
+import { searchInputMapping } from "./search/utils"
export { isValidFilter } from "../../../integrations/utils"
@@ -29,6 +31,10 @@ function pickApi(tableId: any) {
return internal
}
+function isEmptyArray(value: any) {
+ return Array.isArray(value) && value.length === 0
+}
+
// don't do a pure falsy check, as 0 is included
// https://github.com/Budibase/budibase/issues/10118
export function removeEmptyFilters(filters: SearchFilters) {
@@ -47,7 +53,7 @@ export function removeEmptyFilters(filters: SearchFilters) {
for (let [key, value] of Object.entries(
filters[filterType] as object
)) {
- if (value == null || value === "") {
+ if (value == null || value === "" || isEmptyArray(value)) {
// @ts-ignore
delete filters[filterField][key]
}
@@ -72,12 +78,15 @@ export async function search(
}
}
+ const table = await sdk.tables.getTable(options.tableId)
+ options = searchInputMapping(table, options)
+
if (isExternalTable) {
- return external.search(options)
+ return external.search(options, table)
} else if (env.SQS_SEARCH_ENABLE) {
- return sqs.search(options)
+ return sqs.search(options, table)
} else {
- return internal.search(options)
+ return internal.search(options, table)
}
}
diff --git a/packages/server/src/sdk/app/rows/search/external.ts b/packages/server/src/sdk/app/rows/search/external.ts
index e0a3bad94e..077f971903 100644
--- a/packages/server/src/sdk/app/rows/search/external.ts
+++ b/packages/server/src/sdk/app/rows/search/external.ts
@@ -8,6 +8,7 @@ import {
SearchFilters,
RowSearchParams,
SearchResponse,
+ Table,
} from "@budibase/types"
import * as exporters from "../../../../api/controllers/view/exporters"
import { handleRequest } from "../../../../api/controllers/row/external"
@@ -18,13 +19,13 @@ import {
import { utils } from "@budibase/shared-core"
import { ExportRowsParams, ExportRowsResult } from "./types"
import { HTTPError, db } from "@budibase/backend-core"
-import { searchInputMapping } from "./utils"
import pick from "lodash/pick"
import { outputProcessing } from "../../../../utilities/rowProcessor"
import sdk from "../../../"
export async function search(
- options: RowSearchParams
+ options: RowSearchParams,
+ table: Table
): Promise<SearchResponse<Row>> {
const { tableId } = options
const { paginate, query, ...params } = options
@@ -68,8 +69,6 @@ export async function search(
}
try {
- const table = await sdk.tables.getTable(tableId)
- options = searchInputMapping(table, options)
let rows = await handleRequest(Operation.READ, tableId, {
filters: query,
sort,
@@ -150,11 +149,15 @@ export async function exportRows(
}
const datasource = await sdk.datasources.get(datasourceId!)
+ const table = await sdk.tables.getTable(tableId)
if (!datasource || !datasource.entities) {
throw new HTTPError("Datasource has not been configured for plus API.", 400)
}
- let result = await search({ tableId, query: requestQuery, sort, sortOrder })
+ let result = await search(
+ { tableId, query: requestQuery, sort, sortOrder },
+ table
+ )
let rows: Row[] = []
let headers
diff --git a/packages/server/src/sdk/app/rows/search/internal.ts b/packages/server/src/sdk/app/rows/search/internal.ts
index 610807a10e..906ca016d1 100644
--- a/packages/server/src/sdk/app/rows/search/internal.ts
+++ b/packages/server/src/sdk/app/rows/search/internal.ts
@@ -1,6 +1,6 @@
import { context, db, HTTPError } from "@budibase/backend-core"
import env from "../../../../environment"
-import { fullSearch, paginatedSearch, searchInputMapping } from "./utils"
+import { fullSearch, paginatedSearch } from "./utils"
import { getRowParams, InternalTables } from "../../../../db/utils"
import {
Database,
@@ -33,7 +33,8 @@ import pick from "lodash/pick"
import { breakRowIdField } from "../../../../integrations/utils"
export async function search(
- options: RowSearchParams
+ options: RowSearchParams,
+ table: Table
): Promise<SearchResponse<Row>> {
const { tableId } = options
@@ -51,8 +52,6 @@ export async function search(
query: {},
}
- let table = await sdk.tables.getTable(tableId)
- options = searchInputMapping(table, options)
if (params.sort && !params.sortType) {
const schema = table.schema
const sortField = schema[params.sort]
@@ -122,12 +121,15 @@ export async function exportRows(
result = await outputProcessing(table, response)
} else if (query) {
- let searchResponse = await search({
- tableId,
- query,
- sort,
- sortOrder,
- })
+ let searchResponse = await search(
+ {
+ tableId,
+ query,
+ sort,
+ sortOrder,
+ },
+ table
+ )
result = searchResponse.rows
}
diff --git a/packages/server/src/sdk/app/rows/search/sqs.ts b/packages/server/src/sdk/app/rows/search/sqs.ts
index 5b0b6e3bc7..36576c58a3 100644
--- a/packages/server/src/sdk/app/rows/search/sqs.ts
+++ b/packages/server/src/sdk/app/rows/search/sqs.ts
@@ -20,7 +20,12 @@ import {
} from "../../../../api/controllers/row/utils"
import sdk from "../../../index"
import { context } from "@budibase/backend-core"
-import { CONSTANT_INTERNAL_ROW_COLS } from "../../../../db/utils"
+import {
+ CONSTANT_INTERNAL_ROW_COLS,
+ SQS_DATASOURCE_INTERNAL,
+} from "../../../../db/utils"
+import AliasTables from "../sqlAlias"
+import { outputProcessing } from "../../../../utilities/rowProcessor"
function buildInternalFieldList(
table: Table,
@@ -31,19 +36,19 @@ function buildInternalFieldList(
fieldList = fieldList.concat(
CONSTANT_INTERNAL_ROW_COLS.map(col => `${table._id}.${col}`)
)
- if (opts.relationships) {
- for (let col of Object.values(table.schema)) {
- if (col.type === FieldType.LINK) {
- const linkCol = col as RelationshipFieldMetadata
- const relatedTable = tables.find(
- table => table._id === linkCol.tableId
- )!
- fieldList = fieldList.concat(
- buildInternalFieldList(relatedTable, tables, { relationships: false })
- )
- } else {
- fieldList.push(`${table._id}.${col.name}`)
- }
+ for (let col of Object.values(table.schema)) {
+ const isRelationship = col.type === FieldType.LINK
+ if (!opts.relationships && isRelationship) {
+ continue
+ }
+ if (isRelationship) {
+ const linkCol = col as RelationshipFieldMetadata
+ const relatedTable = tables.find(table => table._id === linkCol.tableId)!
+ fieldList = fieldList.concat(
+ buildInternalFieldList(relatedTable, tables, { relationships: false })
+ )
+ } else {
+ fieldList.push(`${table._id}.${col.name}`)
}
}
return fieldList
@@ -94,14 +99,14 @@ function buildTableMap(tables: Table[]) {
}
export async function search(
- options: RowSearchParams
+ options: RowSearchParams,
+ table: Table
): Promise<SearchResponse<Row>> {
- const { tableId, paginate, query, ...params } = options
+ const { paginate, query, ...params } = options
const builder = new SqlQueryBuilder(SqlClient.SQL_LITE)
const allTables = await sdk.tables.getAllInternalTables()
const allTablesMap = buildTableMap(allTables)
- const table = allTables.find(table => table._id === tableId)
if (!table) {
throw new Error("Unable to find table")
}
@@ -111,7 +116,7 @@ export async function search(
const request: QueryJson = {
endpoint: {
// not important, we query ourselves
- datasourceId: "internal",
+ datasourceId: SQS_DATASOURCE_INTERNAL,
entityId: table._id!,
operation: Operation.READ,
},
@@ -132,7 +137,7 @@ export async function search(
type: "row",
}
- if (params.sort && !params.sortType) {
+ if (params.sort) {
const sortField = table.schema[params.sort]
const sortType =
sortField.type === FieldType.NUMBER ? SortType.NUMBER : SortType.STRING
@@ -154,34 +159,44 @@ export async function search(
}
}
try {
- const query = builder._query(request, {
- disableReturning: true,
+ const alias = new AliasTables(allTables.map(table => table.name))
+ const rows = await alias.queryWithAliasing(request, async json => {
+ const query = builder._query(json, {
+ disableReturning: true,
+ })
+
+ if (Array.isArray(query)) {
+ throw new Error("SQS cannot currently handle multiple queries")
+ }
+
+ let sql = query.sql,
+ bindings = query.bindings
+
+ // quick hack for docIds
+ sql = sql.replace(/`doc1`.`rowId`/g, "`doc1.rowId`")
+ sql = sql.replace(/`doc2`.`rowId`/g, "`doc2.rowId`")
+
+ const db = context.getAppDB()
+ return await db.sql(sql, bindings)
})
- if (Array.isArray(query)) {
- throw new Error("SQS cannot currently handle multiple queries")
- }
-
- let sql = query.sql,
- bindings = query.bindings
-
- // quick hack for docIds
- sql = sql.replace(/`doc1`.`rowId`/g, "`doc1.rowId`")
- sql = sql.replace(/`doc2`.`rowId`/g, "`doc2.rowId`")
-
- const db = context.getAppDB()
- const rows = await db.sql(sql, bindings)
+ // process from the format of tableId.column to expected format
+ const processed = await sqlOutputProcessing(
+ rows,
+ table!,
+ allTablesMap,
+ relationships,
+ {
+ sqs: true,
+ }
+ )
return {
- rows: await sqlOutputProcessing(
- rows,
- table!,
- allTablesMap,
- relationships,
- {
- sqs: true,
- }
- ),
+ // final row processing for response
+ rows: await outputProcessing(table, processed, {
+ preserveLinks: true,
+ squash: true,
+ }),
}
} catch (err: any) {
const msg = typeof err === "string" ? err : err.message
diff --git a/packages/server/src/sdk/app/rows/search/tests/external.spec.ts b/packages/server/src/sdk/app/rows/search/tests/external.spec.ts
index f2bdec4692..53bc049a9b 100644
--- a/packages/server/src/sdk/app/rows/search/tests/external.spec.ts
+++ b/packages/server/src/sdk/app/rows/search/tests/external.spec.ts
@@ -112,7 +112,7 @@ describe("external search", () => {
tableId,
query: {},
}
- const result = await search(searchParams)
+ const result = await search(searchParams, config.table!)
expect(result.rows).toHaveLength(10)
expect(result.rows).toEqual(
@@ -130,7 +130,7 @@ describe("external search", () => {
query: {},
fields: ["name", "age"],
}
- const result = await search(searchParams)
+ const result = await search(searchParams, config.table!)
expect(result.rows).toHaveLength(10)
expect(result.rows).toEqual(
@@ -157,7 +157,7 @@ describe("external search", () => {
},
},
}
- const result = await search(searchParams)
+ const result = await search(searchParams, config.table!)
expect(result.rows).toHaveLength(3)
expect(result.rows.map(row => row.id)).toEqual([1, 4, 8])
diff --git a/packages/server/src/sdk/app/rows/search/tests/internal.spec.ts b/packages/server/src/sdk/app/rows/search/tests/internal.spec.ts
index 5be0f4a258..1c5f396737 100644
--- a/packages/server/src/sdk/app/rows/search/tests/internal.spec.ts
+++ b/packages/server/src/sdk/app/rows/search/tests/internal.spec.ts
@@ -81,7 +81,7 @@ describe("internal", () => {
tableId,
query: {},
}
- const result = await search(searchParams)
+ const result = await search(searchParams, config.table!)
expect(result.rows).toHaveLength(10)
expect(result.rows).toEqual(
@@ -99,7 +99,7 @@ describe("internal", () => {
query: {},
fields: ["name", "age"],
}
- const result = await search(searchParams)
+ const result = await search(searchParams, config.table!)
expect(result.rows).toHaveLength(10)
expect(result.rows).toEqual(
diff --git a/packages/server/src/api/controllers/row/alias.ts b/packages/server/src/sdk/app/rows/sqlAlias.ts
similarity index 87%
rename from packages/server/src/api/controllers/row/alias.ts
rename to packages/server/src/sdk/app/rows/sqlAlias.ts
index 0ec9d1a09c..0fc338ecbe 100644
--- a/packages/server/src/api/controllers/row/alias.ts
+++ b/packages/server/src/sdk/app/rows/sqlAlias.ts
@@ -6,11 +6,12 @@ import {
Row,
SearchFilters,
} from "@budibase/types"
-import { getSQLClient } from "../../../sdk/app/rows/utils"
+import { getSQLClient } from "./utils"
import { cloneDeep } from "lodash"
-import sdk from "../../../sdk"
+import datasources from "../datasources"
import { makeExternalQuery } from "../../../integrations/base/query"
import { SqlClient } from "../../../integrations/utils"
+import { SQS_DATASOURCE_INTERNAL } from "../../../db/utils"
const WRITE_OPERATIONS: Operation[] = [
Operation.CREATE,
@@ -156,12 +157,19 @@ export default class AliasTables {
}
async queryWithAliasing(
- json: QueryJson
+ json: QueryJson,
+ queryFn?: (json: QueryJson) => Promise<DatasourcePlusQueryResponse>
): Promise<DatasourcePlusQueryResponse> {
const datasourceId = json.endpoint.datasourceId
- const datasource = await sdk.datasources.get(datasourceId)
+ const isSqs = datasourceId === SQS_DATASOURCE_INTERNAL
+ let aliasingEnabled: boolean, datasource: Datasource | undefined
+ if (isSqs) {
+ aliasingEnabled = true
+ } else {
+ datasource = await datasources.get(datasourceId)
+ aliasingEnabled = this.isAliasingEnabled(json, datasource)
+ }
- const aliasingEnabled = this.isAliasingEnabled(json, datasource)
if (aliasingEnabled) {
json = cloneDeep(json)
// run through the query json to update anywhere a table may be used
@@ -207,7 +215,15 @@ export default class AliasTables {
}
json.tableAliases = invertedTableAliases
}
- const response = await makeExternalQuery(datasource, json)
+
+ let response: DatasourcePlusQueryResponse
+ if (datasource && !isSqs) {
+ response = await makeExternalQuery(datasource, json)
+ } else if (queryFn) {
+ response = await queryFn(json)
+ } else {
+ throw new Error("No supplied method to perform aliased query")
+ }
if (Array.isArray(response) && aliasingEnabled) {
return this.reverse(response)
} else {
diff --git a/packages/server/src/sdk/app/tables/internal/sqs.ts b/packages/server/src/sdk/app/tables/internal/sqs.ts
index 79d9be2348..99240c28d4 100644
--- a/packages/server/src/sdk/app/tables/internal/sqs.ts
+++ b/packages/server/src/sdk/app/tables/internal/sqs.ts
@@ -1,8 +1,19 @@
import { context, SQLITE_DESIGN_DOC_ID } from "@budibase/backend-core"
-import { FieldType, SQLiteDefinition, SQLiteType, Table } from "@budibase/types"
+import {
+ FieldType,
+ RelationshipFieldMetadata,
+ SQLiteDefinition,
+ SQLiteTable,
+ SQLiteTables,
+ SQLiteType,
+ Table,
+} from "@budibase/types"
import { cloneDeep } from "lodash"
import tablesSdk from "../"
-import { CONSTANT_INTERNAL_ROW_COLS } from "../../../../db/utils"
+import {
+ CONSTANT_INTERNAL_ROW_COLS,
+ generateJunctionTableID,
+} from "../../../../db/utils"
const BASIC_SQLITE_DOC: SQLiteDefinition = {
_id: SQLITE_DESIGN_DOC_ID,
@@ -36,9 +47,42 @@ const FieldTypeMap: Record<FieldType, SQLiteType> = {
[FieldType.BB_REFERENCE]: SQLiteType.TEXT,
}
-function mapTable(table: Table): { [key: string]: SQLiteType } {
+function buildRelationshipDefinitions(
+ table: Table,
+ relationshipColumn: RelationshipFieldMetadata
+): {
+ tableId: string
+ definition: SQLiteTable
+} {
+ const tableId = table._id!,
+ relatedTableId = relationshipColumn.tableId
+ return {
+ tableId: generateJunctionTableID(tableId, relatedTableId),
+ definition: {
+ ["doc1.rowId"]: SQLiteType.TEXT,
+ ["doc1.tableId"]: SQLiteType.TEXT,
+ ["doc1.fieldName"]: SQLiteType.TEXT,
+ ["doc2.rowId"]: SQLiteType.TEXT,
+ ["doc2.tableId"]: SQLiteType.TEXT,
+ ["doc2.fieldName"]: SQLiteType.TEXT,
+ tableId: SQLiteType.TEXT,
+ },
+ }
+}
+
+// this can generate relationship tables as part of the mapping
+function mapTable(table: Table): SQLiteTables {
+ const tables: SQLiteTables = {}
const fields: Record<string, SQLiteType> = {}
for (let [key, column] of Object.entries(table.schema)) {
+ // relationships should be handled differently
+ if (column.type === FieldType.LINK) {
+ const { tableId, definition } = buildRelationshipDefinitions(
+ table,
+ column
+ )
+ tables[tableId] = { fields: definition }
+ }
if (!FieldTypeMap[column.type]) {
throw new Error(`Unable to map type "${column.type}" to SQLite type`)
}
@@ -49,10 +93,12 @@ function mapTable(table: Table): { [key: string]: SQLiteType } {
CONSTANT_INTERNAL_ROW_COLS.forEach(col => {
constantMap[col] = SQLiteType.TEXT
})
- return {
+ const thisTable: SQLiteTable = {
...constantMap,
...fields,
}
+ tables[table._id!] = { fields: thisTable }
+ return tables
}
// nothing exists, need to iterate though existing tables
@@ -60,8 +106,9 @@ async function buildBaseDefinition(): Promise<SQLiteDefinition> {
const tables = await tablesSdk.getAllInternalTables()
const definition = cloneDeep(BASIC_SQLITE_DOC)
for (let table of tables) {
- definition.sql.tables[table._id!] = {
- fields: mapTable(table),
+ definition.sql.tables = {
+ ...definition.sql.tables,
+ ...mapTable(table),
}
}
return definition
@@ -75,8 +122,9 @@ export async function addTableToSqlite(table: Table) {
} catch (err) {
definition = await buildBaseDefinition()
}
- definition.sql.tables[table._id!] = {
- fields: mapTable(table),
+ definition.sql.tables = {
+ ...definition.sql.tables,
+ ...mapTable(table),
}
await db.put(definition)
}
diff --git a/packages/server/src/tests/utilities/api/datasource.ts b/packages/server/src/tests/utilities/api/datasource.ts
index 0296f58f7d..0362a25940 100644
--- a/packages/server/src/tests/utilities/api/datasource.ts
+++ b/packages/server/src/tests/utilities/api/datasource.ts
@@ -60,7 +60,10 @@ export class DatasourceAPI extends TestAPI {
})
}
- query = async (query: QueryJson, expectations?: Expectations) => {
+ query = async (
+ query: Omit,
+ expectations?: Expectations
+ ) => {
return await this._post(`/api/datasources/query`, {
body: query,
expectations,
diff --git a/packages/types/src/documents/account/account.ts b/packages/types/src/documents/account/account.ts
index 2f74b9e7b3..239d845722 100644
--- a/packages/types/src/documents/account/account.ts
+++ b/packages/types/src/documents/account/account.ts
@@ -102,6 +102,7 @@ export function isVerifiableSSOProvider(provider: AccountSSOProvider): boolean {
}
export interface AccountSSO {
+ ssoId?: string
provider: AccountSSOProvider
providerType: AccountSSOProviderType
oauth2?: OAuthTokens
diff --git a/packages/types/src/documents/app/row.ts b/packages/types/src/documents/app/row.ts
index 222c346591..865ab4ba64 100644
--- a/packages/types/src/documents/app/row.ts
+++ b/packages/types/src/documents/app/row.ts
@@ -1,22 +1,111 @@
import { Document } from "../document"
export enum FieldType {
+ /**
+ * a primitive type, stores a string, called Text within Budibase. This is one of the default
+ * types of Budibase, if an external type is not fully understood, we will treat it as text.
+ */
STRING = "string",
+ /**
+ * similar to string type, called Long Form Text within Budibase. This is mainly a frontend
+ * orientated type which enables a larger text input area. This can also be used
+ * in conjunction with the 'useRichText' option to support a markdown editor/viewer.
+ */
LONGFORM = "longform",
+ /**
+ * similar to string type, called Options within Budibase. This works very similarly to
+ * the string type within the backend, but is validated to a list of options. This will
+ * display a