diff --git a/.eslintrc.json b/.eslintrc.json index 9dab2f1a88..f614f1ad91 100644 --- a/.eslintrc.json +++ b/.eslintrc.json @@ -92,7 +92,8 @@ // differs to external, but the API is broadly the same "jest/no-conditional-expect": "off", // have to turn this off to allow function overloading in typescript - "no-dupe-class-members": "off" + "no-dupe-class-members": "off", + "no-redeclare": "off" } }, { diff --git a/.github/workflows/budibase_ci.yml b/.github/workflows/budibase_ci.yml index 7f1e08601a..d6e0432e83 100644 --- a/.github/workflows/budibase_ci.yml +++ b/.github/workflows/budibase_ci.yml @@ -73,9 +73,9 @@ jobs: - name: Check types run: | if ${{ env.USE_NX_AFFECTED }}; then - yarn check:types --since=${{ env.NX_BASE_BRANCH }} + yarn check:types --since=${{ env.NX_BASE_BRANCH }} --ignore @budibase/account-portal-server else - yarn check:types + yarn check:types --ignore @budibase/account-portal-server fi helm-lint: @@ -226,10 +226,11 @@ jobs: if: ${{ steps.get_pro_commits.outputs.base_commit_excluding_merges != '' }} run: | cd packages/pro + base_commit='${{ steps.get_pro_commits.outputs.base_commit }}' base_commit_excluding_merges='${{ steps.get_pro_commits.outputs.base_commit_excluding_merges }}' pro_commit='${{ steps.get_pro_commits.outputs.pro_commit }}' - any_commit=$(git log --no-merges $base_commit_excluding_merges...$pro_commit) + any_commit=$(git log --no-merges $base_commit...$pro_commit) if [ -n "$any_commit" ]; then echo $any_commit diff --git a/.github/workflows/force-release.yml b/.github/workflows/force-release.yml index 8a9d444f51..3d96d51484 100644 --- a/.github/workflows/force-release.yml +++ b/.github/workflows/force-release.yml @@ -9,7 +9,7 @@ on: jobs: ensure-is-master-tag: name: Ensure is a master tag - runs-on: qa-arc-runner-set + runs-on: ubuntu-latest steps: - name: Checkout monorepo uses: actions/checkout@v4 diff --git a/.github/workflows/pr-labeler.yml b/.github/workflows/pr-labeler.yml new file mode 100644 index 0000000000..e2fa9f2515 --- /dev/null +++ b/.github/workflows/pr-labeler.yml @@ -0,0 +1,38 @@ +name: PR labeler + +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number }} + cancel-in-progress: true + +on: + pull_request: + types: [opened, synchronize] + +jobs: + size-labeler: + runs-on: ubuntu-latest + steps: + - uses: codelytv/pr-size-labeler@v1 + with: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + xs_max_size: "10" + s_max_size: "100" + m_max_size: "500" + l_max_size: "1000" + fail_if_xl: "false" + files_to_ignore: "yarn.lock" + + team-labeler: + runs-on: ubuntu-latest + if: ${{ github.event.action == 'opened' }} + steps: + - uses: rodrigoarias/auto-label-per-user@v1.0.0 + with: + git-token: ${{ secrets.GITHUB_TOKEN }} + user-team-map: | + { + "adrinr": "firestorm", + "samwho": "firestorm", + "PClmnt": "firestorm", + "mike12345567": "firestorm" + } diff --git a/.gitignore b/.gitignore index b68ddd975f..32d1416f4a 100644 --- a/.gitignore +++ b/.gitignore @@ -8,6 +8,8 @@ bb-airgapped.tar.gz packages/server/build/oldClientVersions/**/* packages/builder/src/components/deploy/clientVersions.json +packages/server/src/integrations/tests/utils/*.lock + # Logs logs *.log diff --git a/charts/budibase/Chart.lock b/charts/budibase/Chart.lock index 3ee752a362..8ff1d36941 100644 --- a/charts/budibase/Chart.lock +++ b/charts/budibase/Chart.lock @@ -1,6 +1,6 @@ dependencies: - name: couchdb repository: https://apache.github.io/couchdb-helm - version: 4.3.0 -digest: sha256:94449a7f195b186f5af33ec5aa66d58b36bede240fae710f021ca87837b30606 
-generated: "2023-11-20T17:43:02.777596Z" + version: 4.5.6 +digest: sha256:405f098633e632d6f4e140175f156ed4f02918b0d89193f1b66c9cbea211d6c9 +generated: "2024-06-05T14:41:05.979052+01:00" diff --git a/charts/budibase/Chart.yaml b/charts/budibase/Chart.yaml index e2c9378f2c..a16eba8982 100644 --- a/charts/budibase/Chart.yaml +++ b/charts/budibase/Chart.yaml @@ -17,6 +17,6 @@ version: 0.0.0 appVersion: 0.0.0 dependencies: - name: couchdb - version: 4.3.0 + version: 4.5.6 repository: https://apache.github.io/couchdb-helm condition: services.couchdb.enabled diff --git a/charts/budibase/README.md b/charts/budibase/README.md index 207992087d..c944ed1397 100644 --- a/charts/budibase/README.md +++ b/charts/budibase/README.md @@ -112,7 +112,9 @@ $ helm install --create-namespace --namespace budibase budibase . -f values.yaml | awsAlbIngress.enabled | bool | `false` | Whether to create an ALB Ingress resource pointing to the Budibase proxy. Requires the AWS ALB Ingress Controller. | | couchdb.clusterSize | int | `1` | The number of replicas to run in the CouchDB cluster. We set this to 1 by default to make things simpler, but you can set it to 3 if you need a high-availability CouchDB cluster. | | couchdb.couchdbConfig.couchdb.uuid | string | `"budibase-couchdb"` | Unique identifier for this CouchDB server instance. You shouldn't need to change this. | +| couchdb.extraPorts[0] | object | `{"containerPort":4984,"name":"sqs"}` | Extra ports to expose on the CouchDB service. We expose the SQS port by default, but you can add more ports here if you need to. | | couchdb.image | object | `{}` | We use a custom CouchDB image for running Budibase and we don't support using any other CouchDB image. You shouldn't change this, and if you do we can't guarantee that Budibase will work. | +| couchdb.service.extraPorts[0] | object | `{"name":"sqs","port":4984,"protocol":"TCP","targetPort":4984}` | Extra ports to expose on the CouchDB service. We expose the SQS port by default, but you can add more ports here if you need to. | | globals.apiEncryptionKey | string | `""` | Used for encrypting API keys and environment variables when stored in the database. You don't need to set this if `createSecrets` is true. | | globals.appVersion | string | `""` | The version of Budibase to deploy. Defaults to what's specified by {{ .Chart.AppVersion }}. Ends up being used as the image version tag for the apps, proxy, and worker images. | | globals.automationMaxIterations | string | `"200"` | The maximum number of iterations allows for an automation loop step. You can read more about looping here: . | @@ -135,6 +137,8 @@ $ helm install --create-namespace --namespace budibase budibase . -f values.yaml | globals.smtp.password | string | `""` | The password to use when authenticating with your SMTP server. | | globals.smtp.port | string | `"587"` | The port of your SMTP server. | | globals.smtp.user | string | `""` | The username to use when authenticating with your SMTP server. | +| globals.sqs.enabled | bool | `false` | Whether to use the CouchDB "structured query service" or not. This is disabled by default for now, but will become the default in a future release. | +| globals.tempBucketName | string | `""` | | | globals.tenantFeatureFlags | string | `"*:LICENSING,*:USER_GROUPS,*:ONBOARDING_TOUR"` | Sets what feature flags are enabled and for which tenants. Should not ordinarily need to be changed. | | imagePullSecrets | list | `[]` | Passed to all pods created by this chart. Should not ordinarily need to be changed. 
| | ingress.className | string | `""` | What ingress class to use. | @@ -152,6 +156,7 @@ $ helm install --create-namespace --namespace budibase budibase . -f values.yaml | services.apps.autoscaling.targetCPUUtilizationPercentage | int | `80` | Target CPU utilization percentage for the apps service. Note that for autoscaling to work, you will need to have metrics-server configured, and resources set for the apps pods. | | services.apps.extraContainers | list | `[]` | Additional containers to be added to the apps pod. | | services.apps.extraEnv | list | `[]` | Extra environment variables to set for apps pods. Takes a list of name=value pairs. | +| services.apps.extraEnvFromSecret | list | `[]` | Name of the K8s Secret in the same namespace which contains the extra environment variables. This can be used to avoid storing sensitive information in the values.yaml file. | | services.apps.extraVolumeMounts | list | `[]` | Additional volumeMounts to the main apps container. | | services.apps.extraVolumes | list | `[]` | Additional volumes to the apps pod. | | services.apps.httpLogging | int | `1` | Whether or not to log HTTP requests to the apps service. | @@ -168,6 +173,7 @@ $ helm install --create-namespace --namespace budibase budibase . -f values.yaml | services.automationWorkers.enabled | bool | `true` | Whether or not to enable the automation worker service. If you disable this, automations will be processed by the apps service. | | services.automationWorkers.extraContainers | list | `[]` | Additional containers to be added to the automationWorkers pod. | | services.automationWorkers.extraEnv | list | `[]` | Extra environment variables to set for automation worker pods. Takes a list of name=value pairs. | +| services.automationWorkers.extraEnvFromSecret | list | `[]` | Name of the K8s Secret in the same namespace which contains the extra environment variables. This can be used to avoid storing sensitive information in the values.yaml file. | | services.automationWorkers.extraVolumeMounts | list | `[]` | Additional volumeMounts to the main automationWorkers container. | | services.automationWorkers.extraVolumes | list | `[]` | Additional volumes to the automationWorkers pod. | | services.automationWorkers.livenessProbe | object | HTTP health checks. | Liveness probe configuration for automation worker pods. You shouldn't need to change this, but if you want to you can find more information here: | @@ -195,7 +201,7 @@ $ helm install --create-namespace --namespace budibase budibase . -f values.yaml | services.objectStore.region | string | `""` | AWS_REGION if using S3 | | services.objectStore.resources | object | `{}` | The resources to use for Minio pods. See for more information on how to set these. | | services.objectStore.secretKey | string | `""` | AWS_SECRET_ACCESS_KEY if using S3 | -| services.objectStore.storage | string | `"100Mi"` | How much storage to give Minio in its PersistentVolumeClaim. | +| services.objectStore.storage | string | `"2Gi"` | How much storage to give Minio in its PersistentVolumeClaim. | | services.objectStore.storageClass | string | `""` | If defined, storageClassName: If set to "-", storageClassName: "", which disables dynamic provisioning If undefined (the default) or set to null, no storageClassName spec is set, choosing the default provisioner. | | services.objectStore.url | string | `"http://minio-service:9000"` | URL to use for object storage. Only change this if you're using an external object store, such as S3. 
Remember to set `minio: false` if you do this. | | services.proxy.autoscaling.enabled | bool | `false` | Whether to enable horizontal pod autoscaling for the proxy service. | @@ -227,6 +233,7 @@ $ helm install --create-namespace --namespace budibase budibase . -f values.yaml | services.worker.autoscaling.targetCPUUtilizationPercentage | int | `80` | Target CPU utilization percentage for the worker service. Note that for autoscaling to work, you will need to have metrics-server configured, and resources set for the worker pods. | | services.worker.extraContainers | list | `[]` | Additional containers to be added to the worker pod. | | services.worker.extraEnv | list | `[]` | Extra environment variables to set for worker pods. Takes a list of name=value pairs. | +| services.worker.extraEnvFromSecret | list | `[]` | Name of the K8s Secret in the same namespace which contains the extra environment variables. This can be used to avoid storing sensitive information in the values.yaml file. | | services.worker.extraVolumeMounts | list | `[]` | Additional volumeMounts to the main worker container. | | services.worker.extraVolumes | list | `[]` | Additional volumes to the worker pod. | | services.worker.httpLogging | int | `1` | Whether or not to log HTTP requests to the worker service. | diff --git a/charts/budibase/charts/couchdb-4.3.0.tgz b/charts/budibase/charts/couchdb-4.3.0.tgz deleted file mode 100644 index d3cce28ee6..0000000000 Binary files a/charts/budibase/charts/couchdb-4.3.0.tgz and /dev/null differ diff --git a/charts/budibase/charts/couchdb-4.5.6.tgz b/charts/budibase/charts/couchdb-4.5.6.tgz new file mode 100644 index 0000000000..a9211ed279 Binary files /dev/null and b/charts/budibase/charts/couchdb-4.5.6.tgz differ diff --git a/charts/budibase/templates/app-service-deployment.yaml b/charts/budibase/templates/app-service-deployment.yaml index ed7166ec5d..6b4fbe5dda 100644 --- a/charts/budibase/templates/app-service-deployment.yaml +++ b/charts/budibase/templates/app-service-deployment.yaml @@ -42,6 +42,14 @@ spec: {{ else }} value: http://{{ .Release.Name }}-svc-couchdb:{{ .Values.services.couchdb.port }} {{ end }} + {{ if .Values.globals.sqs.enabled }} + - name: COUCH_DB_SQL_URL + {{ if .Values.globals.sqs.url }} + value: {{ .Values.globals.sqs.url }} + {{ else }} + value: http://{{ .Release.Name }}-svc-couchdb:{{ .Values.globals.sqs.port }} + {{ end }} + {{ end }} {{ if .Values.services.couchdb.enabled }} - name: COUCH_DB_USER valueFrom: @@ -198,10 +206,21 @@ spec: - name: APP_FEATURES value: "api" {{- end }} + {{- if .Values.globals.sqs.enabled }} + - name: SQS_SEARCH_ENABLE + value: "true" + {{- end }} {{- range .Values.services.apps.extraEnv }} - name: {{ .name }} value: {{ .value | quote }} {{- end }} + {{- range .Values.services.apps.extraEnvFromSecret}} + - name: {{ .name }} + valueFrom: + secretKeyRef: + name: {{ .secretName }} + key: {{ .secretKey | quote }} + {{- end}} image: budibase/apps:{{ .Values.globals.appVersion | default .Chart.AppVersion }} imagePullPolicy: Always {{- if .Values.services.apps.startupProbe }} diff --git a/charts/budibase/templates/automation-worker-service-deployment.yaml b/charts/budibase/templates/automation-worker-service-deployment.yaml index 3c6f94ae9e..38a384626e 100644 --- a/charts/budibase/templates/automation-worker-service-deployment.yaml +++ b/charts/budibase/templates/automation-worker-service-deployment.yaml @@ -201,6 +201,13 @@ spec: - name: {{ .name }} value: {{ .value | quote }} {{- end }} + {{- range 
.Values.services.automationWorkers.extraEnvFromSecret}} + - name: {{ .name }} + valueFrom: + secretKeyRef: + name: {{ .secretName }} + key: {{ .secretKey | quote }} + {{- end}} image: budibase/apps:{{ .Values.globals.appVersion | default .Chart.AppVersion }} imagePullPolicy: Always @@ -272,4 +279,4 @@ spec: {{- toYaml .Values.services.automationWorkers.extraVolumes | nindent 8 }} {{ end }} status: {} -{{- end }} \ No newline at end of file +{{- end }} diff --git a/charts/budibase/templates/worker-service-deployment.yaml b/charts/budibase/templates/worker-service-deployment.yaml index 66a9bb6c14..ecab8dc645 100644 --- a/charts/budibase/templates/worker-service-deployment.yaml +++ b/charts/budibase/templates/worker-service-deployment.yaml @@ -56,6 +56,14 @@ spec: {{ else }} value: http://{{ .Release.Name }}-svc-couchdb:{{ .Values.services.couchdb.port }} {{ end }} + {{ if .Values.globals.sqs.enabled }} + - name: COUCH_DB_SQL_URL + {{ if .Values.globals.sqs.url }} + value: {{ .Values.globals.sqs.url }} + {{ else }} + value: http://{{ .Release.Name }}-svc-couchdb:{{ .Values.globals.sqs.port }} + {{ end }} + {{ end }} - name: API_ENCRYPTION_KEY value: {{ .Values.globals.apiEncryptionKey | quote }} - name: HTTP_LOGGING @@ -184,10 +192,21 @@ spec: - name: NODE_TLS_REJECT_UNAUTHORIZED value: {{ .Values.services.tlsRejectUnauthorized }} {{ end }} + {{- if .Values.globals.sqs.enabled }} + - name: SQS_SEARCH_ENABLE + value: "true" + {{- end }} {{- range .Values.services.worker.extraEnv }} - name: {{ .name }} value: {{ .value | quote }} {{- end }} + {{- range .Values.services.worker.extraEnvFromSecret}} + - name: {{ .name }} + valueFrom: + secretKeyRef: + name: {{ .secretName }} + key: {{ .secretKey | quote }} + {{- end}} image: budibase/worker:{{ .Values.globals.appVersion | default .Chart.AppVersion }} imagePullPolicy: Always {{- if .Values.services.worker.startupProbe }} diff --git a/charts/budibase/values.yaml b/charts/budibase/values.yaml index 27037cdaa8..4e80be7322 100644 --- a/charts/budibase/values.yaml +++ b/charts/budibase/values.yaml @@ -138,6 +138,15 @@ globals: # -- The password to use when authenticating with your SMTP server. password: "" + sqs: + # -- Whether to use the CouchDB "structured query service" or not. This is disabled by + # default for now, but will become the default in a future release. + enabled: false + # @ignore + url: "" + # @ignore + port: "4984" + services: # -- The DNS suffix to use for service discovery. You only need to change this # if you've configured your cluster to use a different DNS suffix. @@ -240,6 +249,13 @@ services: # -- Extra environment variables to set for apps pods. Takes a list of # name=value pairs. extraEnv: [] + # -- Name of the K8s Secret in the same namespace which contains the extra environment variables. + # This can be used to avoid storing sensitive information in the values.yaml file. + extraEnvFromSecret: [] + # - name: MY_SECRET_KEY + # secretName : my-secret + # secretKey: my-secret-key + # -- Startup probe configuration for apps pods. You shouldn't need to # change this, but if you want to you can find more information here: # @@ -323,6 +339,13 @@ services: # -- Extra environment variables to set for automation worker pods. Takes a list of # name=value pairs. extraEnv: [] + # -- Name of the K8s Secret in the same namespace which contains the extra environment variables. + # This can be used to avoid storing sensitive information in the values.yaml file. 
+ extraEnvFromSecret: [] + # - name: MY_SECRET_KEY + # secretName : my-secret + # secretKey: my-secret-key + # -- Startup probe configuration for automation worker pods. You shouldn't # need to change this, but if you want to you can find more information # here: @@ -408,6 +431,13 @@ services: # -- Extra environment variables to set for worker pods. Takes a list of # name=value pairs. extraEnv: [] + # -- Name of the K8s Secret in the same namespace which contains the extra environment variables. + # This can be used to avoid storing sensitive information in the values.yaml file. + extraEnvFromSecret: [] + # - name: MY_SECRET_KEY + # secretName : my-secret + # secretKey: my-secret-key + # -- Startup probe configuration for worker pods. You shouldn't need to # change this, but if you want to you can find more information here: # @@ -611,10 +641,25 @@ couchdb: # @ignore repository: budibase/couchdb # @ignore - tag: v3.2.1 + tag: v3.3.3 # @ignore pullPolicy: Always + extraPorts: + # -- Extra ports to expose on the CouchDB service. We expose the SQS port + # by default, but you can add more ports here if you need to. + - name: sqs + containerPort: 4984 + + service: + extraPorts: + # -- Extra ports to expose on the CouchDB service. We expose the SQS port + # by default, but you can add more ports here if you need to. + - name: sqs + port: 4984 + targetPort: 4984 + protocol: TCP + # @ignore # This should remain false. We ship Clouseau ourselves as part of the # budibase/couchdb image, and it's not possible to disable it because it's a diff --git a/examples/nextjs-api-sales/package.json b/examples/nextjs-api-sales/package.json index 9303874a77..481197b26c 100644 --- a/examples/nextjs-api-sales/package.json +++ b/examples/nextjs-api-sales/package.json @@ -22,6 +22,6 @@ "@types/react": "17.0.39", "eslint": "8.10.0", "eslint-config-next": "12.1.0", - "typescript": "5.2.2" + "typescript": "5.5.2" } } diff --git a/examples/nextjs-api-sales/yarn.lock b/examples/nextjs-api-sales/yarn.lock index 1f34ca2ee2..93e26a954d 100644 --- a/examples/nextjs-api-sales/yarn.lock +++ b/examples/nextjs-api-sales/yarn.lock @@ -333,11 +333,11 @@ brace-expansion@^1.1.7: concat-map "0.0.1" braces@^3.0.1, braces@~3.0.2: - version "3.0.2" - resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.2.tgz#3454e1a462ee8d599e236df336cd9ea4f8afe107" - integrity sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A== + version "3.0.3" + resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.3.tgz#490332f40919452272d55a8480adc0c441358789" + integrity sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA== dependencies: - fill-range "^7.0.1" + fill-range "^7.1.1" bulma@^0.9.3: version "0.9.3" @@ -781,10 +781,10 @@ file-entry-cache@^6.0.1: dependencies: flat-cache "^3.0.4" -fill-range@^7.0.1: - version "7.0.1" - resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.0.1.tgz#1919a6a7c75fe38b2c7c77e5198535da9acdda40" - integrity sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ== +fill-range@^7.1.1: + version "7.1.1" + resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.1.1.tgz#44265d3cac07e3ea7dc247516380643754a05292" + integrity sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg== dependencies: to-regex-range "^5.0.1" @@ -1709,10 +1709,10 @@ type-fest@^0.20.2: resolved 
"https://registry.yarnpkg.com/type-fest/-/type-fest-0.20.2.tgz#1bf207f4b28f91583666cb5fbd327887301cd5f4" integrity sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ== -typescript@4.6.2: - version "4.6.2" - resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.6.2.tgz#fe12d2727b708f4eef40f51598b3398baa9611d4" - integrity sha512-HM/hFigTBHZhLXshn9sN37H085+hQGeJHJ/X7LpBWLID/fbc2acUMfU+lGD98X81sKP+pFa9f0DZmCwB9GnbAg== +typescript@5.2.2: + version "5.2.2" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.2.2.tgz#5ebb5e5a5b75f085f22bc3f8460fba308310fa78" + integrity sha512-mI4WrpHsbCIcwT9cF4FZvr80QUeKvsUsUvKDoR+X/7XHQH98xYD8YHZg7ANtz2GtZt/CBq2QJ0thkGJMHfqc1w== unbox-primitive@^1.0.1: version "1.0.1" diff --git a/hosting/proxy/nginx.prod.conf b/hosting/proxy/nginx.prod.conf index 12b8df049f..59722dac5c 100644 --- a/hosting/proxy/nginx.prod.conf +++ b/hosting/proxy/nginx.prod.conf @@ -74,6 +74,7 @@ http { add_header X-Content-Type-Options nosniff always; add_header X-XSS-Protection "1; mode=block" always; add_header Content-Security-Policy "${csp_default}; ${csp_script}; ${csp_style}; ${csp_object}; ${csp_base_uri}; ${csp_connect}; ${csp_font}; ${csp_frame}; ${csp_img}; ${csp_manifest}; ${csp_media}; ${csp_worker};" always; + add_header Strict-Transport-Security "max-age=31536000; includeSubDomains; preload" always; # upstreams set $apps ${APPS_UPSTREAM_URL}; diff --git a/lerna.json b/lerna.json index 335df975af..1bce056679 100644 --- a/lerna.json +++ b/lerna.json @@ -1,5 +1,5 @@ { - "version": "2.27.5", + "version": "2.29.5", "npmClient": "yarn", "packages": [ "packages/*", diff --git a/package.json b/package.json index 08176fae90..d4a51f2e62 100644 --- a/package.json +++ b/package.json @@ -27,7 +27,7 @@ "proper-lockfile": "^4.1.2", "svelte": "^4.2.10", "svelte-eslint-parser": "^0.33.1", - "typescript": "5.2.2", + "typescript": "5.5.2", "typescript-eslint": "^7.3.1", "yargs": "^17.7.2" }, @@ -37,10 +37,10 @@ "build": "NODE_OPTIONS=--max-old-space-size=1500 lerna run build --stream", "build:apps": "yarn build --scope @budibase/server --scope @budibase/worker", "build:cli": "yarn build --scope @budibase/cli", - "build:oss": "NODE_OPTIONS=--max-old-space-size=1500 lerna run build --stream --ignore @budibase/account-portal --ignore @budibase/account-portal-server --ignore @budibase/account-portal-ui", - "build:account-portal": "NODE_OPTIONS=--max-old-space-size=1500 lerna run build --stream --scope @budibase/account-portal --scope @budibase/account-portal-server --scope @budibase/account-portal-ui", + "build:oss": "NODE_OPTIONS=--max-old-space-size=1500 lerna run build --stream --ignore @budibase/account-portal-server --ignore @budibase/account-portal-ui", + "build:account-portal": "NODE_OPTIONS=--max-old-space-size=1500 lerna run build --stream --scope @budibase/account-portal-server --scope @budibase/account-portal-ui", "build:dev": "lerna run --stream prebuild && yarn nx run-many --target=build --output-style=dynamic --watch --preserveWatchOutput", - "check:types": "lerna run --concurrency 2 check:types", + "check:types": "lerna run --concurrency 2 check:types --ignore @budibase/account-portal-server", "build:sdk": "lerna run --stream build:sdk", "deps:circular": "madge packages/server/dist/index.js packages/worker/src/index.ts packages/backend-core/dist/src/index.js packages/cli/src/index.js --circular", "release": "lerna publish from-package --yes --force-publish --no-git-tag-version --no-push --no-git-reset", diff --git 
a/packages/account-portal b/packages/account-portal index 39acfff42a..ff16525b73 160000 --- a/packages/account-portal +++ b/packages/account-portal @@ -1 +1 @@ -Subproject commit 39acfff42a063e5a8a7d58d36721ec3103e16348 +Subproject commit ff16525b73c5751d344f5c161a682609c0a993f2 diff --git a/packages/backend-core/package.json b/packages/backend-core/package.json index f61059cc97..88b970884c 100644 --- a/packages/backend-core/package.json +++ b/packages/backend-core/package.json @@ -16,7 +16,7 @@ "prepack": "cp package.json dist", "build": "tsc -p tsconfig.build.json --paths null && node ./scripts/build.js", "build:dev": "yarn prebuild && tsc --build --watch --preserveWatchOutput", - "check:types": "tsc -p tsconfig.json --noEmit --paths null", + "check:types": "tsc -p tsconfig.json --noEmit --paths null --target es2020", "test": "bash scripts/test.sh", "test:watch": "jest --watchAll" }, @@ -79,7 +79,7 @@ "pouchdb-adapter-memory": "7.2.2", "testcontainers": "^10.7.2", "timekeeper": "2.2.0", - "typescript": "5.2.2" + "typescript": "5.5.2" }, "nx": { "targets": { diff --git a/packages/backend-core/src/constants/db.ts b/packages/backend-core/src/constants/db.ts index 2fd713119b..3085b91ef1 100644 --- a/packages/backend-core/src/constants/db.ts +++ b/packages/backend-core/src/constants/db.ts @@ -72,4 +72,4 @@ export const DEFAULT_JOBS_TABLE_ID = "ta_bb_jobs" export const DEFAULT_INVENTORY_TABLE_ID = "ta_bb_inventory" export const DEFAULT_EXPENSES_TABLE_ID = "ta_bb_expenses" export const DEFAULT_EMPLOYEE_TABLE_ID = "ta_bb_employee" -export const DEFAULT_BB_DATASOURCE_ID = "datasource_internal_bb_default" +export { DEFAULT_BB_DATASOURCE_ID } from "@budibase/shared-core" diff --git a/packages/backend-core/src/db/constants.ts b/packages/backend-core/src/db/constants.ts index bfa7595d62..69c98fe569 100644 --- a/packages/backend-core/src/db/constants.ts +++ b/packages/backend-core/src/db/constants.ts @@ -1,14 +1,5 @@ -export const CONSTANT_INTERNAL_ROW_COLS = [ - "_id", - "_rev", - "type", - "createdAt", - "updatedAt", - "tableId", -] as const - -export const CONSTANT_EXTERNAL_ROW_COLS = ["_id", "_rev", "tableId"] as const - -export function isInternalColumnName(name: string): boolean { - return (CONSTANT_INTERNAL_ROW_COLS as readonly string[]).includes(name) -} +export { + CONSTANT_INTERNAL_ROW_COLS, + CONSTANT_EXTERNAL_ROW_COLS, + isInternalColumnName, +} from "@budibase/shared-core" diff --git a/packages/backend-core/src/db/couch/DatabaseImpl.ts b/packages/backend-core/src/db/couch/DatabaseImpl.ts index 8194d1aabf..4db63ad695 100644 --- a/packages/backend-core/src/db/couch/DatabaseImpl.ts +++ b/packages/backend-core/src/db/couch/DatabaseImpl.ts @@ -8,6 +8,7 @@ import { DatabaseOpts, DatabasePutOpts, DatabaseQueryOpts, + DBError, Document, isDocument, RowResponse, @@ -41,7 +42,7 @@ function buildNano(couchInfo: { url: string; cookie: string }) { type DBCall<T> = () => Promise<T> -class CouchDBError extends Error { +class CouchDBError extends Error implements DBError { status: number statusCode: number reason: string @@ -328,7 +329,14 @@ async sqlDiskCleanup(): Promise<void> { const dbName = this.name const url = `/${dbName}/_cleanup` - return await this._sqlQuery(url, "POST") + try { + await this._sqlQuery(url, "POST") + } catch (err: any) { + // hack for now - SQS throws a 500 when there is nothing to clean-up + if (err.status !== 500) { + throw err + } + } } // removes a document from sqlite @@ -352,18 +360,15 @@ }
async destroy() { + if (env.SQS_SEARCH_ENABLE && (await this.exists(SQLITE_DESIGN_DOC_ID))) { + // delete the design document, then run the cleanup operation + const definition = await this.get(SQLITE_DESIGN_DOC_ID) + // remove all tables - save the definition then trigger a cleanup + definition.sql.tables = {} + await this.put(definition) + await this.sqlDiskCleanup() + } try { - if (env.SQS_SEARCH_ENABLE) { - // delete the design document, then run the cleanup operation - try { - const definition = await this.get( - SQLITE_DESIGN_DOC_ID - ) - await this.remove(SQLITE_DESIGN_DOC_ID, definition._rev) - } finally { - await this.sqlDiskCleanup() - } - } return await this.nano().db.destroy(this.name) } catch (err: any) { // didn't exist, don't worry diff --git a/packages/backend-core/src/environment.ts b/packages/backend-core/src/environment.ts index 1e7da2f9a2..e58660a889 100644 --- a/packages/backend-core/src/environment.ts +++ b/packages/backend-core/src/environment.ts @@ -93,15 +93,21 @@ function isApps() { return environment.SERVICE_TYPE === ServiceType.APPS } +function isQA() { + return environment.BUDIBASE_ENVIRONMENT === "QA" +} + const environment = { isTest, isJest, isDev, isWorker, isApps, + isQA, isProd: () => { return !isDev() }, + BUDIBASE_ENVIRONMENT: process.env.BUDIBASE_ENVIRONMENT, JS_BCRYPT: process.env.JS_BCRYPT, JWT_SECRET: process.env.JWT_SECRET, JWT_SECRET_FALLBACK: process.env.JWT_SECRET_FALLBACK, @@ -120,6 +126,7 @@ REDIS_CLUSTERED: process.env.REDIS_CLUSTERED, MINIO_ACCESS_KEY: process.env.MINIO_ACCESS_KEY, MINIO_SECRET_KEY: process.env.MINIO_SECRET_KEY, + AWS_SESSION_TOKEN: process.env.AWS_SESSION_TOKEN, AWS_REGION: process.env.AWS_REGION, MINIO_URL: process.env.MINIO_URL, MINIO_ENABLED: process.env.MINIO_ENABLED || 1, diff --git a/packages/backend-core/src/middleware/joi-validator.ts b/packages/backend-core/src/middleware/joi-validator.ts index ac8064a512..5047cdbbc1 100644 --- a/packages/backend-core/src/middleware/joi-validator.ts +++ b/packages/backend-core/src/middleware/joi-validator.ts @@ -3,7 +3,8 @@ import { Ctx } from "@budibase/types" function validate( schema: Joi.ObjectSchema | Joi.ArraySchema, - property: string + property: string, + opts: { errorPrefix: string } = { errorPrefix: `Invalid ${property}` } ) { // Return a Koa middleware function return (ctx: Ctx, next: any) => { @@ -29,16 +30,26 @@ const { error } = schema.validate(params) if (error) { - ctx.throw(400, `Invalid ${property} - ${error.message}`) + let message = error.message + if (opts.errorPrefix) { + message = `${opts.errorPrefix} - ${message}` + } + ctx.throw(400, message) } return next() } } -export function body(schema: Joi.ObjectSchema | Joi.ArraySchema) { - return validate(schema, "body") +export function body( + schema: Joi.ObjectSchema | Joi.ArraySchema, + opts?: { errorPrefix: string } +) { + return validate(schema, "body", opts) } -export function params(schema: Joi.ObjectSchema | Joi.ArraySchema) { - return validate(schema, "params") +export function params( + schema: Joi.ObjectSchema | Joi.ArraySchema, + opts?: { errorPrefix: string } +) { + return validate(schema, "params", opts) } diff --git a/packages/backend-core/src/objectStore/objectStore.ts b/packages/backend-core/src/objectStore/objectStore.ts index 0ac2c35179..68b1b10ec2 100644 --- a/packages/backend-core/src/objectStore/objectStore.ts +++ b/packages/backend-core/src/objectStore/objectStore.ts @@ -14,6 +14,7 @@ import { v4 } from "uuid" import { APP_PREFIX, APP_DEV_PREFIX } from
"../db" import fsp from "fs/promises" import { HeadObjectOutput } from "aws-sdk/clients/s3" +import { ReadableStream } from "stream/web" const streamPipeline = promisify(stream.pipeline) // use this as a temporary store of buckets that are being created @@ -41,10 +42,7 @@ type UploadParams = BaseUploadParams & { path?: string | PathLike } -export type StreamTypes = - | ReadStream - | NodeJS.ReadableStream - | ReadableStream +export type StreamTypes = ReadStream | NodeJS.ReadableStream export type StreamUploadParams = BaseUploadParams & { stream?: StreamTypes @@ -103,6 +101,11 @@ export function ObjectStore( } } + // for AWS Credentials using temporary session token + if (!env.MINIO_ENABLED && env.AWS_SESSION_TOKEN) { + config.sessionToken = env.AWS_SESSION_TOKEN + } + // custom S3 is in use i.e. minio if (env.MINIO_URL) { if (opts.presigning && env.MINIO_ENABLED) { @@ -222,6 +225,9 @@ export async function streamUpload({ extra, ttl, }: StreamUploadParams) { + if (!stream) { + throw new Error("Stream to upload is invalid/undefined") + } const extension = filename.split(".").pop() const objectStore = ObjectStore(bucketName) const bucketCreated = await createBucketIfNotExists(objectStore, bucketName) @@ -251,14 +257,27 @@ export async function streamUpload({ : CONTENT_TYPE_MAP.txt } + const bucket = sanitizeBucket(bucketName), + objKey = sanitizeKey(filename) const params = { - Bucket: sanitizeBucket(bucketName), - Key: sanitizeKey(filename), + Bucket: bucket, + Key: objKey, Body: stream, ContentType: contentType, ...extra, } - return objectStore.upload(params).promise() + + const details = await objectStore.upload(params).promise() + const headDetails = await objectStore + .headObject({ + Bucket: bucket, + Key: objKey, + }) + .promise() + return { + ...details, + ContentLength: headDetails.ContentLength, + } } /** diff --git a/packages/backend-core/src/queue/inMemoryQueue.ts b/packages/backend-core/src/queue/inMemoryQueue.ts index 333accc985..62b971f9f5 100644 --- a/packages/backend-core/src/queue/inMemoryQueue.ts +++ b/packages/backend-core/src/queue/inMemoryQueue.ts @@ -63,12 +63,12 @@ class InMemoryQueue implements Partial { * Same callback API as Bull, each callback passed to this will consume messages as they are * available. Please note this is a queue service, not a notification service, so each * consumer will receive different messages. - * @param func The callback function which will return a "Job", the same * as the Bull API, within this job the property "data" contains the JSON message. Please * note this is incredibly limited compared to Bull as in reality the Job would contain * a lot more information about the queue and current status of Bull cluster. */ - async process(func: any) { + async process(concurrencyOrFunc: number | any, func?: any) { + func = typeof concurrencyOrFunc === "number" ? 
func : concurrencyOrFunc this._emitter.on("message", async () => { if (this._messages.length <= 0) { return diff --git a/packages/backend-core/src/queue/queue.ts b/packages/backend-core/src/queue/queue.ts index 1838eed92f..f633d0885e 100644 --- a/packages/backend-core/src/queue/queue.ts +++ b/packages/backend-core/src/queue/queue.ts @@ -21,6 +21,7 @@ let cleanupInterval: NodeJS.Timeout async function cleanup() { for (let queue of QUEUES) { await queue.clean(CLEANUP_PERIOD_MS, "completed") + await queue.clean(CLEANUP_PERIOD_MS, "failed") } } diff --git a/packages/backend-core/src/sql/sql.ts b/packages/backend-core/src/sql/sql.ts index 61d5849058..615753efc3 100644 --- a/packages/backend-core/src/sql/sql.ts +++ b/packages/backend-core/src/sql/sql.ts @@ -1,10 +1,10 @@ import { Knex, knex } from "knex" import * as dbCore from "../db" import { - isIsoDateString, - isValidFilter, getNativeSql, isExternalTable, + isIsoDateString, + isValidFilter, } from "./utils" import { SqlStatements } from "./sqlStatements" import SqlTableQueryBuilder from "./sqlTable" @@ -12,21 +12,21 @@ import { BBReferenceFieldMetadata, FieldSchema, FieldType, + INTERNAL_TABLE_SOURCE_ID, JsonFieldMetadata, + JsonTypes, Operation, + prefixed, QueryJson, - SqlQuery, + QueryOptions, RelationshipsJson, SearchFilters, - SortDirection, + SortOrder, + SqlClient, + SqlQuery, SqlQueryBinding, Table, TableSourceType, - INTERNAL_TABLE_SOURCE_ID, - SqlClient, - QueryOptions, - JsonTypes, - prefixed, } from "@budibase/types" import environment from "../environment" import { helpers } from "@budibase/shared-core" @@ -114,7 +114,7 @@ function generateSelectStatement( ): (string | Knex.Raw)[] | "*" { const { resource, meta } = json - if (!resource) { + if (!resource || !resource.fields || resource.fields.length === 0) { return "*" } @@ -184,7 +184,11 @@ class InternalBuilder { query: Knex.QueryBuilder, filters: SearchFilters | undefined, table: Table, - opts: { aliases?: Record<string, string>; relationship?: boolean } + opts: { + aliases?: Record<string, string> + relationship?: boolean + columnPrefix?: string + } ): Knex.QueryBuilder { if (!filters) { return query @@ -192,7 +196,10 @@ filters = parseFilters(filters) // if all or specified in filters, then everything is an or const allOr = filters.allOr - const sqlStatements = new SqlStatements(this.client, table, { allOr }) + const sqlStatements = new SqlStatements(this.client, table, { + allOr, + columnPrefix: opts.columnPrefix, + }) const tableName = this.client === SqlClient.SQL_LITE ? table._id! : table.name @@ -397,9 +404,9 @@ contains(filters.containsAny, true) } + const tableRef = opts?.aliases?.[table._id!] || table._id // when searching internal tables, make sure we only look for rows - if (filters.documentType && !isExternalTable(table)) { - const tableRef = opts?.aliases?.[table._id!] || table._id + if (filters.documentType && !isExternalTable(table) && tableRef) { // has to be its own option, must always be AND onto the search query.andWhereLike( `${tableRef}._id`, @@ -410,28 +417,50 @@ return query } - addSorting(query: Knex.QueryBuilder, json: QueryJson): Knex.QueryBuilder { - let { sort, paginate } = json + addDistinctCount( + query: Knex.QueryBuilder, + json: QueryJson + ): Knex.QueryBuilder { const table = json.meta.table + const primary = table.primary + const aliases = json.tableAliases + const aliased = + table.name && aliases?.[table.name] ?
aliases[table.name] : table.name + if (!primary) { + throw new Error("SQL counting requires primary key to be supplied") + } + return query.countDistinct(`${aliased}.${primary[0]} as total`) + } + + addSorting(query: Knex.QueryBuilder, json: QueryJson): Knex.QueryBuilder { + let { sort } = json + const table = json.meta.table + const primaryKey = table.primary const tableName = getTableName(table) const aliases = json.tableAliases const aliased = tableName && aliases?.[tableName] ? aliases[tableName] : table?.name + if (!Array.isArray(primaryKey)) { + throw new Error("Sorting requires primary key to be specified for table") + } if (sort && Object.keys(sort || {}).length > 0) { for (let [key, value] of Object.entries(sort)) { const direction = - value.direction === SortDirection.ASCENDING ? "asc" : "desc" + value.direction === SortOrder.ASCENDING ? "asc" : "desc" let nulls if (this.client === SqlClient.POSTGRES) { // All other clients already sort this as expected by default, and adding this to the rest of the clients is causing issues - nulls = value.direction === SortDirection.ASCENDING ? "first" : "last" + nulls = value.direction === SortOrder.ASCENDING ? "first" : "last" } query = query.orderBy(`${aliased}.${key}`, direction, nulls) } - } else if (this.client === SqlClient.MS_SQL && paginate?.limit) { - // @ts-ignore - query = query.orderBy(`${aliased}.${table?.primary[0]}`) + } + + // add sorting by the primary key if the result isn't already sorted by it, + // to make sure result is deterministic + if (!sort || sort[primaryKey[0]] === undefined) { + query = query.orderBy(`${aliased}.${primaryKey[0]}`) } return query } @@ -522,7 +551,7 @@ class InternalBuilder { }) } } - return query.limit(BASE_LIMIT) + return query } knexWithAlias( @@ -533,13 +562,12 @@ class InternalBuilder { const tableName = endpoint.entityId const tableAlias = aliases?.[tableName] - const query = knex( + return knex( this.tableNameWithSchema(tableName, { alias: tableAlias, schema: endpoint.schema, }) ) - return query } create(knex: Knex, json: QueryJson, opts: QueryOptions): Knex.QueryBuilder { @@ -571,52 +599,95 @@ class InternalBuilder { return query.insert(parsedBody) } - read(knex: Knex, json: QueryJson, limit: number): Knex.QueryBuilder { - let { endpoint, resource, filters, paginate, relationships, tableAliases } = - json + bulkUpsert(knex: Knex, json: QueryJson): Knex.QueryBuilder { + const { endpoint, body } = json + let query = this.knexWithAlias(knex, endpoint) + if (!Array.isArray(body)) { + return query + } + const parsedBody = body.map(row => parseBody(row)) + if ( + this.client === SqlClient.POSTGRES || + this.client === SqlClient.SQL_LITE || + this.client === SqlClient.MY_SQL + ) { + const primary = json.meta.table.primary + if (!primary) { + throw new Error("Primary key is required for upsert") + } + const ret = query.insert(parsedBody).onConflict(primary).merge() + return ret + } else if (this.client === SqlClient.MS_SQL) { + // No upsert or onConflict support in MSSQL yet, see: + // https://github.com/knex/knex/pull/6050 + return query.insert(parsedBody) + } + return query.upsert(parsedBody) + } + + read( + knex: Knex, + json: QueryJson, + opts: { + limits?: { base: number; query: number } + } = {} + ): Knex.QueryBuilder { + let { endpoint, filters, paginate, relationships, tableAliases } = json + const { limits } = opts + const counting = endpoint.operation === Operation.COUNT const tableName = endpoint.entityId - // select all if not specified - if (!resource) { - resource = { fields: [] } - } 
- let selectStatement: string | (string | Knex.Raw)[] = "*" - // handle select - if (resource.fields && resource.fields.length > 0) { - // select the resources as the format "table.columnName" - this is what is provided - // by the resource builder further up - selectStatement = generateSelectStatement(json, knex) - } - let foundLimit = limit || BASE_LIMIT + // start building the query + let query = this.knexWithAlias(knex, endpoint, tableAliases) // handle pagination let foundOffset: number | null = null + let foundLimit = limits?.query || limits?.base if (paginate && paginate.page && paginate.limit) { // @ts-ignore const page = paginate.page <= 1 ? 0 : paginate.page - 1 const offset = page * paginate.limit foundLimit = paginate.limit foundOffset = offset + } else if (paginate && paginate.offset && paginate.limit) { + foundLimit = paginate.limit + foundOffset = paginate.offset } else if (paginate && paginate.limit) { foundLimit = paginate.limit } - // start building the query - let query = this.knexWithAlias(knex, endpoint, tableAliases) - query = query.limit(foundLimit) - if (foundOffset) { - query = query.offset(foundOffset) + // counting should not sort, limit or offset + if (!counting) { + // add the found limit if supplied + if (foundLimit != null) { + query = query.limit(foundLimit) + } + // add overall pagination + if (foundOffset != null) { + query = query.offset(foundOffset) + } + // add sorting to pre-query + // no point in sorting when counting + query = this.addSorting(query, json) } + // add filters to the query (where) query = this.addFilters(query, filters, json.meta.table, { + columnPrefix: json.meta.columnPrefix, aliases: tableAliases, }) - // add sorting to pre-query - query = this.addSorting(query, json) const alias = tableAliases?.[tableName] || tableName - let preQuery = knex({ - [alias]: query, - } as any).select(selectStatement) as any + let preQuery: Knex.QueryBuilder = knex({ + // the typescript definition for the knex constructor doesn't support this + // syntax, but it is the only way to alias a pre-query result as part of + // a query - there is an alias dictionary type, but it assumes it can only + // be a table name, not a pre-query + [alias]: query as any, + }) + // if counting, use distinct count, else select + preQuery = !counting + ? 
preQuery.select(generateSelectStatement(json, knex)) + : this.addDistinctCount(preQuery, json) // have to add after as well (this breaks MS-SQL) - if (this.client !== SqlClient.MS_SQL) { + if (this.client !== SqlClient.MS_SQL && !counting) { preQuery = this.addSorting(preQuery, json) } // handle joins @@ -627,7 +698,15 @@ class InternalBuilder { endpoint.schema, tableAliases ) + + // add a base limit over the whole query + // if counting we can't set this limit + if (limits?.base) { + query = query.limit(limits.base) + } + return this.addFilters(query, filters, json.meta.table, { + columnPrefix: json.meta.columnPrefix, relationship: true, aliases: tableAliases, }) @@ -638,6 +717,7 @@ class InternalBuilder { let query = this.knexWithAlias(knex, endpoint, tableAliases) const parsedBody = parseBody(body) query = this.addFilters(query, filters, json.meta.table, { + columnPrefix: json.meta.columnPrefix, aliases: tableAliases, }) // mysql can't use returning @@ -652,6 +732,7 @@ class InternalBuilder { const { endpoint, filters, tableAliases } = json let query = this.knexWithAlias(knex, endpoint, tableAliases) query = this.addFilters(query, filters, json.meta.table, { + columnPrefix: json.meta.columnPrefix, aliases: tableAliases, }) // mysql can't use returning @@ -671,6 +752,19 @@ class SqlQueryBuilder extends SqlTableQueryBuilder { this.limit = limit } + private convertToNative(query: Knex.QueryBuilder, opts: QueryOptions = {}) { + const sqlClient = this.getSqlClient() + if (opts?.disableBindings) { + return { sql: query.toString() } + } else { + let native = getNativeSql(query) + if (sqlClient === SqlClient.SQL_LITE) { + native = convertBooleans(native) + } + return native + } + } + /** * @param json The JSON query DSL which is to be converted to SQL. * @param opts extra options which are to be passed into the query builder, e.g. 
disableReturning @@ -694,7 +788,16 @@ class SqlQueryBuilder extends SqlTableQueryBuilder { query = builder.create(client, json, opts) break case Operation.READ: - query = builder.read(client, json, this.limit) + query = builder.read(client, json, { + limits: { + query: this.limit, + base: BASE_LIMIT, + }, + }) + break + case Operation.COUNT: + // read without any limits to count + query = builder.read(client, json) break case Operation.UPDATE: query = builder.update(client, json, opts) @@ -705,6 +808,9 @@ class SqlQueryBuilder extends SqlTableQueryBuilder { case Operation.BULK_CREATE: query = builder.bulkCreate(client, json) break + case Operation.BULK_UPSERT: + query = builder.bulkUpsert(client, json) + break case Operation.CREATE_TABLE: case Operation.UPDATE_TABLE: case Operation.DELETE_TABLE: @@ -713,15 +819,7 @@ class SqlQueryBuilder extends SqlTableQueryBuilder { throw `Operation type is not supported by SQL query builder` } - if (opts?.disableBindings) { - return { sql: query.toString() } - } else { - let native = getNativeSql(query) - if (sqlClient === SqlClient.SQL_LITE) { - native = convertBooleans(native) - } - return native - } + return this.convertToNative(query, opts) } async getReturningRow(queryFn: QueryFunction, json: QueryJson) { @@ -797,6 +895,9 @@ class SqlQueryBuilder extends SqlTableQueryBuilder { await this.getReturningRow(queryFn, this.checkLookupKeys(id, json)) ) } + if (operation === Operation.COUNT) { + return results + } if (operation !== Operation.READ) { return row } diff --git a/packages/backend-core/src/sql/sqlStatements.ts b/packages/backend-core/src/sql/sqlStatements.ts index a80defd8b8..311f7c7d49 100644 --- a/packages/backend-core/src/sql/sqlStatements.ts +++ b/packages/backend-core/src/sql/sqlStatements.ts @@ -5,19 +5,27 @@ export class SqlStatements { client: string table: Table allOr: boolean | undefined + columnPrefix: string | undefined + constructor( client: string, table: Table, - { allOr }: { allOr?: boolean } = {} + { allOr, columnPrefix }: { allOr?: boolean; columnPrefix?: string } = {} ) { this.client = client this.table = table this.allOr = allOr + this.columnPrefix = columnPrefix } getField(key: string): FieldSchema | undefined { const fieldName = key.split(".")[1] - return this.table.schema[fieldName] + let found = this.table.schema[fieldName] + if (!found && this.columnPrefix) { + const prefixRemovedFieldName = fieldName.replace(this.columnPrefix, "") + found = this.table.schema[prefixRemovedFieldName] + } + return found } between( diff --git a/packages/backend-core/src/sql/sqlTable.ts b/packages/backend-core/src/sql/sqlTable.ts index 09f9908baa..bdc8a3dd69 100644 --- a/packages/backend-core/src/sql/sqlTable.ts +++ b/packages/backend-core/src/sql/sqlTable.ts @@ -109,8 +109,10 @@ function generateSchema( const { tableName } = breakExternalTableId(column.tableId) // @ts-ignore const relatedTable = tables[tableName] - if (!relatedTable) { - throw new Error("Referenced table doesn't exist") + if (!relatedTable || !relatedTable.primary) { + throw new Error( + "Referenced table doesn't exist or has no primary keys" + ) } const relatedPrimary = relatedTable.primary[0] const externalType = relatedTable.schema[relatedPrimary].externalType diff --git a/packages/backend-core/src/sql/utils.ts b/packages/backend-core/src/sql/utils.ts index 2d9b289417..45ab510948 100644 --- a/packages/backend-core/src/sql/utils.ts +++ b/packages/backend-core/src/sql/utils.ts @@ -55,10 +55,7 @@ export function buildExternalTableId(datasourceId: string, tableName: string) 
{ return `${datasourceId}${DOUBLE_SEPARATOR}${tableName}` } -export function breakExternalTableId(tableId: string | undefined) { - if (!tableId) { - return {} - } +export function breakExternalTableId(tableId: string) { const parts = tableId.split(DOUBLE_SEPARATOR) let datasourceId = parts.shift() // if they need joined @@ -67,6 +64,9 @@ if (tableName.includes(ENCODED_SPACE)) { tableName = decodeURIComponent(tableName) } + if (!datasourceId || !tableName) { + throw new Error("Unable to get datasource/table name from table ID") + } return { datasourceId, tableName } } diff --git a/packages/backend-core/src/tenancy/db.ts b/packages/backend-core/src/tenancy/db.ts index 10477a8579..d430296a5c 100644 --- a/packages/backend-core/src/tenancy/db.ts +++ b/packages/backend-core/src/tenancy/db.ts @@ -1,6 +1,21 @@ import { getDB } from "../db/db" import { getGlobalDBName } from "../context" +import { TenantInfo } from "@budibase/types" export function getTenantDB(tenantId: string) { return getDB(getGlobalDBName(tenantId)) } + +export async function saveTenantInfo(tenantInfo: TenantInfo) { + const db = getTenantDB(tenantInfo.tenantId) + // save the tenant info to db + return db.put({ + _id: "tenant_info", + ...tenantInfo, + }) +} + +export async function getTenantInfo(tenantId: string): Promise<TenantInfo> { + const db = getTenantDB(tenantId) + return db.get("tenant_info") +} diff --git a/packages/backend-core/tests/core/utilities/structures/accounts.ts b/packages/backend-core/tests/core/utilities/structures/accounts.ts index 7dcc2de116..29453ad60a 100644 --- a/packages/backend-core/tests/core/utilities/structures/accounts.ts +++ b/packages/backend-core/tests/core/utilities/structures/accounts.ts @@ -24,7 +24,6 @@ export const account = (partial: Partial<Account> = {}): Account => { createdAt: Date.now(), verified: true, verificationSent: true, - tier: "FREE", // DEPRECATED authType: AuthType.PASSWORD, name: generator.name(), size: "10+", diff --git a/packages/bbui/src/Modal/Modal.svelte b/packages/bbui/src/Modal/Modal.svelte index 4656be69d1..dec1455d0c 100644 --- a/packages/bbui/src/Modal/Modal.svelte +++ b/packages/bbui/src/Modal/Modal.svelte @@ -162,6 +162,7 @@ max-height: 100%; } .modal-inner-wrapper { + padding: 40px; flex: 1 1 auto; display: flex; flex-direction: row; @@ -176,7 +177,6 @@ border: 2px solid var(--spectrum-global-color-gray-200); overflow: visible; max-height: none; - margin: 40px 0; transform: none; --spectrum-dialog-confirm-border-radius: var( --spectrum-global-dimension-size-100 diff --git a/packages/builder/src/components/automation/SetupPanel/AutomationBlockSetup.svelte b/packages/builder/src/components/automation/SetupPanel/AutomationBlockSetup.svelte index 85ae1924d0..01fde43d12 100644 --- a/packages/builder/src/components/automation/SetupPanel/AutomationBlockSetup.svelte +++ b/packages/builder/src/components/automation/SetupPanel/AutomationBlockSetup.svelte @@ -15,6 +15,9 @@ Checkbox, DatePicker, DrawerContent, + Toggle, + Icon, + Divider, } from "@budibase/bbui" import CreateWebhookModal from "components/automation/Shared/CreateWebhookModal.svelte" import { automationStore, selectedAutomation, tables } from "stores/builder" @@ -40,7 +43,7 @@ EditorModes, } from "components/common/CodeEditor" import FilterBuilder from "components/design/settings/controls/FilterEditor/FilterBuilder.svelte" - import { LuceneUtils, Utils } from "@budibase/frontend-core" + import { QueryUtils, Utils, search } from "@budibase/frontend-core" import {
getSchemaForDatasourcePlus, getEnvironmentBindings, @@ -72,7 +75,11 @@ $: schema = getSchemaForDatasourcePlus(tableId, { searchableSchema: true, }).schema - $: schemaFields = Object.values(schema || {}) + $: schemaFields = search.getFields( + $tables.list, + Object.values(schema || {}), + { allowLinks: true } + ) $: queryLimit = tableId?.includes("datasource") ? "∞" : "1000" $: isTrigger = block?.type === "TRIGGER" $: isUpdateRow = stepId === ActionStepID.UPDATE_ROW @@ -88,6 +95,8 @@ ? [hbAutocomplete([...bindingsToCompletions(bindings, codeMode)])] : [] + let testDataRowVisibility = {} + const getInputData = (testData, blockInputs) => { // Test data is not cloned for reactivity let newInputData = testData || cloneDeep(blockInputs) @@ -118,7 +127,6 @@ searchableSchema: true, }).schema } - try { if (isTestModal) { let newTestData = { schema } @@ -196,7 +204,8 @@ (automation.trigger?.event === "row:update" || automation.trigger?.event === "row:save") ) { - if (name !== "id" && name !== "revision") return `trigger.row.${name}` + let noRowKeywordBindings = ["id", "revision", "oldRow"] + if (!noRowKeywordBindings.includes(name)) return `trigger.row.${name}` } /* End special cases for generating custom schemas based on triggers */ @@ -343,7 +352,7 @@ } function saveFilters(key) { - const filters = LuceneUtils.buildLuceneQuery(tempFilters) + const filters = QueryUtils.buildQuery(tempFilters) const defKey = `${key}-def` onChange({ detail: filters }, key) // need to store the builder definition in the automation @@ -372,7 +381,11 @@ function getFieldLabel(key, value) { const requiredSuffix = requiredProperties.includes(key) ? "*" : "" - return `${value.title || (key === "row" ? "Table" : key)} ${requiredSuffix}` + return `${value.title || (key === "row" ? "Row" : key)} ${requiredSuffix}` + } + + function toggleTestDataRowVisibility(key) { + testDataRowVisibility[key] = !testDataRowVisibility[key] } function handleAttachmentParams(keyValueObj) { @@ -385,6 +398,16 @@ return params } + function toggleAttachmentBinding(e, key) { + onChange( + { + detail: "", + }, + key + ) + onChange({ detail: { useAttachmentBinding: e.detail } }, "meta") + } + onMount(async () => { try { await environment.loadVariables() @@ -462,27 +485,64 @@
-
- - onChange( - { - detail: e.detail.map(({ name, value }) => ({ - url: name, - filename: value, - })), - }, - key - )} - object={handleAttachmentParams(inputData[key])} - allowJS - {bindings} - keyBindings - customButtonText={"Add attachment"} - keyPlaceholder={"URL"} - valuePlaceholder={"Filename"} +
+ toggleAttachmentBinding(e, key)} />
+ +
+ {#if !inputData?.meta?.useAttachmentBinding} + + onChange( + { + detail: e.detail.map(({ name, value }) => ({ + url: name, + filename: value, + })), + }, + key + )} + object={handleAttachmentParams(inputData[key])} + allowJS + {bindings} + keyBindings + customButtonText={"Add attachment"} + keyPlaceholder={"URL"} + valuePlaceholder={"Filename"} + /> + {:else if isTestModal} + onChange(e, key)} + {bindings} + updateOnChange={false} + /> + {:else} +
+ onChange(e, key)} + {bindings} + updateOnChange={false} + placeholder={value.customType === "queryLimit" + ? queryLimit + : ""} + drawerLeft="260px" + /> +
+ {/if} +
{:else if value.customType === "filters"} Define filters @@ -560,20 +620,48 @@ on:change={e => onChange(e, key)} /> {:else if value.customType === "row"} - { - if (e.detail?.key) { - onChange(e, e.detail.key) - } else { - onChange(e, key) - } - }} - {bindings} - {isTestModal} - {isUpdateRow} - /> + {#if isTestModal} +
+ toggleTestDataRowVisibility(key)} + /> + +
+ {#if testDataRowVisibility[key]} + { + if (e.detail?.key) { + onChange(e, e.detail.key) + } else { + onChange(e, key) + } + }} + {bindings} + {isTestModal} + {isUpdateRow} + /> + {/if} + + {:else} + { + if (e.detail?.key) { + onChange(e, e.detail.key) + } else { + onChange(e, key) + } + }} + {bindings} + {isTestModal} + {isUpdateRow} + /> + {/if} {:else if value.customType === "webhookUrl"} onChange(e, key)} @@ -689,6 +777,12 @@ width: 320px; } + .align-horizontally { + display: flex; + gap: var(--spacing-s); + align-items: center; + } + .fields { display: flex; flex-direction: column; diff --git a/packages/builder/src/components/automation/SetupPanel/RowSelector.svelte b/packages/builder/src/components/automation/SetupPanel/RowSelector.svelte index b5a54138ca..bd3bcda774 100644 --- a/packages/builder/src/components/automation/SetupPanel/RowSelector.svelte +++ b/packages/builder/src/components/automation/SetupPanel/RowSelector.svelte @@ -10,12 +10,12 @@ import { TableNames } from "constants" const dispatch = createEventDispatcher() - export let value export let meta export let bindings export let isTestModal export let isUpdateRow + $: parsedBindings = bindings.map(binding => { let clone = Object.assign({}, binding) clone.icon = "ShareAndroid" @@ -94,17 +94,22 @@ dispatch("change", newValue) } - const onChangeSetting = (e, field) => { - let fields = {} - fields[field] = { - clearRelationships: e.detail, + const onChangeSetting = (field, key, value) => { + let newField = {} + newField[field] = { + [key]: value, } + + let updatedFields = { + ...meta?.fields, + ...newField, + } + dispatch("change", { key: "meta", - fields, + fields: updatedFields, }) } - // Ensure any nullish tableId values get set to empty string so // that the select works $: if (value?.tableId == null) value = { tableId: "" } @@ -157,6 +162,9 @@ bindings={parsedBindings} {value} {onChange} + useAttachmentBinding={meta?.fields?.[field] + ?.useAttachmentBinding} + {onChangeSetting} /> {/if} @@ -167,7 +175,8 @@ value={meta.fields?.[field]?.clearRelationships} text={"Clear relationships if empty?"} size={"S"} - on:change={e => onChangeSetting(e, field)} + on:change={e => + onChangeSetting(field, "clearRelationships", e.detail)} /> {/if} diff --git a/packages/builder/src/components/automation/SetupPanel/RowSelectorTypes.svelte b/packages/builder/src/components/automation/SetupPanel/RowSelectorTypes.svelte index 0a27360347..a43ff35c80 100644 --- a/packages/builder/src/components/automation/SetupPanel/RowSelectorTypes.svelte +++ b/packages/builder/src/components/automation/SetupPanel/RowSelectorTypes.svelte @@ -1,5 +1,11 @@ {#if schemaHasOptions(schema) && schema.type !== "array"} @@ -108,38 +131,65 @@ useLabel={false} /> {:else if attachmentTypes.includes(schema.type)} -
- - onChange( - { - detail: - schema.type === FieldType.ATTACHMENT_SINGLE || - schema.type === FieldType.SIGNATURE_SINGLE - ? e.detail.length > 0 - ? { - url: e.detail[0].name, - filename: e.detail[0].value, - } - : {} - : e.detail.map(({ name, value }) => ({ - url: name, - filename: value, - })), - }, - field - )} - object={handleAttachmentParams(value[field])} - allowJS - {bindings} - keyBindings - customButtonText={"Add attachment"} - keyPlaceholder={"URL"} - valuePlaceholder={"Filename"} - actionButtonDisabled={(schema.type === FieldType.ATTACHMENT_SINGLE || - schema.type === FieldType.SIGNATURE) && - Object.keys(value[field]).length >= 1} - /> +
+
+ handleToggleChange(field, e)} + /> +
+ {#if !useAttachmentBinding} +
+ { + onChange( + { + detail: + schema.type === FieldType.ATTACHMENT_SINGLE || + schema.type === FieldType.SIGNATURE_SINGLE + ? e.detail.length > 0 + ? { + url: e.detail[0].name, + filename: e.detail[0].value, + } + : {} + : e.detail.map(({ name, value }) => ({ + url: name, + filename: value, + })), + }, + field + ) + }} + object={handleAttachmentParams(value[field])} + allowJS + {bindings} + keyBindings + customButtonText={"Add attachment"} + keyPlaceholder={"URL"} + valuePlaceholder={"Filename"} + actionButtonDisabled={(schema.type === FieldType.ATTACHMENT_SINGLE || + schema.type === FieldType.SIGNATURE) && + Object.keys(value[field]).length >= 1} + /> +
+ {:else} +
+ onChange(e, field)} + type="string" + bindings={parsedBindings} + allowJS={true} + updateOnChange={false} + title={schema.name} + /> +
+ {/if}
{:else if ["string", "number", "bigint", "barcodeqr", "array"].includes(schema.type)} - .attachment-field-spacinng { + .attachment-field-spacing, + .json-input-spacing { margin-top: var(--spacing-s); margin-bottom: var(--spacing-l); } diff --git a/packages/builder/src/components/backend/DataTable/ViewV2DataTable.svelte b/packages/builder/src/components/backend/DataTable/ViewV2DataTable.svelte index 3b628c7b53..646b764a2c 100644 --- a/packages/builder/src/components/backend/DataTable/ViewV2DataTable.svelte +++ b/packages/builder/src/components/backend/DataTable/ViewV2DataTable.svelte @@ -1,6 +1,6 @@ + +
+ + + + + {/if}
diff --git a/packages/builder/src/components/design/settings/controls/ButtonActionEditor/actions/ShowNotification.svelte b/packages/builder/src/components/design/settings/controls/ButtonActionEditor/actions/ShowNotification.svelte index d95e13cb5f..ef6232b382 100644 --- a/packages/builder/src/components/design/settings/controls/ButtonActionEditor/actions/ShowNotification.svelte +++ b/packages/builder/src/components/design/settings/controls/ButtonActionEditor/actions/ShowNotification.svelte @@ -25,6 +25,8 @@ }, ] + const MAX_DURATION = 120000 // Maximum duration in milliseconds (2 minutes) + onMount(() => { if (!parameters.type) { parameters.type = "success" @@ -33,6 +35,14 @@ parameters.autoDismiss = true } }) + + function handleDurationChange(event) { + let newDuration = event.detail + if (newDuration > MAX_DURATION) { + newDuration = MAX_DURATION + } + parameters.duration = newDuration + }
@@ -47,6 +57,16 @@ />
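The change above caps a configured toast duration at MAX_DURATION in the builder, while the client notification store (updated later in this diff) falls back to its default timeout when no duration is supplied. A minimal sketch of the combined behaviour, assuming only the values shown in this diff; resolveNotificationTimeout is an illustrative helper, not code from the change:

// Illustrative only: combines the builder-side clamp above with the
// `duration || DEFAULT_NOTIFICATION_TIMEOUT` fallback added to the
// client notification store elsewhere in this diff.
const DEFAULT_NOTIFICATION_TIMEOUT = 3000 // client store default
const MAX_DURATION = 120000 // builder cap, 2 minutes

const resolveNotificationTimeout = duration => {
  // Missing/zero durations fall back to the default, then cap at 2 minutes
  return Math.min(duration || DEFAULT_NOTIFICATION_TIMEOUT, MAX_DURATION)
}

// e.g. resolveNotificationTimeout(5000) === 5000, while
// resolveNotificationTimeout(600000) is capped to 120000.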
diff --git a/packages/client/src/components/app/SidePanel.svelte b/packages/client/src/components/app/SidePanel.svelte index bff5a78837..d49ab33c40 100644 --- a/packages/client/src/components/app/SidePanel.svelte +++ b/packages/client/src/components/app/SidePanel.svelte @@ -29,10 +29,6 @@ } } - // $: { - - // } - // Derive visibility $: open = $sidePanelStore.contentId === $component.id diff --git a/packages/client/src/components/app/Text.svelte b/packages/client/src/components/app/Text.svelte index 1037725ff8..fa15868d0f 100644 --- a/packages/client/src/components/app/Text.svelte +++ b/packages/client/src/components/app/Text.svelte @@ -13,6 +13,7 @@ export let size let node + let touched = false $: $component.editing && node?.focus() $: placeholder = $builderStore.inBuilder && !text && !$component.editing @@ -46,7 +47,10 @@ // Convert contenteditable HTML to text and save const updateText = e => { - builderStore.actions.updateProp("text", e.target.textContent) + if (touched) { + builderStore.actions.updateProp("text", e.target.textContent) + } + touched = false } @@ -61,6 +65,7 @@ class:underline class="spectrum-Body {sizeClass} {alignClass}" on:blur={$component.editing ? updateText : null} + on:input={() => (touched = true)} > {componentText}

diff --git a/packages/client/src/components/app/blocks/ChartBlock.svelte b/packages/client/src/components/app/blocks/ChartBlock.svelte index ddfc7b522b..4bd2fec0da 100644 --- a/packages/client/src/components/app/blocks/ChartBlock.svelte +++ b/packages/client/src/components/app/blocks/ChartBlock.svelte @@ -35,6 +35,7 @@ export let valueUnits export let yAxisLabel export let xAxisLabel + export let yAxisUnits export let curve // Area @@ -85,6 +86,7 @@ valueUnits, yAxisLabel, xAxisLabel, + yAxisUnits, stacked, horizontal, curve, diff --git a/packages/client/src/components/app/blocks/FormBlockComponent.svelte b/packages/client/src/components/app/blocks/FormBlockComponent.svelte index 396dfcf808..f931319118 100644 --- a/packages/client/src/components/app/blocks/FormBlockComponent.svelte +++ b/packages/client/src/components/app/blocks/FormBlockComponent.svelte @@ -68,6 +68,15 @@ maximum: schema?.constraints?.length?.maximum, } }, + [FieldType.DATETIME]: (_field, schema) => { + const props = { + valueAsTimestamp: !schema?.timeOnly, + } + if (schema?.dateOnly) { + props.enableTime = false + } + return props + }, } const fieldSchema = getFieldSchema(field) diff --git a/packages/client/src/components/app/blocks/form/FormBlock.svelte b/packages/client/src/components/app/blocks/form/FormBlock.svelte index d249569731..e3aa20ffa6 100644 --- a/packages/client/src/components/app/blocks/form/FormBlock.svelte +++ b/packages/client/src/components/app/blocks/form/FormBlock.svelte @@ -31,41 +31,23 @@ let schema - $: formattedFields = convertOldFieldFormat(fields) - $: fieldsOrDefault = getDefaultFields(formattedFields, schema) $: fetchSchema(dataSource) $: id = $component.id - // We could simply spread $$props into the inner form and append our - // additions, but that would create svelte warnings about unused props and - // make maintenance in future more confusing as we typically always have a - // proper mapping of schema settings to component exports, without having to - // search multiple files - $: innerProps = { - dataSource, - actionUrl, - actionType, - size, - disabled, - fields: fieldsOrDefault, - title, - description, - schema, - notificationOverride, - buttons: - buttons || - Utils.buildFormBlockButtonConfig({ - _id: id, - showDeleteButton, - showSaveButton, - saveButtonLabel, - deleteButtonLabel, - notificationOverride, - actionType, - actionUrl, - dataSource, - }), - buttonPosition: buttons ? buttonPosition : "top", - } + $: formattedFields = convertOldFieldFormat(fields) + $: fieldsOrDefault = getDefaultFields(formattedFields, schema) + $: buttonsOrDefault = + buttons || + Utils.buildFormBlockButtonConfig({ + _id: id, + showDeleteButton, + showSaveButton, + saveButtonLabel, + deleteButtonLabel, + notificationOverride, + actionType, + actionUrl, + dataSource, + }) // Provide additional data context for live binding eval export const getAdditionalDataContext = () => { @@ -123,5 +105,18 @@ - + diff --git a/packages/client/src/components/app/blocks/form/InnerFormBlock.svelte b/packages/client/src/components/app/blocks/form/InnerFormBlock.svelte index b0733f3f4b..0227107dd2 100644 --- a/packages/client/src/components/app/blocks/form/InnerFormBlock.svelte +++ b/packages/client/src/components/app/blocks/form/InnerFormBlock.svelte @@ -91,15 +91,13 @@ {#if description} {/if} - {#key fields} - -
- {#each fields as field, idx} - - {/each} -
-
- {/key} + +
+ {#each fields as field, idx} + + {/each} +
+
{#if buttonPosition === "bottom"} { - const changed = fieldApi.setValue(e.detail) - if (onChange && changed) { - onChange({ value: e.detail }) + let value = e.detail + if (timeOnly && valueAsTimestamp) { + if (!isValidDate(value)) { + // Handle time only fields that are timestamps under the hood + value = timeToDateISOString(value) + } } + + const changed = fieldApi.setValue(value) + if (onChange && changed) { + onChange({ value }) + } + } + + const isValidDate = value => !isNaN(new Date(value)) + + const timeToDateISOString = value => { + let [hours, minutes] = value.split(":").map(Number) + + const date = new Date() + date.setHours(hours) + date.setMinutes(minutes) + date.setSeconds(0) + date.setMilliseconds(0) + return date.toISOString() } diff --git a/packages/client/src/components/app/forms/Field.svelte b/packages/client/src/components/app/forms/Field.svelte index 74ff5442a9..9210b6ea8f 100644 --- a/packages/client/src/components/app/forms/Field.svelte +++ b/packages/client/src/components/app/forms/Field.svelte @@ -26,6 +26,10 @@ // Register field with form const formApi = formContext?.formApi const labelPos = fieldGroupContext?.labelPosition || "above" + + let touched = false + let labelNode + $: formStep = formStepContext ? $formStepContext || 1 : 1 $: formField = formApi?.registerField( field, @@ -36,14 +40,12 @@ validation, formStep ) - $: schemaType = fieldSchema?.type !== "formula" && fieldSchema?.type !== "bigint" ? fieldSchema?.type : "string" // Focus label when editing - let labelNode $: $component.editing && labelNode?.focus() // Update form properties in parent component on every store change @@ -57,7 +59,10 @@ $: labelClass = labelPos === "above" ? "" : `spectrum-FieldLabel--${labelPos}` const updateLabel = e => { - builderStore.actions.updateProp("label", e.target.textContent) + if (touched) { + builderStore.actions.updateProp("label", e.target.textContent) + } + touched = false } onDestroy(() => { @@ -79,6 +84,7 @@ bind:this={labelNode} contenteditable={$component.editing} on:blur={$component.editing ? 
updateLabel : null} + on:input={() => (touched = true)} class:hidden={!label} class:readonly for={fieldState?.fieldId} diff --git a/packages/client/src/components/app/forms/InnerForm.svelte b/packages/client/src/components/app/forms/InnerForm.svelte index 55c9c938c3..bb2f51fc95 100644 --- a/packages/client/src/components/app/forms/InnerForm.svelte +++ b/packages/client/src/components/app/forms/InnerForm.svelte @@ -206,7 +206,7 @@ error: initialError, disabled: disabled || fieldDisabled || (isAutoColumn && !editAutoColumns), - readonly: readonly || fieldReadOnly, + readonly: readonly || fieldReadOnly || schema?.[field]?.readonly, defaultValue, validator, lastUpdate: Date.now(), diff --git a/packages/client/src/components/app/index.js b/packages/client/src/components/app/index.js index e23e19704c..6d9df6e588 100644 --- a/packages/client/src/components/app/index.js +++ b/packages/client/src/components/app/index.js @@ -37,6 +37,7 @@ export { default as markdownviewer } from "./MarkdownViewer.svelte" export { default as embeddedmap } from "./embedded-map/EmbeddedMap.svelte" export { default as grid } from "./Grid.svelte" export { default as sidepanel } from "./SidePanel.svelte" +export { default as modal } from "./Modal.svelte" export { default as gridblock } from "./GridBlock.svelte" export * from "./charts" export * from "./forms" diff --git a/packages/client/src/components/overlay/ConfirmationDisplay.svelte b/packages/client/src/components/overlay/ConfirmationDisplay.svelte index e7a1046191..b96af502df 100644 --- a/packages/client/src/components/overlay/ConfirmationDisplay.svelte +++ b/packages/client/src/components/overlay/ConfirmationDisplay.svelte @@ -8,6 +8,8 @@ {$confirmationStore.text} diff --git a/packages/client/src/components/preview/IndicatorSet.svelte b/packages/client/src/components/preview/IndicatorSet.svelte index 3cbd7e2464..2b941b2662 100644 --- a/packages/client/src/components/preview/IndicatorSet.svelte +++ b/packages/client/src/components/preview/IndicatorSet.svelte @@ -57,7 +57,9 @@ return } nextState.indicators[idx].visible = - nextState.indicators[idx].insideSidePanel || entries[0].isIntersecting + nextState.indicators[idx].insideModal || + nextState.indicators[idx].insideSidePanel || + entries[0].isIntersecting if (++callbackCount === observers.length) { state = nextState updating = false @@ -139,6 +141,7 @@ height: elBounds.height + 4, visible: false, insideSidePanel: !!child.closest(".side-panel"), + insideModal: !!child.closest(".modal-content"), }) }) } diff --git a/packages/client/src/components/preview/SettingsBar.svelte b/packages/client/src/components/preview/SettingsBar.svelte index b69b8ce050..c5109c6bca 100644 --- a/packages/client/src/components/preview/SettingsBar.svelte +++ b/packages/client/src/components/preview/SettingsBar.svelte @@ -41,7 +41,7 @@ allSettings.push(setting) } }) - return allSettings.filter(setting => setting.showInBar) + return allSettings.filter(setting => setting.showInBar && !setting.hidden) } const updatePosition = () => { diff --git a/packages/client/src/sdk.js b/packages/client/src/sdk.js index 1f996bf656..50d3f857d5 100644 --- a/packages/client/src/sdk.js +++ b/packages/client/src/sdk.js @@ -11,6 +11,7 @@ import { currentRole, environmentStore, sidePanelStore, + modalStore, dndIsDragging, confirmationStore, roleStore, @@ -31,7 +32,7 @@ import { enrichButtonActions } from "./utils/buttonActions.js" import { processStringSync, makePropSafe } from "@budibase/string-templates" import { fetchData, - LuceneUtils, + QueryUtils, 
Constants, RowUtils, memo, @@ -53,6 +54,7 @@ export default { componentStore, environmentStore, sidePanelStore, + modalStore, dndIsDragging, currentRole, confirmationStore, @@ -65,7 +67,7 @@ export default { getAction, fetchDatasourceSchema, fetchData, - LuceneUtils, + QueryUtils, ContextScopes: Constants.ContextScopes, getAPIKey, enrichButtonActions, diff --git a/packages/client/src/stores/confirmation.js b/packages/client/src/stores/confirmation.js index bb9a54386f..3fbf3d5deb 100644 --- a/packages/client/src/stores/confirmation.js +++ b/packages/client/src/stores/confirmation.js @@ -4,6 +4,8 @@ const initialState = { showConfirmation: false, title: null, text: null, + confirmButtonText: null, + cancelButtonText: null, onConfirm: null, onCancel: null, } @@ -11,11 +13,20 @@ const initialState = { const createConfirmationStore = () => { const store = writable(initialState) - const showConfirmation = (title, text, onConfirm, onCancel) => { + const showConfirmation = ( + title, + text, + onConfirm, + onCancel, + confirmButtonText, + cancelButtonText + ) => { store.set({ showConfirmation: true, title, text, + confirmButtonText, + cancelButtonText, onConfirm, onCancel, }) diff --git a/packages/client/src/stores/index.js b/packages/client/src/stores/index.js index e9b1ce4434..f2b80ed732 100644 --- a/packages/client/src/stores/index.js +++ b/packages/client/src/stores/index.js @@ -27,6 +27,7 @@ export { dndIsDragging, } from "./dnd" export { sidePanelStore } from "./sidePanel" +export { modalStore } from "./modal" export { hoverStore } from "./hover" // Context stores are layered and duplicated, so it is not a singleton diff --git a/packages/client/src/stores/modal.js b/packages/client/src/stores/modal.js new file mode 100644 index 0000000000..4d1331283d --- /dev/null +++ b/packages/client/src/stores/modal.js @@ -0,0 +1,32 @@ +import { writable } from "svelte/store" + +export const createModalStore = () => { + const initialState = { + contentId: null, + } + const store = writable(initialState) + + const open = id => { + store.update(state => { + state.contentId = id + return state + }) + } + + const close = () => { + store.update(state => { + state.contentId = null + return state + }) + } + + return { + subscribe: store.subscribe, + actions: { + open, + close, + }, + } +} + +export const modalStore = createModalStore() diff --git a/packages/client/src/stores/notification.js b/packages/client/src/stores/notification.js index 2a9f9749ec..054117aaba 100644 --- a/packages/client/src/stores/notification.js +++ b/packages/client/src/stores/notification.js @@ -1,7 +1,7 @@ import { writable, get } from "svelte/store" import { routeStore } from "./routes" -const NOTIFICATION_TIMEOUT = 3000 +const DEFAULT_NOTIFICATION_TIMEOUT = 3000 const createNotificationStore = () => { let block = false @@ -18,13 +18,13 @@ const createNotificationStore = () => { type = "info", icon, autoDismiss = true, + duration, count = 1 ) => { if (block) { return } - // If peeking, pass notifications back to parent window if (get(routeStore).queryParams?.peek) { window.parent.postMessage({ type: "notification", @@ -32,11 +32,13 @@ const createNotificationStore = () => { message, type, icon, + duration, autoDismiss, }, }) return } + const _id = id() store.update(state => { const duplicateError = state.find(err => err.message === message) @@ -60,7 +62,7 @@ const createNotificationStore = () => { if (autoDismiss) { setTimeout(() => { dismiss(_id) - }, NOTIFICATION_TIMEOUT) + }, duration || DEFAULT_NOTIFICATION_TIMEOUT) } } @@ -74,14 
+76,14 @@ const createNotificationStore = () => { subscribe: store.subscribe, actions: { send, - info: (msg, autoDismiss) => - send(msg, "info", "Info", autoDismiss ?? true), - success: (msg, autoDismiss) => - send(msg, "success", "CheckmarkCircle", autoDismiss ?? true), - warning: (msg, autoDismiss) => - send(msg, "warning", "Alert", autoDismiss ?? true), - error: (msg, autoDismiss) => - send(msg, "error", "Alert", autoDismiss ?? false), + info: (msg, autoDismiss, duration) => + send(msg, "info", "Info", autoDismiss ?? true, duration), + success: (msg, autoDismiss, duration) => + send(msg, "success", "CheckmarkCircle", autoDismiss ?? true, duration), + warning: (msg, autoDismiss, duration) => + send(msg, "warning", "Alert", autoDismiss ?? true, duration), + error: (msg, autoDismiss, duration) => + send(msg, "error", "Alert", autoDismiss ?? false, duration), blockNotifications, dismiss, }, diff --git a/packages/client/src/utils/buttonActions.js b/packages/client/src/utils/buttonActions.js index 25e0892fc0..8f0cb575a7 100644 --- a/packages/client/src/utils/buttonActions.js +++ b/packages/client/src/utils/buttonActions.js @@ -12,6 +12,7 @@ import { uploadStore, rowSelectionStore, sidePanelStore, + modalStore, } from "stores" import { API } from "api" import { ActionTypes } from "constants" @@ -416,11 +417,11 @@ const continueIfHandler = action => { } const showNotificationHandler = action => { - const { message, type, autoDismiss } = action.parameters + const { message, type, autoDismiss, duration } = action.parameters if (!message || !type) { return } - notificationStore.actions[type]?.(message, autoDismiss) + notificationStore.actions[type]?.(message, autoDismiss, duration) } const promptUserHandler = () => {} @@ -436,6 +437,17 @@ const closeSidePanelHandler = () => { sidePanelStore.actions.close() } +const openModalHandler = action => { + const { id } = action.parameters + if (id) { + modalStore.actions.open(id) + } +} + +const closeModalHandler = () => { + modalStore.actions.close() +} + const downloadFileHandler = async action => { const { url, fileName } = action.parameters try { @@ -499,6 +511,8 @@ const handlerMap = { ["Prompt User"]: promptUserHandler, ["Open Side Panel"]: openSidePanelHandler, ["Close Side Panel"]: closeSidePanelHandler, + ["Open Modal"]: openModalHandler, + ["Close Modal"]: closeModalHandler, ["Download File"]: downloadFileHandler, } @@ -508,6 +522,7 @@ const confirmTextMap = { ["Execute Query"]: "Are you sure you want to execute this query?", ["Trigger Automation"]: "Are you sure you want to trigger this automation?", ["Prompt User"]: "Are you sure you want to continue?", + ["Duplicate Row"]: "Are you sure you want to duplicate this row?", } /** @@ -568,6 +583,11 @@ export const enrichButtonActions = (actions, context) => { const defaultTitleText = action["##eventHandlerType"] const customTitleText = action.parameters?.customTitleText || defaultTitleText + const cancelButtonText = + action.parameters?.cancelButtonText || "Cancel" + const confirmButtonText = + action.parameters?.confirmButtonText || "Confirm" + confirmationStore.actions.showConfirmation( customTitleText, confirmText, @@ -598,7 +618,9 @@ export const enrichButtonActions = (actions, context) => { }, () => { resolve(false) - } + }, + confirmButtonText, + cancelButtonText ) }) } diff --git a/packages/client/src/utils/conditions.js b/packages/client/src/utils/conditions.js index 1914e942ad..a23d879e45 100644 --- a/packages/client/src/utils/conditions.js +++ b/packages/client/src/utils/conditions.js @@ 
-1,4 +1,4 @@ -import { LuceneUtils } from "@budibase/frontend-core" +import { QueryUtils } from "@budibase/frontend-core" export const getActiveConditions = conditions => { if (!conditions?.length) { @@ -33,8 +33,8 @@ export const getActiveConditions = conditions => { value: condition.referenceValue, } - const query = LuceneUtils.buildLuceneQuery([luceneCondition]) - const result = LuceneUtils.runLuceneQuery([luceneCondition], query) + const query = QueryUtils.buildQuery([luceneCondition]) + const result = QueryUtils.runQuery([luceneCondition], query) return result.length > 0 }) } diff --git a/packages/frontend-core/src/api/user.js b/packages/frontend-core/src/api/user.js index 3a815b768e..6a4932d4af 100644 --- a/packages/frontend-core/src/api/user.js +++ b/packages/frontend-core/src/api/user.js @@ -234,6 +234,16 @@ export const buildUserEndpoints = API => ({ }) }, + /** + * Removes multiple user invites from Redis cache + */ + removeUserInvites: async inviteCodes => { + return await API.post({ + url: "/api/global/users/multi/invite/delete", + body: inviteCodes, + }) + }, + /** * Accepts an invite to join the platform and creates a user. * @param inviteCode the invite code sent in the email diff --git a/packages/frontend-core/src/components/FilterBuilder.svelte b/packages/frontend-core/src/components/FilterBuilder.svelte index 82c3a04dc2..5f58c9ea7f 100644 --- a/packages/frontend-core/src/components/FilterBuilder.svelte +++ b/packages/frontend-core/src/components/FilterBuilder.svelte @@ -13,12 +13,11 @@ } from "@budibase/bbui" import { FieldType, SearchFilterOperator } from "@budibase/types" import { generate } from "shortid" - import { LuceneUtils, Constants } from "@budibase/frontend-core" + import { QueryUtils, Constants } from "@budibase/frontend-core" import { getContext } from "svelte" import FilterUsers from "./FilterUsers.svelte" - import { getFields } from "../utils/searchFields" - const { OperatorOptions } = Constants + const { OperatorOptions, DEFAULT_BB_DATASOURCE_ID } = Constants export let schemaFields export let filters = [] @@ -28,6 +27,23 @@ export let allowBindings = false export let filtersLabel = "Filters" + $: { + if ( + tables.find( + table => + table._id === datasource.tableId && + table.sourceId === DEFAULT_BB_DATASOURCE_ID + ) && + !schemaFields.some(field => field.name === "_id") + ) { + schemaFields = [ + ...schemaFields, + { name: "_id", type: "string" }, + { name: "_rev", type: "string" }, + ] + } + } + $: matchAny = filters?.find(filter => filter.operator === "allOr") != null $: onEmptyFilter = filters?.find(filter => filter.onEmptyFilter)?.onEmptyFilter ?? 
"all" @@ -35,7 +51,6 @@ $: fieldFilters = filters.filter( filter => filter.operator !== "allOr" && !filter.onEmptyFilter ) - const behaviourOptions = [ { value: "and", label: "Match all filters" }, { value: "or", label: "Match any filter" }, @@ -44,12 +59,9 @@ { value: "all", label: "Return all table rows" }, { value: "none", label: "Return no rows" }, ] - const context = getContext("context") - $: fieldOptions = getFields(tables, schemaFields || [], { - allowLinks: true, - }).map(field => ({ + $: fieldOptions = (schemaFields || []).map(field => ({ label: field.displayName || field.name, value: field.name, })) @@ -112,7 +124,7 @@ return [] } - return LuceneUtils.getValidOperatorsForType( + return QueryUtils.getValidOperatorsForType( filter, filter.field || filter.name, datasource diff --git a/packages/frontend-core/src/components/Updating.svelte b/packages/frontend-core/src/components/Updating.svelte index 7d4a101fee..7d14e57aba 100644 --- a/packages/frontend-core/src/components/Updating.svelte +++ b/packages/frontend-core/src/components/Updating.svelte @@ -1,18 +1,22 @@ @@ -54,7 +103,7 @@ quiet size="M" on:click={() => (open = !open)} - selected={open || anyHidden} + selected={open || anyRestricted} disabled={!$columns.length} > {text} @@ -64,27 +113,17 @@
- {#if $stickyColumn} -
- - {$stickyColumn.label} -
- - - {/if} - {#each $columns as column} + {#each displayColumns as column}
- {column.label} +
+ {column.label} +
toggleColumn(column, e.detail)} value={columnToPermissionOptions(column)} - {options} + options={column.options} /> {/each}
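columnToPermissionOptions and the per-column options array live in this component's script block, which this diff does not include. Judging by the visible/readonly flags used here and in the grid column store further down, the mapping plausibly reduces to something like this hypothetical sketch:

// Hypothetical reconstruction; the real implementation is in the unshown
// script block of this component.
const PERMISSION_OPTIONS = {
  WRITABLE: "writable",
  READONLY: "readonly",
  HIDDEN: "hidden",
}

const columnToPermissionOptions = column => {
  if (!column.visible) {
    return PERMISSION_OPTIONS.HIDDEN
  }
  if (column.readonly) {
    return PERMISSION_OPTIONS.READONLY
  }
  return PERMISSION_OPTIONS.WRITABLE
}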
@@ -102,7 +141,8 @@ display: grid; align-items: center; grid-template-columns: 1fr auto; - gap: 8px; + grid-row-gap: 8px; + grid-column-gap: 24px; } .columns :global(.spectrum-Switch) { margin-right: 0; @@ -111,4 +151,11 @@ display: flex; gap: 8px; } + .column-label { + min-width: 80px; + max-width: 200px; + text-overflow: ellipsis; + white-space: nowrap; + overflow: hidden; + } diff --git a/packages/frontend-core/src/components/grid/controls/ToggleActionButtonGroup.svelte b/packages/frontend-core/src/components/grid/controls/ToggleActionButtonGroup.svelte index e705b5016d..497e77c2c9 100644 --- a/packages/frontend-core/src/components/grid/controls/ToggleActionButtonGroup.svelte +++ b/packages/frontend-core/src/components/grid/controls/ToggleActionButtonGroup.svelte @@ -7,7 +7,6 @@ export let value export let options - export let disabled
@@ -15,7 +14,7 @@ dispatch("click", option.value)} - {disabled} + disabled={option.disabled} size="S" icon={option.icon} quiet @@ -30,7 +29,6 @@ .permissionPicker { display: flex; gap: var(--spacing-xs); - padding-left: calc(var(--spacing-xl) * 2); } .permissionPicker :global(.spectrum-Icon) { diff --git a/packages/frontend-core/src/components/grid/layout/ButtonColumn.svelte b/packages/frontend-core/src/components/grid/layout/ButtonColumn.svelte index 20cfdb1ec5..ead2c67787 100644 --- a/packages/frontend-core/src/components/grid/layout/ButtonColumn.svelte +++ b/packages/frontend-core/src/components/grid/layout/ButtonColumn.svelte @@ -3,6 +3,7 @@ import { Button } from "@budibase/bbui" import GridCell from "../cells/GridCell.svelte" import GridScrollWrapper from "./GridScrollWrapper.svelte" + import { BlankRowID } from "../lib/constants" const { renderedRows, @@ -17,6 +18,7 @@ isDragging, buttonColumnWidth, showVScrollbar, + dispatch, } = getContext("grid") let container @@ -89,6 +91,17 @@
{/each} +
($hoveredRowId = BlankRowID)} + on:mouseleave={$isDragging ? null : () => ($hoveredRowId = null)} + > + dispatch("add-row-inline")} + /> +
@@ -129,8 +142,11 @@ align-items: center; gap: 4px; } + .blank :global(.cell:hover) { + cursor: pointer; + } - /* Add left cell border */ + /* Add left cell border to all cells */ .button-column :global(.cell) { border-left: var(--cell-border); } diff --git a/packages/frontend-core/src/components/grid/layout/Grid.svelte b/packages/frontend-core/src/components/grid/layout/Grid.svelte index 0a3075a61f..8ea9e2264d 100644 --- a/packages/frontend-core/src/components/grid/layout/Grid.svelte +++ b/packages/frontend-core/src/components/grid/layout/Grid.svelte @@ -26,7 +26,7 @@ MaxCellRenderOverflow, GutterWidth, DefaultRowHeight, - Padding, + VPadding, SmallRowHeight, ControlsHeight, ScrollBarSize, @@ -57,6 +57,7 @@ export let buttons = null export let darkMode export let isCloud = null + export let allowViewReadonlyColumns = false // Unique identifier for DOM nodes inside this instance const gridID = `grid-${Math.random().toString().slice(2)}` @@ -118,7 +119,7 @@ // Derive min height and make available in context const minHeight = derived(rowHeight, $height => { const heightForControls = showControls ? ControlsHeight : 0 - return Padding + SmallRowHeight + $height + heightForControls + return VPadding + SmallRowHeight + $height + heightForControls }) context = { ...context, minHeight } @@ -153,7 +154,7 @@
- +
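For context on the min-height rework in the earlier hunk: with the constants introduced later in this diff (SmallRowHeight = 36, VPadding = 2 * SmallRowHeight = 72), the minimum grid height for default small rows with the controls bar hidden works out as below. ControlsHeight is defined outside this diff, so its value is not shown here.

// minHeight = VPadding + SmallRowHeight + rowHeight + heightForControls
//           = 72 + 36 + 36 + 0
//           = 144px (small rows, controls hidden)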
@@ -353,8 +354,13 @@ transition: none; } - /* Overrides */ - .grid.quiet :global(.grid-data-content .row > .cell:not(:last-child)) { + /* Overrides for quiet */ + .grid.quiet :global(.grid-data-content .row > .cell:not(:last-child)), + .grid.quiet :global(.sticky-column .row > .cell), + .grid.quiet :global(.new-row .row > .cell:not(:last-child)) { border-right: none; } + .grid.quiet :global(.sticky-column:before) { + display: none; + } diff --git a/packages/frontend-core/src/components/grid/layout/GridBody.svelte b/packages/frontend-core/src/components/grid/layout/GridBody.svelte index 87fcfe2d32..cf93f3004e 100644 --- a/packages/frontend-core/src/components/grid/layout/GridBody.svelte +++ b/packages/frontend-core/src/components/grid/layout/GridBody.svelte @@ -2,6 +2,7 @@ import { getContext, onMount } from "svelte" import GridScrollWrapper from "./GridScrollWrapper.svelte" import GridRow from "./GridRow.svelte" + import GridCell from "../cells/GridCell.svelte" import { BlankRowID } from "../lib/constants" import ButtonColumn from "./ButtonColumn.svelte" @@ -23,20 +24,29 @@ 0 ) + const updateBounds = () => { + bounds.set(body.getBoundingClientRect()) + } + onMount(() => { // Observe and record the height of the body - const observer = new ResizeObserver(() => { - bounds.set(body.getBoundingClientRect()) - }) - observer.observe(body) + const resizeObserver = new ResizeObserver(updateBounds) + resizeObserver.observe(body) + + // Capture any wheel events on the page to ensure our scroll offset is + // correct. We don't care about touch events as we only need this for + // hovering over rows with a mouse. + window.addEventListener("wheel", updateBounds, true) + + // Clean up listeners return () => { - observer.disconnect() + resizeObserver.disconnect() + window.removeEventListener("wheel", updateBounds, true) } }) -
{#each $renderedRows as row, idx} @@ -44,13 +54,16 @@ {/each} {#if $config.canAddRows}
($hoveredRowId = BlankRowID)} on:mouseleave={$isDragging ? null : () => ($hoveredRowId = null)} - on:click={() => dispatch("add-row-inline")} - /> + > + dispatch("add-row-inline")} + /> +
{/if}
{#if $props.buttons?.length} @@ -66,15 +79,13 @@ overflow: hidden; flex: 1 1 auto; } - .blank { - height: var(--row-height); - background: var(--cell-background); - border-bottom: var(--cell-border); - border-right: var(--cell-border); - position: absolute; + .row { + display: flex; + flex-direction: row; + justify-content: flex-start; + align-items: stretch; } - .blank.highlighted { - background: var(--cell-background-hover); + .blank :global(.cell:hover) { cursor: pointer; } diff --git a/packages/frontend-core/src/components/grid/layout/NewRow.svelte b/packages/frontend-core/src/components/grid/layout/NewRow.svelte index 68ace8a5b2..0b27a5d135 100644 --- a/packages/frontend-core/src/components/grid/layout/NewRow.svelte +++ b/packages/frontend-core/src/components/grid/layout/NewRow.svelte @@ -31,6 +31,7 @@ filter, inlineFilters, columnRenderMap, + scrollTop, } = getContext("grid") let visible = false @@ -43,6 +44,21 @@ $: $datasource, (visible = false) $: selectedRowCount = Object.values($selectedRows).length $: hasNoRows = !$rows.length + $: renderedRowCount = $renderedRows.length + $: offset = getOffset($hasNextPage, renderedRowCount, $rowHeight, $scrollTop) + + const getOffset = (hasNextPage, rowCount, rowHeight, scrollTop) => { + // If we have a next page of data then we aren't truly at the bottom, so we + // render the add row component at the top + if (hasNextPage) { + return 0 + } + offset = rowCount * rowHeight - (scrollTop % rowHeight) + if (rowCount !== 0) { + offset -= 1 + } + return offset + } const addRow = async () => { // Blur the active cell and tick to let final value updates propagate @@ -85,23 +101,13 @@ return } - // If we have a next page of data then we aren't truly at the bottom, so we - // render the add row component at the top - if ($hasNextPage) { - offset = 0 - } - // If we don't have a next page then we're at the bottom and can scroll to // the max available offset - else { + if (!$hasNextPage) { scroll.update(state => ({ ...state, top: $maxScrollTop, })) - offset = $renderedRows.length * $rowHeight - ($maxScrollTop % $rowHeight) - if ($renderedRows.length !== 0) { - offset -= 1 - } } // Update state and select initial cell @@ -171,39 +177,41 @@ {#if visible}
0} style="--offset:{offset}px; --sticky-width:{width}px;" >
- - - {#if isAdding} -
- {/if} - - {#if $stickyColumn} - {@const cellId = getCellID(NewRowID, $stickyColumn.name)} - - {#if $stickyColumn?.schema?.autocolumn} -
Can't edit auto column
- {/if} +
+ + {#if isAdding}
{/if} - - {/if} + + {#if $stickyColumn} + {@const cellId = getCellID(NewRowID, $stickyColumn.name)} + + {#if $stickyColumn?.schema?.autocolumn} +
Can't edit auto column
+ {/if} + {#if isAdding} +
+ {/if} + + {/if} +
@@ -270,7 +278,7 @@ margin-left: -6px; } - .container { + .new-row { position: absolute; top: var(--default-row-height); left: 0; @@ -280,10 +288,10 @@ flex-direction: row; align-items: stretch; } - .container :global(.cell) { + .new-row :global(.cell) { --cell-background: var(--spectrum-global-color-gray-75) !important; } - .container.floating :global(.cell) { + .new-row.floating :global(.cell) { height: calc(var(--row-height) + 1px); border-top: var(--cell-border); } @@ -312,8 +320,10 @@ pointer-events: all; z-index: 3; position: absolute; - top: calc(var(--row-height) + var(--offset) + 24px); - left: 18px; + top: calc( + var(--row-height) + var(--offset) + var(--default-row-height) / 2 + ); + left: calc(var(--default-row-height) / 2); } .button-with-keys { display: flex; diff --git a/packages/frontend-core/src/components/grid/layout/StickyColumn.svelte b/packages/frontend-core/src/components/grid/layout/StickyColumn.svelte index b57c89ee4f..85c1eb2897 100644 --- a/packages/frontend-core/src/components/grid/layout/StickyColumn.svelte +++ b/packages/frontend-core/src/components/grid/layout/StickyColumn.svelte @@ -66,62 +66,58 @@ -
- - {#each $renderedRows as row, idx} - {@const rowSelected = !!$selectedRows[row._id]} - {@const rowHovered = $hoveredRowId === row._id} - {@const rowFocused = $focusedRow?._id === row._id} - {@const cellId = getCellID(row._id, $stickyColumn?.name)} -
($hoveredRowId = row._id)} - on:mouseleave={$isDragging ? null : () => ($hoveredRowId = null)} - on:click={() => dispatch("rowclick", rows.actions.cleanRow(row))} - > - - {#if $stickyColumn} - - {/if} -
- {/each} - {#if $config.canAddRows} -
($hoveredRowId = BlankRowID)} - on:mouseleave={$isDragging ? null : () => ($hoveredRowId = null)} - on:click={() => dispatch("add-row-inline")} - > - - - - {#if $stickyColumn} - - - - {/if} -
- {/if} -
-
+ + {#each $renderedRows as row, idx} + {@const rowSelected = !!$selectedRows[row._id]} + {@const rowHovered = $hoveredRowId === row._id} + {@const rowFocused = $focusedRow?._id === row._id} + {@const cellId = getCellID(row._id, $stickyColumn?.name)} +
($hoveredRowId = row._id)} + on:mouseleave={$isDragging ? null : () => ($hoveredRowId = null)} + on:click={() => dispatch("rowclick", rows.actions.cleanRow(row))} + > + + {#if $stickyColumn} + + {/if} +
+ {/each} + {#if $config.canAddRows} +
($hoveredRowId = BlankRowID)} + on:mouseleave={$isDragging ? null : () => ($hoveredRowId = null)} + on:click={() => dispatch("add-row-inline")} + > + + + + {#if $stickyColumn} + + + + {/if} +
+ {/if} +
diff --git a/packages/frontend-core/src/components/grid/lib/constants.js b/packages/frontend-core/src/components/grid/lib/constants.js index 4b5d04894a..6ea7a98178 100644 --- a/packages/frontend-core/src/components/grid/lib/constants.js +++ b/packages/frontend-core/src/components/grid/lib/constants.js @@ -1,12 +1,13 @@ -export const Padding = 100 -export const ScrollBarSize = 8 -export const GutterWidth = 72 -export const DefaultColumnWidth = 200 -export const MinColumnWidth = 80 export const SmallRowHeight = 36 export const MediumRowHeight = 64 export const LargeRowHeight = 92 export const DefaultRowHeight = SmallRowHeight +export const VPadding = SmallRowHeight * 2 +export const HPadding = 40 +export const ScrollBarSize = 8 +export const GutterWidth = 72 +export const DefaultColumnWidth = 200 +export const MinColumnWidth = 80 export const NewRowID = "new" export const BlankRowID = "blank" export const RowPageSize = 100 diff --git a/packages/frontend-core/src/components/grid/stores/columns.js b/packages/frontend-core/src/components/grid/stores/columns.js index a3281be936..b76dcbfe0e 100644 --- a/packages/frontend-core/src/components/grid/stores/columns.js +++ b/packages/frontend-core/src/components/grid/stores/columns.js @@ -146,6 +146,7 @@ export const initialise = context => { schema: fieldSchema, width: fieldSchema.width || oldColumn?.width || DefaultColumnWidth, visible: fieldSchema.visible ?? true, + readonly: fieldSchema.readonly, order: fieldSchema.order ?? oldColumn?.order, primaryDisplay: field === primaryDisplay, } diff --git a/packages/frontend-core/src/components/grid/stores/datasource.js b/packages/frontend-core/src/components/grid/stores/datasource.js index 1fc973f171..8b8ffdf2cf 100644 --- a/packages/frontend-core/src/components/grid/stores/datasource.js +++ b/packages/frontend-core/src/components/grid/stores/datasource.js @@ -94,6 +94,7 @@ export const createActions = context => { nonPlus, schemaMutations, schema, + notifications, } = context // Gets the appropriate API for the configured datasource type @@ -125,16 +126,25 @@ export const createActions = context => { // Saves the datasource definition const saveDefinition = async newDefinition => { // Update local state + const originalDefinition = get(definition) definition.set(newDefinition) // Update server if (get(config).canSaveSchema) { - await getAPI()?.actions.saveDefinition(newDefinition) + try { + await getAPI()?.actions.saveDefinition(newDefinition) - // Broadcast change so external state can be updated, as this change - // will not be received by the builder websocket because we caused it - // ourselves - dispatch("updatedatasource", newDefinition) + // Broadcast change so external state can be updated, as this change + // will not be received by the builder websocket because we caused it + // ourselves + dispatch("updatedatasource", newDefinition) + } catch (error) { + const msg = error?.message || error || "Unknown error" + get(notifications).error(`Error saving schema: ${msg}`) + + // Reset the definition if saving failed + definition.set(originalDefinition) + } } } @@ -204,6 +214,10 @@ export const createActions = context => { ...$definition, schema: newSchema, }) + resetSchemaMutations() + } + + const resetSchemaMutations = () => { schemaMutations.set({}) } @@ -253,6 +267,7 @@ export const createActions = context => { addSchemaMutation, addSchemaMutations, saveSchemaMutations, + resetSchemaMutations, }, }, } diff --git a/packages/frontend-core/src/components/grid/stores/scroll.js 
b/packages/frontend-core/src/components/grid/stores/scroll.js index e7114cd00c..814d4cdc8c 100644 --- a/packages/frontend-core/src/components/grid/stores/scroll.js +++ b/packages/frontend-core/src/components/grid/stores/scroll.js @@ -1,6 +1,12 @@ import { writable, derived, get } from "svelte/store" import { tick } from "svelte" -import { Padding, GutterWidth, FocusedCellMinOffset } from "../lib/constants" +import { + GutterWidth, + FocusedCellMinOffset, + ScrollBarSize, + HPadding, + VPadding, +} from "../lib/constants" import { parseCellID } from "../lib/utils" export const createStores = () => { @@ -34,28 +40,15 @@ export const deriveStores = context => { // Memoize store primitives const stickyColumnWidth = derived(stickyColumn, $col => $col?.width || 0, 0) - // Derive vertical limits - const contentHeight = derived( - [rows, rowHeight], - ([$rows, $rowHeight]) => ($rows.length + 1) * $rowHeight + Padding, - 0 - ) - const maxScrollTop = derived( - [height, contentHeight], - ([$height, $contentHeight]) => Math.max($contentHeight - $height, 0), - 0 - ) - // Derive horizontal limits const contentWidth = derived( [visibleColumns, stickyColumnWidth, buttonColumnWidth], ([$visibleColumns, $stickyColumnWidth, $buttonColumnWidth]) => { - const space = Math.max(Padding, $buttonColumnWidth - 1) - let width = GutterWidth + space + $stickyColumnWidth + let width = GutterWidth + $buttonColumnWidth + $stickyColumnWidth $visibleColumns.forEach(col => { width += col.width }) - return width + return width + HPadding }, 0 ) @@ -71,14 +64,6 @@ export const deriveStores = context => { }, 0 ) - - // Derive whether to show scrollbars or not - const showVScrollbar = derived( - [contentHeight, height], - ([$contentHeight, $height]) => { - return $contentHeight > $height - } - ) const showHScrollbar = derived( [contentWidth, screenWidth], ([$contentWidth, $screenWidth]) => { @@ -86,6 +71,30 @@ export const deriveStores = context => { } ) + // Derive vertical limits + const contentHeight = derived( + [rows, rowHeight, showHScrollbar], + ([$rows, $rowHeight, $showHScrollbar]) => { + let height = ($rows.length + 1) * $rowHeight + VPadding + if ($showHScrollbar) { + height += ScrollBarSize * 2 + } + return height + }, + 0 + ) + const maxScrollTop = derived( + [height, contentHeight], + ([$height, $contentHeight]) => Math.max($contentHeight - $height, 0), + 0 + ) + const showVScrollbar = derived( + [contentHeight, height], + ([$contentHeight, $height]) => { + return $contentHeight > $height + } + ) + return { contentHeight, contentWidth, diff --git a/packages/frontend-core/src/constants.js b/packages/frontend-core/src/constants.js index 0d6261f5f8..e5869a3b98 100644 --- a/packages/frontend-core/src/constants.js +++ b/packages/frontend-core/src/constants.js @@ -1,7 +1,11 @@ /** * Operator options for lucene queries */ -export { OperatorOptions, SqlNumberTypeRangeMap } from "@budibase/shared-core" +export { + OperatorOptions, + SqlNumberTypeRangeMap, + DEFAULT_BB_DATASOURCE_ID, +} from "@budibase/shared-core" export { Feature as Features } from "@budibase/types" import { BpmCorrelationKey } from "@budibase/shared-core" import { FieldType, BBReferenceFieldSubType } from "@budibase/types" diff --git a/packages/frontend-core/src/fetch/DataFetch.js b/packages/frontend-core/src/fetch/DataFetch.js index 3a45543c60..dedd06264c 100644 --- a/packages/frontend-core/src/fetch/DataFetch.js +++ b/packages/frontend-core/src/fetch/DataFetch.js @@ -1,10 +1,9 @@ import { writable, derived, get } from "svelte/store" import { cloneDeep 
} from "lodash/fp" -import { LuceneUtils } from "../utils" +import { QueryUtils } from "../utils" import { convertJSONSchemaToTableSchema } from "../utils/json" -const { buildLuceneQuery, luceneLimit, runLuceneQuery, luceneSort } = - LuceneUtils +const { buildQuery, limit: queryLimit, runQuery, sort } = QueryUtils /** * Parent class which handles the implementation of fetching data from an @@ -177,10 +176,10 @@ export default class DataFetch { } } - // Build the lucene query + // Build the query let query = this.options.query if (!query) { - query = buildLuceneQuery(filter) + query = buildQuery(filter) } // Update store @@ -229,17 +228,17 @@ export default class DataFetch { // If we don't support searching, do a client search if (!this.features.supportsSearch && clientSideSearching) { - rows = runLuceneQuery(rows, query) + rows = runQuery(rows, query) } // If we don't support sorting, do a client-side sort if (!this.features.supportsSort && clientSideSorting) { - rows = luceneSort(rows, sortColumn, sortOrder, sortType) + rows = sort(rows, sortColumn, sortOrder, sortType) } // If we don't support pagination, do a client-side limit if (!this.features.supportsPagination && clientSideLimiting) { - rows = luceneLimit(rows, limit) + rows = queryLimit(rows, limit) } return { diff --git a/packages/frontend-core/src/fetch/UserFetch.js b/packages/frontend-core/src/fetch/UserFetch.js index 65bfe36058..cb2c045cc6 100644 --- a/packages/frontend-core/src/fetch/UserFetch.js +++ b/packages/frontend-core/src/fetch/UserFetch.js @@ -1,7 +1,7 @@ import { get } from "svelte/store" import DataFetch from "./DataFetch.js" import { TableNames } from "../constants" -import { LuceneUtils } from "../utils" +import { QueryUtils } from "../utils" export default class UserFetch extends DataFetch { constructor(opts) { @@ -33,7 +33,7 @@ export default class UserFetch extends DataFetch { let finalQuery // convert old format to new one - we now allow use of the lucene format const { appId, paginated, ...rest } = query - if (!LuceneUtils.hasFilters(query) && rest.email != null) { + if (!QueryUtils.hasFilters(query) && rest.email != null) { finalQuery = { string: { email: rest.email } } } else { finalQuery = rest diff --git a/packages/frontend-core/src/utils/index.js b/packages/frontend-core/src/utils/index.js index 4dad605400..9eb7206012 100644 --- a/packages/frontend-core/src/utils/index.js +++ b/packages/frontend-core/src/utils/index.js @@ -1,4 +1,4 @@ -export { dataFilters as LuceneUtils } from "@budibase/shared-core" +export { dataFilters as QueryUtils } from "@budibase/shared-core" export * as JSONUtils from "./json" export * as CookieUtils from "./cookies" export * as RoleUtils from "./roles" diff --git a/packages/frontend-core/src/utils/utils.js b/packages/frontend-core/src/utils/utils.js index 65690cd535..1bee3d6c04 100644 --- a/packages/frontend-core/src/utils/utils.js +++ b/packages/frontend-core/src/utils/utils.js @@ -161,6 +161,9 @@ export const buildFormBlockButtonConfig = props => { { "##eventHandlerType": "Close Side Panel", }, + { + "##eventHandlerType": "Close Modal", + }, // Clear a create form once submitted ...(actionType !== "Create" ? 
[] diff --git a/packages/pro b/packages/pro index 5189b83bea..e8f2c5a147 160000 --- a/packages/pro +++ b/packages/pro @@ -1 +1 @@ -Subproject commit 5189b83bea1868574ff7f4c51fe5db38a11badb8 +Subproject commit e8f2c5a14780e1f61ec3896821ba5f93d486eb72 diff --git a/packages/server/package.json b/packages/server/package.json index e816ad3f18..94bbb6fc6b 100644 --- a/packages/server/package.json +++ b/packages/server/package.json @@ -12,7 +12,7 @@ "prebuild": "rimraf dist/", "build": "node ./scripts/build.js", "postbuild": "copyfiles -f ../client/dist/budibase-client.js ../client/manifest.json client && copyfiles -f ../../yarn.lock ./dist/", - "check:types": "tsc -p tsconfig.json --noEmit --paths null", + "check:types": "tsc -p tsconfig.json --noEmit --paths null --target es2020", "build:isolated-vm-lib:snippets": "esbuild --minify --bundle src/jsRunner/bundles/snippets.ts --outfile=src/jsRunner/bundles/snippets.ivm.bundle.js --platform=node --format=iife --global-name=snippets", "build:isolated-vm-lib:string-templates": "esbuild --minify --bundle src/jsRunner/bundles/index-helpers.ts --outfile=src/jsRunner/bundles/index-helpers.ivm.bundle.js --platform=node --format=iife --external:handlebars --global-name=helpers", "build:isolated-vm-lib:bson": "esbuild --minify --bundle src/jsRunner/bundles/bsonPackage.ts --outfile=src/jsRunner/bundles/bson.ivm.bundle.js --platform=node --format=iife --global-name=bson", @@ -68,7 +68,6 @@ "aws-sdk": "2.1030.0", "bcrypt": "5.1.0", "bcryptjs": "2.4.3", - "bl": "^6.0.12", "bull": "4.10.1", "chokidar": "3.5.3", "content-disposition": "^0.5.4", @@ -97,10 +96,10 @@ "memorystream": "0.3.1", "mongodb": "^6.3.0", "mssql": "10.0.1", - "mysql2": "3.9.7", + "mysql2": "3.9.8", "node-fetch": "2.6.7", "object-sizeof": "2.6.1", - "openai": "^3.2.1", + "openai": "^4.52.1", "openapi-types": "9.3.1", "pg": "8.10.0", "pouchdb": "7.3.0", @@ -110,13 +109,14 @@ "serialize-error": "^7.0.1", "server-destroy": "1.0.1", "snowflake-promise": "^4.5.0", - "socket.io": "4.6.1", - "tar": "6.1.15", + "socket.io": "4.6.2", + "tar": "6.2.1", "to-json-schema": "0.2.5", "uuid": "^8.3.2", "validate.js": "0.13.1", "worker-farm": "1.7.0", - "xml2js": "0.5.0" + "xml2js": "0.5.0", + "tmp": "0.2.3" }, "devDependencies": { "@babel/preset-env": "7.16.11", @@ -137,6 +137,7 @@ "@types/supertest": "2.0.14", "@types/tar": "6.1.5", "@types/uuid": "8.3.4", + "@types/tmp": "0.2.6", "copyfiles": "2.4.1", "docker-compose": "0.23.17", "jest": "29.7.0", @@ -151,7 +152,7 @@ "timekeeper": "2.2.0", "ts-node": "10.8.1", "tsconfig-paths": "4.0.0", - "typescript": "5.2.2", + "typescript": "5.5.2", "update-dotenv": "1.1.1", "yargs": "13.2.4" }, diff --git a/packages/server/scripts/dev/manage.js b/packages/server/scripts/dev/manage.js index 509e9a05ac..3630aeee75 100644 --- a/packages/server/scripts/dev/manage.js +++ b/packages/server/scripts/dev/manage.js @@ -48,6 +48,7 @@ async function init() { HTTP_LOGGING: "0", VERSION: "0.0.0+local", PASSWORD_MIN_LENGTH: "1", + SQS_SEARCH_ENABLE: "1", } config = { ...config, ...existingConfig } diff --git a/packages/server/scripts/integrations/mssql/data/Dockerfile b/packages/server/scripts/integrations/mssql/data/Dockerfile index b8c96e8419..c26af556a9 100644 --- a/packages/server/scripts/integrations/mssql/data/Dockerfile +++ b/packages/server/scripts/integrations/mssql/data/Dockerfile @@ -1,4 +1,4 @@ -FROM mcr.microsoft.com/mssql/server:2017-latest +FROM mcr.microsoft.com/mssql/server:2022-latest ENV ACCEPT_EULA=Y ENV SA_PASSWORD=Passw0rd diff --git 
a/packages/server/scripts/integrations/postgres/init.sql b/packages/server/scripts/integrations/postgres/init.sql index b7ce1b7d5b..9624208deb 100644 --- a/packages/server/scripts/integrations/postgres/init.sql +++ b/packages/server/scripts/integrations/postgres/init.sql @@ -54,8 +54,31 @@ INSERT INTO Persons (FirstName, LastName, Address, City, Type, Year) VALUES ('Mi INSERT INTO Persons (FirstName, LastName, Address, City, Type, Year) VALUES ('John', 'Smith', '64 Updown Road', 'Dublin', 'programmer', 1996); INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('Foo', 'Bar', 'Foo Street', 'Bartown', 'support', 0, 1993); INSERT INTO Persons (FirstName, LastName, Address, City, Type) VALUES ('Jonny', 'Muffin', 'Muffin Street', 'Cork', 'support'); -INSERT INTO Tasks (ExecutorID, QaID, TaskName, Completed) VALUES (1, 2, 'assembling', TRUE); -INSERT INTO Tasks (ExecutorID, QaID, TaskName, Completed) VALUES (2, 1, 'processing', FALSE); +INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('Dave', 'Bar', '2 Foo Street', 'Bartown', 'support', 0, 1993); +INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('James', 'Bar', '3 Foo Street', 'Bartown', 'support', 0, 1993); +INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('Jenny', 'Bar', '4 Foo Street', 'Bartown', 'support', 0, 1993); +INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('Grace', 'Bar', '5 Foo Street', 'Bartown', 'support', 0, 1993); +INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('Sarah', 'Bar', '6 Foo Street', 'Bartown', 'support', 0, 1993); +INSERT INTO Persons (FirstName, LastName, Address, City, Type, Age, Year) VALUES ('Kelly', 'Bar', '7 Foo Street', 'Bartown', 'support', 0, 1993); + +-- insert a lot of tasks for testing +WITH RECURSIVE generate_series AS ( + SELECT 1 AS n + UNION ALL + SELECT n + 1 FROM generate_series WHERE n < 6000 +), +random_data AS ( + SELECT + n, + (random() * 9 + 1)::int AS ExecutorID, + (random() * 9 + 1)::int AS QaID, + 'assembling' AS TaskName, + (random() < 0.5) AS Completed + FROM generate_series +) +INSERT INTO Tasks (ExecutorID, QaID, TaskName, Completed) +SELECT ExecutorID, QaID, TaskName, Completed +FROM random_data; INSERT INTO Products (ProductName) VALUES ('Computers'); INSERT INTO Products (ProductName) VALUES ('Laptops'); INSERT INTO Products (ProductName) VALUES ('Chairs'); diff --git a/packages/server/specs/openapi.json b/packages/server/specs/openapi.json index 7d07b424f0..b21554505b 100644 --- a/packages/server/specs/openapi.json +++ b/packages/server/specs/openapi.json @@ -860,8 +860,10 @@ "json", "internal", "barcodeqr", + "signature_single", "bigint", - "bb_reference" + "bb_reference", + "bb_reference_single" ], "description": "Defines the type of the column, most explain themselves, a link column is a relationship." }, @@ -1067,8 +1069,10 @@ "json", "internal", "barcodeqr", + "signature_single", "bigint", - "bb_reference" + "bb_reference", + "bb_reference_single" ], "description": "Defines the type of the column, most explain themselves, a link column is a relationship." }, @@ -1285,8 +1289,10 @@ "json", "internal", "barcodeqr", + "signature_single", "bigint", - "bb_reference" + "bb_reference", + "bb_reference_single" ], "description": "Defines the type of the column, most explain themselves, a link column is a relationship." 
}, diff --git a/packages/server/specs/openapi.yaml b/packages/server/specs/openapi.yaml index 3a798c424b..6a2ae89c61 100644 --- a/packages/server/specs/openapi.yaml +++ b/packages/server/specs/openapi.yaml @@ -782,8 +782,10 @@ components: - json - internal - barcodeqr + - signature_single - bigint - bb_reference + - bb_reference_single description: Defines the type of the column, most explain themselves, a link column is a relationship. constraints: @@ -948,8 +950,10 @@ components: - json - internal - barcodeqr + - signature_single - bigint - bb_reference + - bb_reference_single description: Defines the type of the column, most explain themselves, a link column is a relationship. constraints: @@ -1121,8 +1125,10 @@ components: - json - internal - barcodeqr + - signature_single - bigint - bb_reference + - bb_reference_single description: Defines the type of the column, most explain themselves, a link column is a relationship. constraints: diff --git a/packages/server/src/api/controllers/application.ts b/packages/server/src/api/controllers/application.ts index e73058239b..830acc55bf 100644 --- a/packages/server/src/api/controllers/application.ts +++ b/packages/server/src/api/controllers/application.ts @@ -358,11 +358,14 @@ async function performAppCreate(ctx: UserCtx) { await createApp(appId) } - // Initialise the app migration version as the latest one - await appMigrations.updateAppMigrationMetadata({ - appId, - version: appMigrations.getLatestMigrationId(), - }) + const latestMigrationId = appMigrations.getLatestEnabledMigrationId() + if (latestMigrationId) { + // Initialise the app migration version as the latest one + await appMigrations.updateAppMigrationMetadata({ + appId, + version: latestMigrationId, + }) + } await cache.app.invalidateAppMetadata(appId, newApplication) return newApplication diff --git a/packages/server/src/api/controllers/auth.ts b/packages/server/src/api/controllers/auth.ts index 9b1b78ed9e..4ff592534d 100644 --- a/packages/server/src/api/controllers/auth.ts +++ b/packages/server/src/api/controllers/auth.ts @@ -1,7 +1,7 @@ import { outputProcessing } from "../../utilities/rowProcessor" import { InternalTables } from "../../db/utils" import { getFullUser } from "../../utilities/users" -import { roles, context } from "@budibase/backend-core" +import { roles, context, db as dbCore } from "@budibase/backend-core" import { ContextUser, Row, UserCtx } from "@budibase/types" import sdk from "../../sdk" import { processUser } from "../../utilities/global" @@ -27,6 +27,8 @@ export async function fetchSelf(ctx: UserCtx) { const appId = context.getAppId() let user: ContextUser = await getFullUser(userId) + // add globalId of user + user.globalId = dbCore.getGlobalIDFromUserMetadataID(userId) // this shouldn't be returned by the app self delete user.roles // forward the csrf token from the session diff --git a/packages/server/src/api/controllers/migrations.ts b/packages/server/src/api/controllers/migrations.ts index c8f786578d..89255e89b1 100644 --- a/packages/server/src/api/controllers/migrations.ts +++ b/packages/server/src/api/controllers/migrations.ts @@ -3,7 +3,7 @@ import { migrate as migrationImpl, MIGRATIONS } from "../../migrations" import { Ctx } from "@budibase/types" import { getAppMigrationVersion, - getLatestMigrationId, + getLatestEnabledMigrationId, } from "../../appMigrations" export async function migrate(ctx: Ctx) { @@ -27,7 +27,9 @@ export async function getMigrationStatus(ctx: Ctx) { const latestAppliedMigration = await getAppMigrationVersion(appId) - 
const migrated = latestAppliedMigration === getLatestMigrationId() + const latestMigrationId = getLatestEnabledMigrationId() + const migrated = + !latestMigrationId || latestAppliedMigration >= latestMigrationId ctx.body = { migrated } ctx.status = 200 diff --git a/packages/server/src/api/controllers/query/index.ts b/packages/server/src/api/controllers/query/index.ts index b52cea553f..54f672c3f3 100644 --- a/packages/server/src/api/controllers/query/index.ts +++ b/packages/server/src/api/controllers/query/index.ts @@ -311,8 +311,8 @@ export async function preview( // if existing schema, update to include any previous schema keys if (existingSchema) { - for (let key of Object.keys(previewSchema)) { - if (existingSchema[key]) { + for (let key of Object.keys(existingSchema)) { + if (!previewSchema[key]) { previewSchema[key] = existingSchema[key] } } diff --git a/packages/server/src/api/controllers/row/ExternalRequest.ts b/packages/server/src/api/controllers/row/ExternalRequest.ts index b30c97e289..b51de46e99 100644 --- a/packages/server/src/api/controllers/row/ExternalRequest.ts +++ b/packages/server/src/api/controllers/row/ExternalRequest.ts @@ -7,6 +7,7 @@ import { FieldType, FilterType, IncludeRelationship, + isManyToOne, OneToManyRelationshipFieldMetadata, Operation, PaginationJson, @@ -16,29 +17,33 @@ import { SortJson, SortType, Table, - isManyToOne, } from "@budibase/types" import { breakExternalTableId, breakRowIdField, convertRowId, + generateRowIdField, isRowId, isSQL, - generateRowIdField, } from "../../../integrations/utils" import { buildExternalRelationships, buildSqlFieldList, generateIdForRow, - sqlOutputProcessing, + isKnexEmptyReadResponse, isManyToMany, + sqlOutputProcessing, } from "./utils" -import { getDatasourceAndQuery } from "../../../sdk/app/rows/utils" +import { + getDatasourceAndQuery, + processRowCountResponse, +} from "../../../sdk/app/rows/utils" import { processObjectSync } from "@budibase/string-templates" import { cloneDeep } from "lodash/fp" import { db as dbCore } from "@budibase/backend-core" import sdk from "../../../sdk" import env from "../../../environment" +import { makeExternalQuery } from "../../../integrations/base/query" export interface ManyRelationship { tableId?: string @@ -60,91 +65,12 @@ export interface RunConfig { includeSqlRelationships?: IncludeRelationship } -function buildFilters( - id: string | undefined | string[], - filters: SearchFilters, - table: Table -) { - const primary = table.primary - // if passed in array need to copy for shifting etc - let idCopy: undefined | string | any[] = cloneDeep(id) - if (filters) { - // need to map over the filters and make sure the _id field isn't present - let prefix = 1 - for (let operator of Object.values(filters)) { - for (let field of Object.keys(operator || {})) { - if (dbCore.removeKeyNumbering(field) === "_id") { - if (primary) { - const parts = breakRowIdField(operator[field]) - for (let field of primary) { - operator[`${prefix}:${field}`] = parts.shift() - } - prefix++ - } - // make sure this field doesn't exist on any filter - delete operator[field] - } - } - } - } - // there is no id, just use the user provided filters - if (!idCopy || !table) { - return filters - } - // if used as URL parameter it will have been joined - if (!Array.isArray(idCopy)) { - idCopy = breakRowIdField(idCopy) - } - const equal: any = {} - if (primary && idCopy) { - for (let field of primary) { - // work through the ID and get the parts - equal[field] = idCopy.shift() - } - } - return { - equal, - } -} - -async 
function removeManyToManyRelationships(
-  rowId: string,
-  table: Table,
-  colName: string
-) {
-  const tableId = table._id!
-  const filters = buildFilters(rowId, {}, table)
-  // safety check, if there are no filters on deletion bad things happen
-  if (Object.keys(filters).length !== 0) {
-    return getDatasourceAndQuery({
-      endpoint: getEndpoint(tableId, Operation.DELETE),
-      body: { [colName]: null },
-      filters,
-      meta: {
-        table,
-      },
-    })
-  } else {
-    return []
-  }
-}
-
-async function removeOneToManyRelationships(rowId: string, table: Table) {
-  const tableId = table._id!
-  const filters = buildFilters(rowId, {}, table)
-  // safety check, if there are no filters on deletion bad things happen
-  if (Object.keys(filters).length !== 0) {
-    return getDatasourceAndQuery({
-      endpoint: getEndpoint(tableId, Operation.UPDATE),
-      filters,
-      meta: {
-        table,
-      },
-    })
-  } else {
-    return []
-  }
-}
+export type ExternalRequestReturnType<T extends Operation> =
+  T extends Operation.READ
+    ? Row[]
+    : T extends Operation.COUNT
+      ? number
+      : { row: Row; table: Table }

 /**
  * This function checks the incoming parameters to make sure all the inputs are
@@ -200,8 +126,8 @@ function getEndpoint(tableId: string | undefined, operation: string) {
   }
   const { datasourceId, tableName } = breakExternalTableId(tableId)
   return {
-    datasourceId: datasourceId!,
-    entityId: tableName!,
+    datasourceId: datasourceId,
+    entityId: tableName,
     operation: operation as Operation,
   }
 }
@@ -223,14 +149,12 @@ function isEditableColumn(column: FieldSchema) {
   return !(isExternalAutoColumn || isFormula)
 }

-export type ExternalRequestReturnType<T extends Operation> =
-  T extends Operation.READ ? Row[] : { row: Row; table: Table }
-
 export class ExternalRequest<T extends Operation> {
   private readonly operation: T
   private readonly tableId: string
   private datasource?: Datasource
   private tables: { [key: string]: Table } = {}
+  private tableList: Table[]

   constructor(operation: T, tableId: string, datasource?: Datasource) {
     this.operation = operation
@@ -239,22 +163,134 @@ export class ExternalRequest<T extends Operation> {
     if (datasource && datasource.entities) {
       this.tables = datasource.entities
     }
+    this.tableList = Object.values(this.tables)
+  }
+
+  private prepareFilters(
+    id: string | undefined | string[],
+    filters: SearchFilters,
+    table: Table
+  ): SearchFilters {
+    // replace any relationship columns initially, table names and relationship column names are acceptable
+    const relationshipColumns = sdk.rows.filters.getRelationshipColumns(table)
+    filters = sdk.rows.filters.updateFilterKeys(
+      filters,
+      relationshipColumns.map(({ name, definition }) => {
+        const { tableName } = breakExternalTableId(definition.tableId)
+        return {
+          original: name,
+          updated: tableName,
+        }
+      })
+    )
+    const primary = table.primary
+    // if passed in array need to copy for shifting etc
+    let idCopy: undefined | string | any[] = cloneDeep(id)
+    if (filters) {
+      // need to map over the filters and make sure the _id field isn't present
+      let prefix = 1
+      for (let operator of Object.values(filters)) {
+        for (let field of Object.keys(operator || {})) {
+          if (dbCore.removeKeyNumbering(field) === "_id") {
+            if (primary) {
+              const parts = breakRowIdField(operator[field])
+              for (let field of primary) {
+                operator[`${prefix}:${field}`] = parts.shift()
+              }
+              prefix++
+            }
+            // make sure this field doesn't exist on any filter
+            delete operator[field]
+          }
+        }
+      }
+    }
+    // there is no id, just use the user provided filters
+    if (!idCopy || !table) {
+      return filters
+    }
+    // if used as URL parameter it will have been joined
+    if 
(!Array.isArray(idCopy)) { + idCopy = breakRowIdField(idCopy) + } + const equal: SearchFilters["equal"] = {} + if (primary && idCopy) { + for (let field of primary) { + // work through the ID and get the parts + equal[field] = idCopy.shift() + } + } + return { + equal, + } + } + + private async removeManyToManyRelationships( + rowId: string, + table: Table, + colName: string + ) { + const tableId = table._id! + const filters = this.prepareFilters(rowId, {}, table) + // safety check, if there are no filters on deletion bad things happen + if (Object.keys(filters).length !== 0) { + return getDatasourceAndQuery({ + endpoint: getEndpoint(tableId, Operation.DELETE), + body: { [colName]: null }, + filters, + meta: { + table, + }, + }) + } else { + return [] + } + } + + private async removeOneToManyRelationships(rowId: string, table: Table) { + const tableId = table._id! + const filters = this.prepareFilters(rowId, {}, table) + // safety check, if there are no filters on deletion bad things happen + if (Object.keys(filters).length !== 0) { + return getDatasourceAndQuery({ + endpoint: getEndpoint(tableId, Operation.UPDATE), + filters, + meta: { + table, + }, + }) + } else { + return [] + } } getTable(tableId: string | undefined): Table | undefined { if (!tableId) { - throw "Table ID is unknown, cannot find table" + throw new Error("Table ID is unknown, cannot find table") } const { tableName } = breakExternalTableId(tableId) - if (tableName) { - return this.tables[tableName] + return this.tables[tableName] + } + + // seeds the object with table and datasource information + async retrieveMetadata( + datasourceId: string + ): Promise<{ tables: Record; datasource: Datasource }> { + if (!this.datasource) { + this.datasource = await sdk.datasources.get(datasourceId) + if (!this.datasource || !this.datasource.entities) { + throw "No tables found, fetch tables before query." 
+ } + this.tables = this.datasource.entities + this.tableList = Object.values(this.tables) } + return { tables: this.tables, datasource: this.datasource } } async getRow(table: Table, rowId: string): Promise { const response = await getDatasourceAndQuery({ endpoint: getEndpoint(table._id!, Operation.READ), - filters: buildFilters(rowId, {}, table), + filters: this.prepareFilters(rowId, {}, table), meta: { table, }, @@ -280,16 +316,20 @@ export class ExternalRequest { manyRelationships: ManyRelationship[] = [] for (let [key, field] of Object.entries(table.schema)) { // if set already, or not set just skip it - if (row[key] === undefined || newRow[key] || !isEditableColumn(field)) { + if (row[key] === undefined || newRow[key]) { + continue + } + if ( + !(this.operation === Operation.BULK_UPSERT) && + !isEditableColumn(field) + ) { continue } // parse floats/numbers if (field.type === FieldType.NUMBER && !isNaN(parseFloat(row[key]))) { newRow[key] = parseFloat(row[key]) } else if (field.type === FieldType.LINK) { - const { tableName: linkTableName } = breakExternalTableId( - field?.tableId - ) + const { tableName: linkTableName } = breakExternalTableId(field.tableId) // table has to exist for many to many if (!linkTableName || !this.tables[linkTableName]) { continue @@ -370,9 +410,6 @@ export class ExternalRequest { [key: string]: { rows: Row[]; isMany: boolean; tableId: string } } = {} const { tableName } = breakExternalTableId(tableId) - if (!tableName) { - return related - } const table = this.tables[tableName] // @ts-ignore const primaryKey = table.primary[0] @@ -428,7 +465,9 @@ export class ExternalRequest { }) // this is the response from knex if no rows found const rows: Row[] = - !Array.isArray(response) || response?.[0].read ? [] : response + !Array.isArray(response) || isKnexEmptyReadResponse(response) + ? [] + : response const storeTo = isManyToMany(field) ? field.throughFrom || linkPrimaryKey : fieldName @@ -503,7 +542,7 @@ export class ExternalRequest { endpoint: getEndpoint(tableId, operation), // if we're doing many relationships then we're writing, only one response body, - filters: buildFilters(id, {}, linkTable), + filters: this.prepareFilters(id, {}, linkTable), meta: { table: linkTable, }, @@ -517,7 +556,7 @@ export class ExternalRequest { // finally cleanup anything that needs to be removed for (let [colName, { isMany, rows, tableId }] of Object.entries(related)) { const table: Table | undefined = this.getTable(tableId) - // if its not the foreign key skip it, nothing to do + // if it's not the foreign key skip it, nothing to do if ( !table || (!isMany && table.primary && table.primary.indexOf(colName) !== -1) @@ -527,8 +566,8 @@ export class ExternalRequest { for (let row of rows) { const rowId = generateIdForRow(row, table) const promise: Promise = isMany - ? removeManyToManyRelationships(rowId, table, colName) - : removeOneToManyRelationships(rowId, table) + ? this.removeManyToManyRelationships(rowId, table, colName) + : this.removeOneToManyRelationships(rowId, table) if (promise) { promises.push(promise) } @@ -551,12 +590,12 @@ export class ExternalRequest { rows.map(row => { const rowId = generateIdForRow(row, table) return isMany - ? removeManyToManyRelationships( + ? 
this.removeManyToManyRelationships(
                rowId,
                table,
                relationshipColumn.fieldName
              )
-          : removeOneToManyRelationships(rowId, table)
+          : this.removeOneToManyRelationships(rowId, table)
       })
     )
   }
@@ -564,21 +603,21 @@
   async run(config: RunConfig): Promise<ExternalRequestReturnType<T>> {
     const { operation, tableId } = this
-    let { datasourceId, tableName } = breakExternalTableId(tableId)
-    if (!tableName) {
-      throw "Unable to run without a table name"
+    if (!tableId) {
+      throw new Error("Unable to run without a table ID")
     }
-    if (!this.datasource) {
-      this.datasource = await sdk.datasources.get(datasourceId!)
-      if (!this.datasource || !this.datasource.entities) {
-        throw "No tables found, fetch tables before query."
-      }
-      this.tables = this.datasource.entities
+    let { datasourceId, tableName } = breakExternalTableId(tableId)
+    let datasource = this.datasource
+    if (!datasource) {
+      const { datasource: ds } = await this.retrieveMetadata(datasourceId)
+      datasource = ds
     }
     const table = this.tables[tableName]
-    let isSql = isSQL(this.datasource)
+    let isSql = isSQL(datasource)
     if (!table) {
-      throw `Unable to process query, table "${tableName}" not defined.`
+      throw new Error(
+        `Unable to process query, table "${tableName}" not defined.`
+      )
     }
     // look for specific components of config which may not be considered acceptable
     let { id, row, filters, sort, paginate, rows } = cleanupConfig(
@@ -601,7 +640,7 @@
         break
       }
     }
-    filters = buildFilters(id, filters || {}, table)
+    filters = this.prepareFilters(id, filters || {}, table)
     const relationships = buildExternalRelationships(table, this.tables)

     const incRelationships =
@@ -649,10 +688,15 @@
       body: row || rows,
       // pass an id filter into extra, purely for mysql/returning
       extra: {
-        idFilter: buildFilters(id || generateIdForRow(row, table), {}, table),
+        idFilter: this.prepareFilters(
+          id || generateIdForRow(row, table),
+          {},
+          table
+        ),
       },
       meta: {
         table,
+        id: config.id,
       },
     }

@@ -662,12 +706,14 @@
     }

     // aliasing can be disabled fully if desired
-    let response
-    if (env.SQL_ALIASING_DISABLE) {
-      response = await getDatasourceAndQuery(json)
-    } else {
-      const aliasing = new sdk.rows.AliasTables(Object.keys(this.tables))
-      response = await aliasing.queryWithAliasing(json)
+    const aliasing = new sdk.rows.AliasTables(Object.keys(this.tables))
+    let response = env.SQL_ALIASING_DISABLE
+      ? await getDatasourceAndQuery(json)
+      : await aliasing.queryWithAliasing(json, makeExternalQuery)
+
+    // if it's a counting operation there will be no more processing, just return the number
+    if (this.operation === Operation.COUNT) {
+      return processRowCountResponse(response) as ExternalRequestReturnType<T>
     }

     const responseRows = Array.isArray(response) ? 
response : [] diff --git a/packages/server/src/api/controllers/row/external.ts b/packages/server/src/api/controllers/row/external.ts index e0e3cb6c18..126b11d0c1 100644 --- a/packages/server/src/api/controllers/row/external.ts +++ b/packages/server/src/api/controllers/row/external.ts @@ -25,6 +25,7 @@ import { outputProcessing, } from "../../../utilities/rowProcessor" import { cloneDeep } from "lodash" +import { generateIdForRow } from "./utils" export async function handleRequest( operation: T, @@ -38,9 +39,10 @@ export async function handleRequest( export async function patch(ctx: UserCtx) { const tableId = utils.getTableId(ctx) - const { _id, ...rowData } = ctx.request.body + const { _id, ...rowData } = ctx.request.body const table = await sdk.tables.getTable(tableId) + const { row: dataToUpdate } = await inputProcessing( ctx.user?._id, cloneDeep(table), @@ -55,11 +57,19 @@ export async function patch(ctx: UserCtx) { throw { validation: validateResult.errors } } + const beforeRow = await sdk.rows.external.getRow(tableId, _id, { + relationships: true, + }) + const response = await handleRequest(Operation.UPDATE, tableId, { id: breakRowIdField(_id), row: dataToUpdate, }) - const row = await sdk.rows.external.getRow(tableId, _id, { + + // The id might have been changed, so the refetching would fail. Recalculating the id just in case + const updatedId = + generateIdForRow({ ...beforeRow, ...dataToUpdate }, table) || _id + const row = await sdk.rows.external.getRow(tableId, updatedId, { relationships: true, }) const enrichedRow = await outputProcessing(table, row, { @@ -70,6 +80,7 @@ export async function patch(ctx: UserCtx) { ...response, row: enrichedRow, table, + oldRow: beforeRow, } } @@ -125,10 +136,7 @@ export async function fetchEnrichedRow(ctx: UserCtx) { const id = ctx.params.rowId const tableId = utils.getTableId(ctx) const { datasourceId, tableName } = breakExternalTableId(tableId) - const datasource: Datasource = await sdk.datasources.get(datasourceId!) - if (!tableName) { - ctx.throw(400, "Unable to find table.") - } + const datasource: Datasource = await sdk.datasources.get(datasourceId) if (!datasource || !datasource.entities) { ctx.throw(400, "Datasource has not been configured for plus API.") } @@ -152,7 +160,7 @@ export async function fetchEnrichedRow(ctx: UserCtx) { } const links = row[fieldName] const linkedTableId = field.tableId - const linkedTableName = breakExternalTableId(linkedTableId).tableName! 
+ const linkedTableName = breakExternalTableId(linkedTableId).tableName const linkedTable = tables[linkedTableName] // don't support composite keys right now const linkedIds = links.map((link: Row) => breakRowIdField(link._id!)[0]) diff --git a/packages/server/src/api/controllers/row/index.ts b/packages/server/src/api/controllers/row/index.ts index 8bc73e599a..760b73f404 100644 --- a/packages/server/src/api/controllers/row/index.ts +++ b/packages/server/src/api/controllers/row/index.ts @@ -55,13 +55,13 @@ export async function patch( return save(ctx) } try { - const { row, table } = await pickApi(tableId).patch(ctx) + const { row, table, oldRow } = await pickApi(tableId).patch(ctx) if (!row) { ctx.throw(404, "Row not found") } ctx.status = 200 ctx.eventEmitter && - ctx.eventEmitter.emitRow(`row:update`, appId, row, table) + ctx.eventEmitter.emitRow(`row:update`, appId, row, table, oldRow) ctx.message = `${table.name} updated successfully.` ctx.body = row gridSocket?.emitRowUpdate(ctx, row) @@ -84,9 +84,11 @@ export const save = async (ctx: UserCtx) => { if (body && body._id) { return patch(ctx as UserCtx) } - const { row, table, squashed } = await quotas.addRow(() => - sdk.rows.save(tableId, ctx.request.body, ctx.user?._id) - ) + const { row, table, squashed } = tableId.includes("datasource_plus") + ? await sdk.rows.save(tableId, ctx.request.body, ctx.user?._id) + : await quotas.addRow(() => + sdk.rows.save(tableId, ctx.request.body, ctx.user?._id) + ) ctx.status = 200 ctx.eventEmitter && ctx.eventEmitter.emitRow(`row:save`, appId, row, table) ctx.message = `${table.name} saved successfully` @@ -152,7 +154,9 @@ async function deleteRows(ctx: UserCtx) { deleteRequest.rows = await processDeleteRowsRequest(ctx) const { rows } = await pickApi(tableId).bulkDestroy(ctx) - await quotas.removeRows(rows.length) + if (!tableId.includes("datasource_plus")) { + await quotas.removeRows(rows.length) + } for (let row of rows) { ctx.eventEmitter && ctx.eventEmitter.emitRow(`row:delete`, appId, row) @@ -167,7 +171,9 @@ async function deleteRow(ctx: UserCtx) { const tableId = utils.getTableId(ctx) const resp = await pickApi(tableId).destroy(ctx) - await quotas.removeRow() + if (!tableId.includes("datasource_plus")) { + await quotas.removeRow() + } ctx.eventEmitter && ctx.eventEmitter.emitRow(`row:delete`, appId, resp.row) gridSocket?.emitRowDeletion(ctx, resp.row) diff --git a/packages/server/src/api/controllers/row/internal.ts b/packages/server/src/api/controllers/row/internal.ts index cc903bd74a..54d9b6a536 100644 --- a/packages/server/src/api/controllers/row/internal.ts +++ b/packages/server/src/api/controllers/row/internal.ts @@ -85,13 +85,15 @@ export async function patch(ctx: UserCtx) { // the row has been updated, need to put it into the ctx ctx.request.body = row as any await userController.updateMetadata(ctx as any) - return { row: ctx.body as Row, table } + return { row: ctx.body as Row, table, oldRow } } - return finaliseRow(table, row, { + const result = await finaliseRow(table, row, { oldTable: dbTable, updateFormula: true, }) + + return { ...result, oldRow } } export async function find(ctx: UserCtx): Promise { diff --git a/packages/server/src/api/controllers/row/utils/basic.ts b/packages/server/src/api/controllers/row/utils/basic.ts index afb98d0255..bca2494ac3 100644 --- a/packages/server/src/api/controllers/row/utils/basic.ts +++ b/packages/server/src/api/controllers/row/utils/basic.ts @@ -99,7 +99,7 @@ export function basicProcessing({ row, tableName: table._id!, fieldName: internalColumn, 
- isLinked: false, + isLinked, }) } } diff --git a/packages/server/src/api/controllers/row/utils/sqlUtils.ts b/packages/server/src/api/controllers/row/utils/sqlUtils.ts index 372b8394ff..767916616c 100644 --- a/packages/server/src/api/controllers/row/utils/sqlUtils.ts +++ b/packages/server/src/api/controllers/row/utils/sqlUtils.ts @@ -1,5 +1,9 @@ import { + DatasourcePlusQueryResponse, + DSPlusOperation, FieldType, + isManyToOne, + isOneToMany, ManyToManyRelationshipFieldMetadata, RelationshipFieldMetadata, RelationshipsJson, @@ -91,12 +95,12 @@ export function buildExternalRelationships( ): RelationshipsJson[] { const relationships = [] for (let [fieldName, field] of Object.entries(table.schema)) { - if (field.type !== FieldType.LINK) { + if (field.type !== FieldType.LINK || !field.tableId) { continue } const { tableName: linkTableName } = breakExternalTableId(field.tableId) // no table to link to, this is not a valid relationships - if (!linkTableName || !tables[linkTableName]) { + if (!tables[linkTableName]) { continue } const linkTable = tables[linkTableName] @@ -108,7 +112,7 @@ export function buildExternalRelationships( // need to specify where to put this back into column: fieldName, } - if (isManyToMany(field)) { + if (isManyToMany(field) && field.through) { const { tableName: throughTableName } = breakExternalTableId( field.through ) @@ -118,7 +122,7 @@ export function buildExternalRelationships( definition.to = field.throughFrom || linkTable.primary[0] definition.fromPrimary = table.primary[0] definition.toPrimary = linkTable.primary[0] - } else { + } else if (isManyToOne(field) || isOneToMany(field)) { // if no foreign key specified then use the name of the field in other table definition.from = field.foreignKey || table.primary[0] definition.to = field.fieldName @@ -178,17 +182,27 @@ export function buildSqlFieldList( } let fields = extractRealFields(table) for (let field of Object.values(table.schema)) { - if (field.type !== FieldType.LINK || !opts?.relationships) { + if ( + field.type !== FieldType.LINK || + !opts?.relationships || + !field.tableId + ) { continue } const { tableName: linkTableName } = breakExternalTableId(field.tableId) - if (linkTableName) { - const linkTable = tables[linkTableName] - if (linkTable) { - const linkedFields = extractRealFields(linkTable, fields) - fields = fields.concat(linkedFields) - } + const linkTable = tables[linkTableName] + if (linkTable) { + const linkedFields = extractRealFields(linkTable, fields) + fields = fields.concat(linkedFields) } } return fields } + +export function isKnexEmptyReadResponse(resp: DatasourcePlusQueryResponse) { + return ( + !Array.isArray(resp) || + resp.length === 0 || + (DSPlusOperation.READ in resp[0] && resp[0].read === true) + ) +} diff --git a/packages/server/src/api/controllers/row/utils/utils.ts b/packages/server/src/api/controllers/row/utils/utils.ts index c2d62e0204..ae34034221 100644 --- a/packages/server/src/api/controllers/row/utils/utils.ts +++ b/packages/server/src/api/controllers/row/utils/utils.ts @@ -14,7 +14,7 @@ import { processDates, processFormulas, } from "../../../../utilities/rowProcessor" -import { updateRelationshipColumns } from "./sqlUtils" +import { isKnexEmptyReadResponse, updateRelationshipColumns } from "./sqlUtils" import { basicProcessing, generateIdForRow, @@ -137,7 +137,7 @@ export async function sqlOutputProcessing( relationships: RelationshipsJson[], opts?: { sqs?: boolean } ): Promise { - if (!Array.isArray(rows) || rows.length === 0 || rows[0].read === true) { + if 
(isKnexEmptyReadResponse(rows)) {
     return []
   }
   let finalRows: { [key: string]: Row } = {}
diff --git a/packages/server/src/api/controllers/row/views.ts b/packages/server/src/api/controllers/row/views.ts
index 13f6ce8eb0..63ce12f0ab 100644
--- a/packages/server/src/api/controllers/row/views.ts
+++ b/packages/server/src/api/controllers/row/views.ts
@@ -31,7 +31,7 @@ export async function searchView(
   // Enrich saved query with ephemeral query params.
   // We prevent searching on any fields that are saved as part of the query, as
   // that could let users find rows they should not be allowed to access.
-  let query = dataFilters.buildLuceneQuery(view.query || [])
+  let query = dataFilters.buildQuery(view.query || [])
   if (body.query) {
     // Extract existing fields
     const existingFields =
@@ -69,6 +69,7 @@ export async function searchView(
     limit: body.limit,
     bookmark: body.bookmark,
     paginate: body.paginate,
+    countRows: body.countRows,
   }

   const result = await sdk.rows.search(searchOptions)
diff --git a/packages/server/src/api/controllers/static/index.ts b/packages/server/src/api/controllers/static/index.ts
index 63240c8ecb..8db7816a9c 100644
--- a/packages/server/src/api/controllers/static/index.ts
+++ b/packages/server/src/api/controllers/static/index.ts
@@ -31,7 +31,7 @@ import {
 } from "@budibase/types"
 import {
   getAppMigrationVersion,
-  getLatestMigrationId,
+  getLatestEnabledMigrationId,
 } from "../../../appMigrations"

 import send from "koa-send"
@@ -133,7 +133,7 @@ const requiresMigration = async (ctx: Ctx) => {
     ctx.throw("AppId could not be found")
   }

-  const latestMigration = getLatestMigrationId()
+  const latestMigration = getLatestEnabledMigrationId()
   if (!latestMigration) {
     return false
   }
diff --git a/packages/server/src/api/controllers/table/external.ts b/packages/server/src/api/controllers/table/external.ts
index bd674d7d38..c3356919c8 100644
--- a/packages/server/src/api/controllers/table/external.ts
+++ b/packages/server/src/api/controllers/table/external.ts
@@ -16,14 +16,18 @@ import {
 import sdk from "../../../sdk"
 import { builderSocket } from "../../../websockets"
 import { inputProcessing } from "../../../utilities/rowProcessor"
+import { isEqual } from "lodash"

 function getDatasourceId(table: Table) {
   if (!table) {
-    throw "No table supplied"
+    throw new Error("No table supplied")
   }
   if (table.sourceId) {
     return table.sourceId
   }
+  if (!table._id) {
+    throw new Error("No table ID supplied")
+  }
   return breakExternalTableId(table._id).datasourceId
 }

@@ -82,15 +86,30 @@ export async function bulkImport(
   ctx: UserCtx
 ) {
   let table = await sdk.tables.getTable(ctx.params.tableId)
-  const { rows } = ctx.request.body
+  const { rows, identifierFields } = ctx.request.body
   const schema = table.schema

+  if (
+    identifierFields &&
+    identifierFields.length > 0 &&
+    !isEqual(identifierFields, table.primary)
+  ) {
+    // This is because we make use of the ON CONFLICT functionality in SQL
+    // databases, which only triggers when there's a conflict against a unique
+    // index. The only unique index we can count on at the moment in Budibase
+    // is the primary key, so this functionality always uses the primary key.
+    ctx.throw(
+      400,
+      "Identifier fields are not supported for bulk import into an external datasource."
+ ) + } + if (!rows || !isRows(rows) || !isSchema(schema)) { ctx.throw(400, "Provided data import information is invalid.") } const parsedRows = [] - for (const row of parse(rows, schema)) { + for (const row of parse(rows, table)) { const processed = await inputProcessing(ctx.user?._id, table, row, { noAutoRelationships: true, }) @@ -98,7 +117,7 @@ export async function bulkImport( table = processed.table } - await handleRequest(Operation.BULK_CREATE, table._id!, { + await handleRequest(Operation.BULK_UPSERT, table._id!, { rows: parsedRows, }) await events.rows.imported(table, parsedRows.length) diff --git a/packages/server/src/api/controllers/table/utils.ts b/packages/server/src/api/controllers/table/utils.ts index a42cfc43c3..0e9a32b294 100644 --- a/packages/server/src/api/controllers/table/utils.ts +++ b/packages/server/src/api/controllers/table/utils.ts @@ -178,7 +178,7 @@ export async function handleDataImport( } const db = context.getAppDB() - const data = parse(importRows, schema) + const data = parse(importRows, table) let finalData: any = await importToRows(data, table, user) diff --git a/packages/server/src/api/routes/row.ts b/packages/server/src/api/routes/row.ts index f1aa39a461..e443b2daeb 100644 --- a/packages/server/src/api/routes/row.ts +++ b/packages/server/src/api/routes/row.ts @@ -86,6 +86,7 @@ router router.post( "/api/v2/views/:viewId/search", + internalSearchValidator(), authorizedResource(PermissionType.VIEW, PermissionLevel.READ, "viewId"), rowController.views.searchView ) diff --git a/packages/server/src/api/routes/tests/__snapshots__/datasource.spec.ts.snap b/packages/server/src/api/routes/tests/__snapshots__/datasource.spec.ts.snap deleted file mode 100644 index 57d79db24b..0000000000 --- a/packages/server/src/api/routes/tests/__snapshots__/datasource.spec.ts.snap +++ /dev/null @@ -1,91 +0,0 @@ -// Jest Snapshot v1, https://goo.gl/fbAQLP - -exports[`/datasources fetch returns all the datasources from the server 1`] = ` -[ - { - "config": {}, - "entities": [ - { - "_id": "ta_users", - "_rev": "1-73b7912e6cbdd3d696febc60f3715844", - "createdAt": "2020-01-01T00:00:00.000Z", - "name": "Users", - "primaryDisplay": "email", - "schema": { - "email": { - "constraints": { - "email": true, - "length": { - "maximum": "", - }, - "presence": true, - "type": "string", - }, - "name": "email", - "type": "string", - }, - "firstName": { - "constraints": { - "presence": false, - "type": "string", - }, - "name": "firstName", - "type": "string", - }, - "lastName": { - "constraints": { - "presence": false, - "type": "string", - }, - "name": "lastName", - "type": "string", - }, - "roleId": { - "constraints": { - "inclusion": [ - "ADMIN", - "POWER", - "BASIC", - "PUBLIC", - ], - "presence": false, - "type": "string", - }, - "name": "roleId", - "type": "options", - }, - "status": { - "constraints": { - "inclusion": [ - "active", - "inactive", - ], - "presence": false, - "type": "string", - }, - "name": "status", - "type": "options", - }, - }, - "sourceId": "bb_internal", - "sourceType": "internal", - "type": "table", - "updatedAt": "2020-01-01T00:00:00.000Z", - "views": {}, - }, - ], - "name": "Budibase DB", - "source": "BUDIBASE", - "type": "budibase", - }, - { - "config": {}, - "createdAt": "2020-01-01T00:00:00.000Z", - "isSQL": true, - "name": "Test", - "source": "POSTGRES", - "type": "datasource", - "updatedAt": "2020-01-01T00:00:00.000Z", - }, -] -`; diff --git a/packages/server/src/api/routes/tests/application.spec.ts b/packages/server/src/api/routes/tests/application.spec.ts index 
7f89a5cac2..13b7451a7e 100644
--- a/packages/server/src/api/routes/tests/application.spec.ts
+++ b/packages/server/src/api/routes/tests/application.spec.ts
@@ -334,6 +334,12 @@ describe("/applications", () => {
       expect(events.app.deleted).toHaveBeenCalledTimes(1)
       expect(events.app.unpublished).toHaveBeenCalledTimes(1)
     })
+
+    it("should be able to delete an app after SQS_SEARCH_ENABLE has been set but app hasn't been migrated", async () => {
+      await config.withCoreEnv({ SQS_SEARCH_ENABLE: "true" }, async () => {
+        await config.api.application.delete(app.appId)
+      })
+    })
   })

   describe("POST /api/applications/:appId/duplicate", () => {
diff --git a/packages/server/src/api/routes/tests/auth.spec.js b/packages/server/src/api/routes/tests/auth.spec.js
index 5fbead99b7..64952db17c 100644
--- a/packages/server/src/api/routes/tests/auth.spec.js
+++ b/packages/server/src/api/routes/tests/auth.spec.js
@@ -1,5 +1,8 @@
 const setup = require("./utilities")
-const { generateUserMetadataID } = require("../../../db/utils")
+const {
+  generateUserMetadataID,
+  getGlobalIDFromUserMetadataID,
+} = require("../../../db/utils")

 describe("/authenticate", () => {
   let request = setup.getRequest()
@@ -20,5 +23,16 @@
         .expect(200)
       expect(res.body._id).toEqual(generateUserMetadataID(config.user._id))
     })
+
+    it("should contain the global user ID", async () => {
+      const res = await request
+        .get(`/api/self`)
+        .set(config.defaultHeaders())
+        .expect("Content-Type", /json/)
+        .expect(200)
+      expect(res.body.globalId).toEqual(
+        getGlobalIDFromUserMetadataID(config.user._id)
+      )
+    })
   })
 })
diff --git a/packages/server/src/api/routes/tests/automation.spec.ts b/packages/server/src/api/routes/tests/automation.spec.ts
index 711cfb8d4f..8cbd14d8b3 100644
--- a/packages/server/src/api/routes/tests/automation.spec.ts
+++ b/packages/server/src/api/routes/tests/automation.spec.ts
@@ -13,6 +13,7 @@ import { events } from "@budibase/backend-core"
 import sdk from "../../../sdk"
 import { Automation } from "@budibase/types"
 import { mocks } from "@budibase/backend-core/tests"
+import { FilterConditions } from "../../../automations/steps/filter"

 const MAX_RETRIES = 4
 let {
@@ -21,6 +22,7 @@ let {
   automationTrigger,
   automationStep,
   collectAutomation,
+  filterAutomation,
 } = setup.structures

 describe("/automations", () => {
@@ -155,7 +157,12 @@ describe("/automations", () => {
     automation.appId = config.appId
     automation = await config.createAutomation(automation)
     await setup.delay(500)
-    const res = await testAutomation(config, automation)
+    const res = await testAutomation(config, automation, {
+      row: {
+        name: "Test",
+        description: "TEST",
+      },
+    })
     expect(events.automation.tested).toHaveBeenCalledTimes(1)
     // this looks a bit mad but we don't actually have a way to wait for a response from the automation to
     // know that it has finished all of its actions - this is currently the best way
@@ -436,4 +443,38 @@ describe("/automations", () => {
       expect(res).toEqual(true)
     })
   })
+
+  describe("Update Row Old / New Row comparison", () => {
+    it.each([
+      { oldCity: "asdsadsadsad", newCity: "new" },
+      { oldCity: "Belfast", newCity: "Belfast" },
+    ])(
+      "triggers an update row automation and compares new to old rows with old city '%s' and new city '%s'",
+      async ({ oldCity, newCity }) => {
+        const expectedResult = oldCity === newCity
+
+        let table = await config.createTable()
+
+        let automation = await filterAutomation()
+        automation.definition.trigger.inputs.tableId = table._id
+        
automation.definition.steps[0].inputs = { + condition: FilterConditions.EQUAL, + field: "{{ trigger.row.City }}", + value: "{{ trigger.oldRow.City }}", + } + automation.appId = config.appId! + automation = await config.createAutomation(automation) + let triggerInputs = { + oldRow: { + City: oldCity, + }, + row: { + City: newCity, + }, + } + const res = await testAutomation(config, automation, triggerInputs) + expect(res.body.steps[1].outputs.result).toEqual(expectedResult) + } + ) + }) }) diff --git a/packages/server/src/api/routes/tests/datasource.spec.ts b/packages/server/src/api/routes/tests/datasource.spec.ts index f2cea90675..6f249f5016 100644 --- a/packages/server/src/api/routes/tests/datasource.spec.ts +++ b/packages/server/src/api/routes/tests/datasource.spec.ts @@ -4,14 +4,12 @@ import { getCachedVariable } from "../../../threads/utils" import { context, events } from "@budibase/backend-core" import sdk from "../../../sdk" -import tk from "timekeeper" -import { mocks } from "@budibase/backend-core/tests" +import { generator } from "@budibase/backend-core/tests" import { Datasource, FieldSchema, BBReferenceFieldSubType, FieldType, - QueryPreview, RelationshipType, SourceName, Table, @@ -21,36 +19,34 @@ import { import { DatabaseName, getDatasource } from "../../../integrations/tests/utils" import { tableForDatasource } from "../../../tests/utilities/structures" -tk.freeze(mocks.date.MOCK_DATE) - -let { basicDatasource } = setup.structures - describe("/datasources", () => { - let request = setup.getRequest() - let config = setup.getConfig() - let datasource: any + const config = setup.getConfig() + let datasource: Datasource + beforeAll(async () => { + await config.init() + }) afterAll(setup.afterAll) - async function setupTest() { - await config.init() - datasource = await config.createDatasource() + beforeEach(async () => { + datasource = await config.api.datasource.create({ + type: "datasource", + name: "Test", + source: SourceName.POSTGRES, + config: {}, + }) jest.clearAllMocks() - } - - beforeAll(setupTest) + }) describe("create", () => { it("should create a new datasource", async () => { - const res = await request - .post(`/api/datasources`) - .send(basicDatasource()) - .set(config.defaultHeaders()) - .expect("Content-Type", /json/) - .expect(200) - - expect(res.body.datasource.name).toEqual("Test") - expect(res.body.errors).toEqual({}) + const ds = await config.api.datasource.create({ + type: "datasource", + name: "Test", + source: SourceName.POSTGRES, + config: {}, + }) + expect(ds.name).toEqual("Test") expect(events.datasource.created).toHaveBeenCalledTimes(1) }) @@ -72,88 +68,71 @@ describe("/datasources", () => { }) }) - describe("update", () => { - it("should update an existing datasource", async () => { - datasource.name = "Updated Test" - const res = await request - .put(`/api/datasources/${datasource._id}`) - .send(datasource) - .set(config.defaultHeaders()) - .expect("Content-Type", /json/) - .expect(200) + describe("dynamic variables", () => { + it("should invalidate changed or removed variables", async () => { + let datasource = await config.api.datasource.create({ + type: "datasource", + name: "Rest", + source: SourceName.REST, + config: {}, + }) - expect(res.body.datasource.name).toEqual("Updated Test") - expect(res.body.errors).toBeUndefined() - expect(events.datasource.updated).toHaveBeenCalledTimes(1) - }) + const query = await config.api.query.save({ + datasourceId: datasource._id!, + fields: { + path: "www.google.com", + }, + parameters: [], + transformer: 
null, + queryVerb: "read", + name: datasource.name!, + schema: {}, + readable: true, + }) - describe("dynamic variables", () => { - async function preview( - datasource: any, - fields: { path: string; queryString: string } - ) { - const queryPreview: QueryPreview = { - fields, - datasourceId: datasource._id, - parameters: [], - transformer: null, - queryVerb: "read", - name: datasource.name, - schema: {}, - readable: true, - } - return config.api.query.preview(queryPreview) - } + datasource = await config.api.datasource.update({ + ...datasource, + config: { + dynamicVariables: [ + { + queryId: query._id, + name: "variable3", + value: "{{ data.0.[value] }}", + }, + ], + }, + }) - it("should invalidate changed or removed variables", async () => { - const { datasource, query } = await config.dynamicVariableDatasource() - // preview once to cache variables - await preview(datasource, { + // preview once to cache variables + await config.api.query.preview({ + fields: { path: "www.example.com", queryString: "test={{ variable3 }}", - }) - // check variables in cache - let contents = await getCachedVariable(query._id!, "variable3") - expect(contents.rows.length).toEqual(1) - - // update the datasource to remove the variables - datasource.config!.dynamicVariables = [] - const res = await request - .put(`/api/datasources/${datasource._id}`) - .send(datasource) - .set(config.defaultHeaders()) - .expect("Content-Type", /json/) - .expect(200) - expect(res.body.errors).toBeUndefined() - - // check variables no longer in cache - contents = await getCachedVariable(query._id!, "variable3") - expect(contents).toBe(null) + }, + datasourceId: datasource._id!, + parameters: [], + transformer: null, + queryVerb: "read", + name: datasource.name!, + schema: {}, + readable: true, }) + + // check variables in cache + let contents = await getCachedVariable(query._id!, "variable3") + expect(contents.rows.length).toEqual(1) + + // update the datasource to remove the variables + datasource.config!.dynamicVariables = [] + await config.api.datasource.update(datasource) + + // check variables no longer in cache + contents = await getCachedVariable(query._id!, "variable3") + expect(contents).toBe(null) }) }) - describe("fetch", () => { - beforeAll(setupTest) - - it("returns all the datasources from the server", async () => { - const res = await request - .get(`/api/datasources`) - .set(config.defaultHeaders()) - .expect("Content-Type", /json/) - .expect(200) - - const datasources = res.body - - // remove non-deterministic fields - for (let source of datasources) { - delete source._id - delete source._rev - } - - expect(datasources).toMatchSnapshot() - }) - + describe("permissions", () => { it("should apply authorization to endpoint", async () => { await checkBuilderEndpoint({ config, @@ -161,41 +140,8 @@ describe("/datasources", () => { url: `/api/datasources`, }) }) - }) - describe("find", () => { - it("should be able to find a datasource", async () => { - const res = await request - .get(`/api/datasources/${datasource._id}`) - .set(config.defaultHeaders()) - .expect(200) - expect(res.body._rev).toBeDefined() - expect(res.body._id).toEqual(datasource._id) - }) - }) - - describe("destroy", () => { - beforeAll(setupTest) - - it("deletes queries for the datasource after deletion and returns a success message", async () => { - await config.createQuery() - - await request - .delete(`/api/datasources/${datasource._id}/${datasource._rev}`) - .set(config.defaultHeaders()) - .expect(200) - - const res = await request - 
.get(`/api/datasources`) - .set(config.defaultHeaders()) - .expect("Content-Type", /json/) - .expect(200) - - expect(res.body.length).toEqual(1) - expect(events.datasource.deleted).toHaveBeenCalledTimes(1) - }) - - it("should apply authorization to endpoint", async () => { + it("should apply authorization to delete endpoint", async () => { await checkBuilderEndpoint({ config, method: "DELETE", @@ -204,175 +150,296 @@ describe("/datasources", () => { }) }) - describe("check secret replacement", () => { - async function makeDatasource() { - datasource = basicDatasource() - datasource.datasource.config.password = "testing" - const res = await request - .post(`/api/datasources`) - .send(datasource) - .set(config.defaultHeaders()) - .expect("Content-Type", /json/) - .expect(200) - return res.body.datasource - } - - it("should save a datasource with password", async () => { - const datasource = await makeDatasource() - expect(datasource.config.password).toBe("--secret-value--") - }) - - it("should not the password on update with the --secret-value--", async () => { - const datasource = await makeDatasource() - await request - .put(`/api/datasources/${datasource._id}`) - .send(datasource) - .set(config.defaultHeaders()) - .expect("Content-Type", /json/) - .expect(200) - await context.doInAppContext(config.getAppId(), async () => { - const dbDatasource: any = await sdk.datasources.get(datasource._id) - expect(dbDatasource.config.password).toBe("testing") - }) - }) - }) - describe.each([ [DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)], [DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)], [DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)], [DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)], - ])("fetch schema (%s)", (_, dsProvider) => { - beforeAll(async () => { - datasource = await config.api.datasource.create(await dsProvider) + ])("%s", (_, dsProvider) => { + let rawDatasource: Datasource + beforeEach(async () => { + rawDatasource = await dsProvider + datasource = await config.api.datasource.create(rawDatasource) }) - it("fetching schema will not drop tables or columns", async () => { - const datasourceId = datasource!._id! + describe("get", () => { + it("should be able to get a datasource", async () => { + const ds = await config.api.datasource.get(datasource._id!) + expect(ds).toEqual({ + config: expect.any(Object), + plus: datasource.plus, + source: datasource.source, + isSQL: true, + type: "datasource_plus", + _id: datasource._id, + _rev: expect.any(String), + createdAt: expect.any(String), + updatedAt: expect.any(String), + }) + }) - const simpleTable = await config.api.table.save( - tableForDatasource(datasource, { - name: "simple", - schema: { - name: { - name: "name", - type: FieldType.STRING, + it("should not return database password", async () => { + const ds = await config.api.datasource.get(datasource._id!) 
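+        // what this asserts: reads through the public API mask secret config
+        // fields with the fixed "--secret-value--" placeholder, so the real
+        // password never leaves the server; the update test further down
+        // checks the other half of the round-trip, i.e. that echoing the
+        // placeholder back on save does not clobber the stored secret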
+ expect(ds.config!.password).toBe("--secret-value--") + }) + }) + + describe("list", () => { + it("returns all the datasources", async () => { + const datasources = await config.api.datasource.fetch() + expect(datasources).toContainEqual(expect.objectContaining(datasource)) + }) + }) + + describe("put", () => { + it("should update an existing datasource", async () => { + const newName = generator.guid() + datasource.name = newName + const updatedDs = await config.api.datasource.update(datasource) + expect(updatedDs.name).toEqual(newName) + expect(events.datasource.updated).toHaveBeenCalledTimes(1) + }) + + it("should not overwrite database password with --secret-value--", async () => { + const password = await context.doInAppContext( + config.getAppId(), + async () => { + const ds = await sdk.datasources.get(datasource._id!) + return ds.config!.password + } + ) + + expect(password).not.toBe("--secret-value--") + + const ds = await config.api.datasource.get(datasource._id!) + expect(ds.config!.password).toBe("--secret-value--") + + await config.api.datasource.update( + await config.api.datasource.get(datasource._id!) + ) + + const newPassword = await context.doInAppContext( + config.getAppId(), + async () => { + const ds = await sdk.datasources.get(datasource._id!) + return ds.config!.password + } + ) + + expect(newPassword).not.toBe("--secret-value--") + expect(newPassword).toBe(password) + }) + }) + + describe("destroy", () => { + it("deletes queries for the datasource after deletion and returns a success message", async () => { + await config.api.query.save({ + datasourceId: datasource._id!, + name: "Test Query", + parameters: [], + fields: {}, + schema: {}, + queryVerb: "read", + transformer: null, + readable: true, + }) + + await config.api.datasource.delete(datasource) + const datasources = await config.api.datasource.fetch() + expect(datasources).not.toContainEqual( + expect.objectContaining(datasource) + ) + expect(events.datasource.deleted).toHaveBeenCalledTimes(1) + }) + }) + + describe("schema", () => { + it("fetching schema will not drop tables or columns", async () => { + const datasourceId = datasource!._id! 
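+        // the intent of this test: re-fetching an external datasource's
+        // schema must be non-destructive, so every table and column already
+        // known to Budibase (the "simple" and "full" tables created below)
+        // has to survive the refresh; the objectContaining assertions at the
+        // end check that each existing field is still present afterwards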
+ + const simpleTable = await config.api.table.save( + tableForDatasource(datasource, { + name: "simple", + schema: { + name: { + name: "name", + type: FieldType.STRING, + }, + }, + }) + ) + + const stringName = "string" + const fullSchema: { + [type in SupportedSqlTypes]: FieldSchema & { type: type } + } = { + [FieldType.STRING]: { + name: stringName, + type: FieldType.STRING, + constraints: { + presence: true, }, }, - }) - ) - - const fullSchema: { - [type in SupportedSqlTypes]: FieldSchema & { type: type } - } = { - [FieldType.STRING]: { - name: "string", - type: FieldType.STRING, - constraints: { - presence: true, + [FieldType.LONGFORM]: { + name: "longform", + type: FieldType.LONGFORM, }, - }, - [FieldType.LONGFORM]: { - name: "longform", - type: FieldType.LONGFORM, - }, - [FieldType.OPTIONS]: { - name: "options", - type: FieldType.OPTIONS, - constraints: { - presence: { allowEmpty: false }, - }, - }, - [FieldType.NUMBER]: { - name: "number", - type: FieldType.NUMBER, - }, - [FieldType.BOOLEAN]: { - name: "boolean", - type: FieldType.BOOLEAN, - }, - [FieldType.ARRAY]: { - name: "array", - type: FieldType.ARRAY, - }, - [FieldType.DATETIME]: { - name: "datetime", - type: FieldType.DATETIME, - dateOnly: true, - timeOnly: false, - }, - [FieldType.LINK]: { - name: "link", - type: FieldType.LINK, - tableId: simpleTable._id!, - relationshipType: RelationshipType.ONE_TO_MANY, - fieldName: "link", - }, - [FieldType.FORMULA]: { - name: "formula", - type: FieldType.FORMULA, - formula: "any formula", - }, - [FieldType.BARCODEQR]: { - name: "barcodeqr", - type: FieldType.BARCODEQR, - }, - [FieldType.BIGINT]: { - name: "bigint", - type: FieldType.BIGINT, - }, - [FieldType.BB_REFERENCE]: { - name: "bb_reference", - type: FieldType.BB_REFERENCE, - subtype: BBReferenceFieldSubType.USER, - }, - [FieldType.BB_REFERENCE_SINGLE]: { - name: "bb_reference_single", - type: FieldType.BB_REFERENCE_SINGLE, - subtype: BBReferenceFieldSubType.USER, - }, - } - - await config.api.table.save( - tableForDatasource(datasource, { - name: "full", - schema: fullSchema, - }) - ) - - const persisted = await config.api.datasource.get(datasourceId) - await config.api.datasource.fetchSchema(datasourceId) - - const updated = await config.api.datasource.get(datasourceId) - const expected: Datasource = { - ...persisted, - entities: - persisted?.entities && - Object.entries(persisted.entities).reduce>( - (acc, [tableName, table]) => { - acc[tableName] = { - ...table, - primaryDisplay: expect.not.stringMatching( - new RegExp(`^${table.primaryDisplay || ""}$`) - ), - schema: Object.entries(table.schema).reduce( - (acc, [fieldName, field]) => { - acc[fieldName] = expect.objectContaining({ - ...field, - }) - return acc - }, - {} - ), - } - return acc + [FieldType.OPTIONS]: { + name: "options", + type: FieldType.OPTIONS, + constraints: { + presence: { allowEmpty: false }, }, - {} - ), + }, + [FieldType.NUMBER]: { + name: "number", + type: FieldType.NUMBER, + }, + [FieldType.BOOLEAN]: { + name: "boolean", + type: FieldType.BOOLEAN, + }, + [FieldType.ARRAY]: { + name: "array", + type: FieldType.ARRAY, + }, + [FieldType.DATETIME]: { + name: "datetime", + type: FieldType.DATETIME, + dateOnly: true, + timeOnly: false, + }, + [FieldType.LINK]: { + name: "link", + type: FieldType.LINK, + tableId: simpleTable._id!, + relationshipType: RelationshipType.ONE_TO_MANY, + fieldName: "link", + }, + [FieldType.FORMULA]: { + name: "formula", + type: FieldType.FORMULA, + formula: "any formula", + }, + [FieldType.BARCODEQR]: { + name: "barcodeqr", + 
type: FieldType.BARCODEQR, + }, + [FieldType.BIGINT]: { + name: "bigint", + type: FieldType.BIGINT, + }, + [FieldType.BB_REFERENCE]: { + name: "bb_reference", + type: FieldType.BB_REFERENCE, + subtype: BBReferenceFieldSubType.USER, + }, + [FieldType.BB_REFERENCE_SINGLE]: { + name: "bb_reference_single", + type: FieldType.BB_REFERENCE_SINGLE, + subtype: BBReferenceFieldSubType.USER, + }, + } - _rev: expect.any(String), - } - expect(updated).toEqual(expected) + await config.api.table.save( + tableForDatasource(datasource, { + name: "full", + schema: fullSchema, + }) + ) + + const persisted = await config.api.datasource.get(datasourceId) + await config.api.datasource.fetchSchema({ datasourceId }) + + const updated = await config.api.datasource.get(datasourceId) + const expected: Datasource = { + ...persisted, + entities: + persisted?.entities && + Object.entries(persisted.entities).reduce>( + (acc, [tableName, table]) => { + acc[tableName] = { + ...table, + primaryDisplay: expect.not.stringMatching( + new RegExp(`^${table.primaryDisplay || ""}$`) + ), + schema: Object.entries(table.schema).reduce( + (acc, [fieldName, field]) => { + // the constraint will be unset - as the DB doesn't recognise it as not null + if (fieldName === stringName) { + field.constraints = {} + } + acc[fieldName] = expect.objectContaining({ + ...field, + }) + return acc + }, + {} + ), + } + return acc + }, + {} + ), + + _rev: expect.any(String), + updatedAt: expect.any(String), + } + expect(updated).toEqual(expected) + }) + }) + + describe("verify", () => { + it("should be able to verify the connection", async () => { + await config.api.datasource.verify( + { + datasource: rawDatasource, + }, + { + body: { + connected: true, + }, + } + ) + }) + + it("should state an invalid datasource cannot connect", async () => { + await config.api.datasource.verify( + { + datasource: { + ...rawDatasource, + config: { + ...rawDatasource.config, + password: "wrongpassword", + }, + }, + }, + { + body: { + connected: false, + error: /.*/, // error message differs between databases + }, + } + ) + }) + }) + + describe("info", () => { + it("should fetch information about postgres datasource", async () => { + const table = await config.api.table.save( + tableForDatasource(datasource, { + schema: { + name: { + name: "name", + type: FieldType.STRING, + }, + }, + }) + ) + + const info = await config.api.datasource.info(datasource) + expect(info.tableNames).toContain(table.name) + }) }) }) }) diff --git a/packages/server/src/api/routes/tests/queries/generic-sql.spec.ts b/packages/server/src/api/routes/tests/queries/generic-sql.spec.ts index e8a38dcfaa..3ed19f5eee 100644 --- a/packages/server/src/api/routes/tests/queries/generic-sql.spec.ts +++ b/packages/server/src/api/routes/tests/queries/generic-sql.spec.ts @@ -10,37 +10,11 @@ import * as setup from "../utilities" import { DatabaseName, getDatasource, - rawQuery, + knexClient, } from "../../../../integrations/tests/utils" import { Expectations } from "src/tests/utilities/api/base" import { events } from "@budibase/backend-core" - -const createTableSQL: Record = { - [SourceName.POSTGRES]: ` - CREATE TABLE test_table ( - id serial PRIMARY KEY, - name VARCHAR ( 50 ) NOT NULL, - birthday TIMESTAMP, - number INT - );`, - [SourceName.MYSQL]: ` - CREATE TABLE test_table ( - id INT AUTO_INCREMENT PRIMARY KEY, - name VARCHAR(50) NOT NULL, - birthday TIMESTAMP, - number INT - );`, - [SourceName.SQL_SERVER]: ` - CREATE TABLE test_table ( - id INT IDENTITY(1,1) PRIMARY KEY, - name NVARCHAR(50) NOT NULL, - 
birthday DATETIME,
-      number INT
-    );`,
-}
-
-const insertSQL = `INSERT INTO test_table (name) VALUES ('one'), ('two'), ('three'), ('four'), ('five')`
-const dropTableSQL = `DROP TABLE test_table;`
+import { Knex } from "knex"

 describe.each(
   [
@@ -53,6 +27,7 @@
   const config = setup.getConfig()
   let rawDatasource: Datasource
   let datasource: Datasource
+  let client: Knex

   async function createQuery(
     query: Partial<Query>,
@@ -82,21 +57,34 @@
     rawDatasource = await dsProvider
     datasource = await config.api.datasource.create(rawDatasource)

-    // The Datasource API does not return the password, but we need
-    // it later to connect to the underlying database, so we fill it
-    // back in here.
+    // The Datasource API does not return the password, but we need it later to
+    // connect to the underlying database, so we fill it back in here.
     datasource.config!.password = rawDatasource.config!.password

-    await rawQuery(datasource, createTableSQL[datasource.source])
-    await rawQuery(datasource, insertSQL)
+    client = await knexClient(rawDatasource)
+
+    await client.schema.dropTableIfExists("test_table")
+    await client.schema.createTable("test_table", table => {
+      table.increments("id").primary()
+      table.string("name")
+      table.timestamp("birthday")
+      table.integer("number")
+    })
+
+    await client("test_table").insert([
+      { name: "one" },
+      { name: "two" },
+      { name: "three" },
+      { name: "four" },
+      { name: "five" },
+    ])

     jest.clearAllMocks()
   })

   afterEach(async () => {
     const ds = await config.api.datasource.get(datasource._id!)
-    config.api.datasource.delete(ds)
-    await rawQuery(datasource, dropTableSQL)
+    await config.api.datasource.delete(ds)
   })

   afterAll(async () => {
@@ -207,7 +195,7 @@
       },
     })

-    await config.publish()
+    await config.api.application.publish(config.getAppId())

     const prodQuery = await config.api.query.getProd(query._id!)
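     // publishing copies the dev query across to the published app, and the
     // assertion below pins down that the copy keeps the same _id, which is
     // what lets a published app keep referencing the query it was built with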
expect(prodQuery._id).toEqual(query._id) @@ -262,6 +250,67 @@ describe.each( expect(events.query.previewed).toHaveBeenCalledTimes(1) }) + it("should update schema when column type changes from number to string", async () => { + const tableName = "schema_change_test" + await client.schema.dropTableIfExists(tableName) + + await client.schema.createTable(tableName, table => { + table.increments("id").primary() + table.string("name") + table.integer("data") + }) + + await client(tableName).insert({ + name: "test", + data: 123, + }) + + const firstPreview = await config.api.query.preview({ + datasourceId: datasource._id!, + name: "Test Query", + queryVerb: "read", + fields: { + sql: `SELECT * FROM ${tableName}`, + }, + parameters: [], + transformer: "return data", + schema: {}, + readable: true, + }) + + expect(firstPreview.schema).toEqual( + expect.objectContaining({ + data: { type: "number", name: "data" }, + }) + ) + + await client.schema.alterTable(tableName, table => { + table.string("data").alter() + }) + + await client(tableName).update({ + data: "string value", + }) + + const secondPreview = await config.api.query.preview({ + datasourceId: datasource._id!, + name: "Test Query", + queryVerb: "read", + fields: { + sql: `SELECT * FROM ${tableName}`, + }, + parameters: [], + transformer: "return data", + schema: firstPreview.schema, + readable: true, + }) + + expect(secondPreview.schema).toEqual( + expect.objectContaining({ + data: { type: "string", name: "data" }, + }) + ) + }) it("should work with static variables", async () => { await config.api.datasource.update({ ...datasource, @@ -429,11 +478,11 @@ describe.each( }, ]) - const rows = await rawQuery( - datasource, - "SELECT * FROM test_table WHERE name = 'baz'" - ) + const rows = await client("test_table").where({ name: "baz" }).select() expect(rows).toHaveLength(1) + for (const row of rows) { + expect(row).toMatchObject({ name: "baz" }) + } }) it("should not allow handlebars as parameters", async () => { @@ -490,11 +539,14 @@ describe.each( expect(result.data).toEqual([{ created: true }]) - const rows = await rawQuery( - datasource, - `SELECT * FROM test_table WHERE birthday = '${date.toISOString()}'` - ) + const rows = await client("test_table") + .where({ birthday: datetimeStr }) + .select() expect(rows).toHaveLength(1) + + for (const row of rows) { + expect(new Date(row.birthday)).toEqual(date) + } } ) @@ -522,10 +574,9 @@ describe.each( expect(result.data).toEqual([{ created: true }]) - const rows = await rawQuery( - datasource, - `SELECT * FROM test_table WHERE name = '${notDateStr}'` - ) + const rows = await client("test_table") + .where({ name: notDateStr }) + .select() expect(rows).toHaveLength(1) } ) @@ -660,10 +711,7 @@ describe.each( }, ]) - const rows = await rawQuery( - datasource, - "SELECT * FROM test_table WHERE id = 1" - ) + const rows = await client("test_table").where({ id: 1 }).select() expect(rows).toEqual([ { id: 1, name: "foo", birthday: null, number: null }, ]) @@ -731,10 +779,7 @@ describe.each( }, ]) - const rows = await rawQuery( - datasource, - "SELECT * FROM test_table WHERE id = 1" - ) + const rows = await client("test_table").where({ id: 1 }).select() expect(rows).toHaveLength(0) }) }) @@ -750,6 +795,7 @@ describe.each( name: entityId, schema: {}, type: "table", + primary: ["id"], sourceId: datasource._id!, sourceType: TableSourceType.EXTERNAL, }, diff --git a/packages/server/src/api/routes/tests/queries/mongodb.spec.ts b/packages/server/src/api/routes/tests/queries/mongodb.spec.ts index 
c79ae68a36..4822729478 100644 --- a/packages/server/src/api/routes/tests/queries/mongodb.spec.ts +++ b/packages/server/src/api/routes/tests/queries/mongodb.spec.ts @@ -137,6 +137,67 @@ describe("/queries", () => { }) }) + it("should update schema when structure changes from object to array", async () => { + const name = generator.guid() + + await withCollection(async collection => { + await collection.insertOne({ name, field: { subfield: "value" } }) + }) + + const firstPreview = await config.api.query.preview({ + name: "Test Query", + datasourceId: datasource._id!, + fields: { + json: { name: { $eq: name } }, + extra: { + collection, + actionType: "findOne", + }, + }, + schema: {}, + queryVerb: "read", + parameters: [], + transformer: "return data", + readable: true, + }) + + expect(firstPreview.schema).toEqual( + expect.objectContaining({ + field: { type: "json", name: "field" }, + }) + ) + + await withCollection(async collection => { + await collection.updateOne( + { name }, + { $set: { field: ["value1", "value2"] } } + ) + }) + + const secondPreview = await config.api.query.preview({ + name: "Test Query", + datasourceId: datasource._id!, + fields: { + json: { name: { $eq: name } }, + extra: { + collection, + actionType: "findOne", + }, + }, + schema: firstPreview.schema, + queryVerb: "read", + parameters: [], + transformer: "return data", + readable: true, + }) + + expect(secondPreview.schema).toEqual( + expect.objectContaining({ + field: { type: "array", name: "field" }, + }) + ) + }) + it("should generate a nested schema based on all of the nested items", async () => { const name = generator.guid() const item = { diff --git a/packages/server/src/api/routes/tests/queries/rest.spec.ts b/packages/server/src/api/routes/tests/queries/rest.spec.ts index 1d5483017b..29bbbf3a61 100644 --- a/packages/server/src/api/routes/tests/queries/rest.spec.ts +++ b/packages/server/src/api/routes/tests/queries/rest.spec.ts @@ -92,6 +92,61 @@ describe("rest", () => { expect(cached.rows[0].name).toEqual("one") }) + it("should update schema when structure changes from JSON to array", async () => { + const datasource = await config.api.datasource.create({ + name: generator.guid(), + type: "test", + source: SourceName.REST, + config: {}, + }) + + nock("http://www.example.com") + .get("/") + .reply(200, [{ obj: {}, id: "1" }]) + + const firstResponse = await config.api.query.preview({ + datasourceId: datasource._id!, + name: "test query", + parameters: [], + queryVerb: "read", + transformer: "", + schema: {}, + readable: true, + fields: { + path: "www.example.com", + }, + }) + + expect(firstResponse.schema).toEqual({ + obj: { type: "json", name: "obj" }, + id: { type: "string", name: "id" }, + }) + + nock.cleanAll() + + nock("http://www.example.com") + .get("/") + .reply(200, [{ obj: [], id: "1" }]) + + const secondResponse = await config.api.query.preview({ + datasourceId: datasource._id!, + name: "test query", + parameters: [], + queryVerb: "read", + transformer: "", + schema: firstResponse.schema, + readable: true, + fields: { + path: "www.example.com", + }, + }) + + expect(secondResponse.schema).toEqual({ + obj: { type: "array", name: "obj" }, + id: { type: "string", name: "id" }, + }) + }) + it("should parse global and query level header mappings", async () => { const datasource = await config.api.datasource.create({ name: generator.guid(), diff --git a/packages/server/src/api/routes/tests/row.spec.ts b/packages/server/src/api/routes/tests/row.spec.ts index 4801ac4c55..b6e3edf5ff 100644 --- 
a/packages/server/src/api/routes/tests/row.spec.ts +++ b/packages/server/src/api/routes/tests/row.spec.ts @@ -1,6 +1,11 @@ -import { DatabaseName, getDatasource } from "../../../integrations/tests/utils" +import { + DatabaseName, + getDatasource, + knexClient, +} from "../../../integrations/tests/utils" import tk from "timekeeper" +import emitter from "../../../../src/events" import { outputProcessing } from "../../../utilities/rowProcessor" import * as setup from "./utilities" import { context, InternalTable, tenancy } from "@budibase/backend-core" @@ -24,13 +29,38 @@ import { StaticQuotaName, Table, TableSourceType, + UpdatedRowEventEmitter, + TableSchema, } from "@budibase/types" import { generator, mocks } from "@budibase/backend-core/tests" import _, { merge } from "lodash" import * as uuid from "uuid" +import { Knex } from "knex" const timestamp = new Date("2023-01-26T11:48:57.597Z").toISOString() tk.freeze(timestamp) +interface WaitOptions { + name: string + matchFn?: (event: any) => boolean +} +async function waitForEvent( + opts: WaitOptions, + callback: () => Promise<void> +): Promise<any> { + const p = new Promise((resolve: any) => { + const listener = (event: any) => { + if (opts.matchFn && !opts.matchFn(event)) { + return + } + resolve(event) + emitter.off(opts.name, listener) + } + emitter.on(opts.name, listener) + }) + + await callback() + return await p +} describe.each([ ["internal", undefined], @@ -38,19 +68,23 @@ [DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)], [DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)], [DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)], -])("/rows (%s)", (__, dsProvider) => { +])("/rows (%s)", (providerType, dsProvider) => { const isInternal = dsProvider === undefined + const isMSSQL = providerType === DatabaseName.SQL_SERVER const config = setup.getConfig() let table: Table let datasource: Datasource | undefined + let client: Knex | undefined beforeAll(async () => { await config.init() if (dsProvider) { + const rawDatasource = await dsProvider datasource = await config.createDatasource({ - datasource: await dsProvider, + datasource: rawDatasource, }) + client = await knexClient(rawDatasource) } }) @@ -64,6 +98,23 @@ // the table name they're writing to. ...overrides: Partial<Omit<SaveTableRequest, "name">>[] ): SaveTableRequest { + const defaultSchema: TableSchema = { + id: { + type: FieldType.AUTO, + name: "id", + autocolumn: true, + constraints: { + presence: true, + }, + }, + } + + for (const override of overrides) { + if (override.primary) { + delete defaultSchema.id + } + } + const req: SaveTableRequest = { name: uuid.v4().substring(0, 10), type: "table", @@ -72,16 +123,7 @@ : TableSourceType.INTERNAL, sourceId: datasource ? datasource._id! : INTERNAL_TABLE_SOURCE_ID, primary: ["id"], - schema: { - id: { - type: FieldType.AUTO, - name: "id", - autocolumn: true, - constraints: { - presence: true, - }, - }, - }, + schema: defaultSchema, } return merge(req, ...overrides) } @@ -134,6 +176,10 @@ // error. This is to account for the fact that parallel writes can result // in some quota updates getting lost. We don't have any need to solve this // right now, so we just allow for some error.
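// A standalone sketch of the tolerance check described above, with the zero
// case called out: when `expected` is 0, both bounds collapse to 0, so the
// strict greater-than/less-than checks below could never pass and the usage
// has to be matched exactly (illustrative only; `expect` is Jest's global):
function expectUsageWithinTolerance(usage: number, expected: number) {
  if (expected === 0) {
    expect(usage).toEqual(0)
    return
  }
  expect(usage).toBeGreaterThan(expected * 0.9)
  expect(usage).toBeLessThan(expected * 1.1)
}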
+ if (expected === 0) { + expect(usage).toEqual(0) + return + } expect(usage).toBeGreaterThan(expected * 0.9) expect(usage).toBeLessThan(expected * 1.1) } @@ -158,7 +204,7 @@ describe.each([ }) expect(row.name).toEqual("Test Contact") expect(row._rev).toBeDefined() - await assertRowUsage(rowUsage + 1) + await assertRowUsage(isInternal ? rowUsage + 1 : rowUsage) }) it("fails to create a row for a table that does not exist", async () => { @@ -230,7 +276,7 @@ describe.each([ expect(row["Row ID"]).toBeGreaterThan(previousId) previousId = row["Row ID"] } - await assertRowUsage(rowUsage + 10) + await assertRowUsage(isInternal ? rowUsage + 10 : rowUsage) }) isInternal && @@ -269,13 +315,13 @@ describe.each([ // as quickly as possible. await Promise.all( sequence.map(async () => { - const attempts = 20 + const attempts = 30 for (let attempt = 0; attempt < attempts; attempt++) { try { await config.api.row.save(table._id!, {}) return } catch (e) { - await new Promise(r => setTimeout(r, Math.random() * 15)) + await new Promise(r => setTimeout(r, Math.random() * 50)) } } throw new Error(`Failed to create row after ${attempts} attempts`) @@ -560,6 +606,35 @@ describe.each([ expect(res.name).toEqual("Updated Name") await assertRowUsage(rowUsage) }) + + !isInternal && + it("can update a row on an external table with a primary key", async () => { + const tableName = uuid.v4().substring(0, 10) + await client!.schema.createTable(tableName, table => { + table.increments("id").primary() + table.string("name") + }) + + const res = await config.api.datasource.fetchSchema({ + datasourceId: datasource!._id!, + }) + const table = res.datasource.entities![tableName] + + const row = await config.api.row.save(table._id!, { + id: 1, + name: "Row 1", + }) + + const updatedRow = await config.api.row.save(table._id!, { + _id: row._id!, + name: "Row 1 Updated", + }) + + expect(updatedRow.name).toEqual("Row 1 Updated") + + const rows = await config.api.row.fetch(table._id!) + expect(rows).toHaveLength(1) + }) }) describe("patch", () => { @@ -604,6 +679,32 @@ describe.each([ await assertRowUsage(rowUsage) }) + it("should update only the fields that are supplied and emit the correct oldRow", async () => { + let beforeRow = await config.api.row.save(table._id!, { + name: "test", + description: "test", + }) + const opts = { + name: "row:update", + matchFn: (event: UpdatedRowEventEmitter) => + event.row._id === beforeRow._id, + } + const event = await waitForEvent(opts, async () => { + await config.api.row.patch(table._id!, { + _id: beforeRow._id!, + _rev: beforeRow._rev!, + tableId: table._id!, + name: "Updated Name", + }) + }) + + expect(event.oldRow).toBeDefined() + expect(event.oldRow.name).toEqual("test") + expect(event.row.name).toEqual("Updated Name") + expect(event.oldRow.description).toEqual(beforeRow.description) + expect(event.row.description).toEqual(beforeRow.description) + }) + it("should throw an error when given improper types", async () => { const existing = await config.api.row.save(table._id!, {}) const rowUsage = await getRowUsage() @@ -693,6 +794,50 @@ describe.each([ }) expect(resp.relationship.length).toBe(1) }) + + !isInternal && + // MSSQL needs a setting called IDENTITY_INSERT to be set to ON to allow writing + // to identity columns. This is not something Budibase does currently. 
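// For context on the MSSQL caveat above: writing explicit values to an
// identity column requires toggling IDENTITY_INSERT around the write. A sketch
// of what supporting that would look like with knex (an assumption about
// approach, not something this patch implements):
async function insertWithExplicitIdentity(
  mssql: Knex,
  tableName: string,
  row: Record<string, any>
) {
  await mssql.transaction(async trx => {
    await trx.raw("SET IDENTITY_INSERT ?? ON", [tableName])
    await trx(tableName).insert(row)
    await trx.raw("SET IDENTITY_INSERT ?? OFF", [tableName])
  })
}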
+ providerType !== DatabaseName.SQL_SERVER && + it("should support updating fields that are part of a composite key", async () => { + const tableRequest = saveTableRequest({ + primary: ["number", "string"], + schema: { + string: { + type: FieldType.STRING, + name: "string", + }, + number: { + type: FieldType.NUMBER, + name: "number", + }, + }, + }) + + delete tableRequest.schema.id + + const table = await config.api.table.save(tableRequest) + + const stringValue = generator.word() + const naturalValue = generator.integer({ min: 0, max: 1000 }) + + const existing = await config.api.row.save(table._id!, { + string: stringValue, + number: naturalValue, + }) + + expect(existing._id).toEqual(`%5B${naturalValue}%2C'${stringValue}'%5D`) + + const row = await config.api.row.patch(table._id!, { + _id: existing._id!, + _rev: existing._rev!, + tableId: table._id!, + string: stringValue, + number: 1500, + }) + + expect(row._id).toEqual(`%5B${"1500"}%2C'${stringValue}'%5D`) + }) }) describe("destroy", () => { @@ -708,18 +853,21 @@ describe.each([ rows: [createdRow], }) expect(res[0]._id).toEqual(createdRow._id) - await assertRowUsage(rowUsage - 1) + await assertRowUsage(isInternal ? rowUsage - 1 : rowUsage) }) it("should be able to bulk delete rows, including a row that doesn't exist", async () => { const createdRow = await config.api.row.save(table._id!, {}) + const createdRow2 = await config.api.row.save(table._id!, {}) const res = await config.api.row.bulkDelete(table._id!, { - rows: [createdRow, { _id: "9999999" }], + rows: [createdRow, createdRow2, { _id: "9999999" }], }) - expect(res[0]._id).toEqual(createdRow._id) - expect(res.length).toEqual(1) + expect(res.map(r => r._id)).toEqual( + expect.arrayContaining([createdRow._id, createdRow2._id]) + ) + expect(res.length).toEqual(2) }) }) @@ -771,7 +919,7 @@ describe.each([ expect(res.length).toEqual(2) await config.api.row.get(table._id!, row1._id!, { status: 404 }) - await assertRowUsage(rowUsage - 2) + await assertRowUsage(isInternal ? rowUsage - 2 : rowUsage) }) it("should be able to delete a variety of row set types", async () => { @@ -788,7 +936,7 @@ describe.each([ expect(res.length).toEqual(3) await config.api.row.get(table._id!, row1._id!, { status: 404 }) - await assertRowUsage(rowUsage - 3) + await assertRowUsage(isInternal ? rowUsage - 3 : rowUsage) }) it("should accept a valid row object and delete the row", async () => { @@ -799,35 +947,24 @@ describe.each([ expect(res.id).toEqual(row1._id) await config.api.row.get(table._id!, row1._id!, { status: 404 }) - await assertRowUsage(rowUsage - 1) + await assertRowUsage(isInternal ? 
rowUsage - 1 : rowUsage) }) - it("Should ignore malformed/invalid delete requests", async () => { - const rowUsage = await getRowUsage() + it.each([{ not: "valid" }, { rows: 123 }, "invalid"])( + "Should ignore malformed/invalid delete request: %s", + async (request: any) => { + const rowUsage = await getRowUsage() - await config.api.row.delete(table._id!, { not: "valid" } as any, { - status: 400, - body: { - message: "Invalid delete rows request", - }, - }) + await config.api.row.delete(table._id!, request, { + status: 400, + body: { + message: "Invalid delete rows request", + }, + }) - await config.api.row.delete(table._id!, { rows: 123 } as any, { - status: 400, - body: { - message: "Invalid delete rows request", - }, - }) - - await config.api.row.delete(table._id!, "invalid" as any, { - status: 400, - body: { - message: "Invalid delete rows request", - }, - }) - - await assertRowUsage(rowUsage) - }) + await assertRowUsage(rowUsage) + } + ) }) describe("bulkImport", () => { @@ -861,6 +998,236 @@ describe.each([ row = await config.api.row.save(table._id!, {}) expect(row.autoId).toEqual(3) }) + + it("should be able to bulkImport rows", async () => { + const table = await config.api.table.save( + saveTableRequest({ + schema: { + name: { + type: FieldType.STRING, + name: "name", + }, + description: { + type: FieldType.STRING, + name: "description", + }, + }, + }) + ) + + const rowUsage = await getRowUsage() + + await config.api.row.bulkImport(table._id!, { + rows: [ + { + name: "Row 1", + description: "Row 1 description", + }, + { + name: "Row 2", + description: "Row 2 description", + }, + ], + }) + + const rows = await config.api.row.fetch(table._id!) + expect(rows.length).toEqual(2) + + rows.sort((a, b) => a.name.localeCompare(b.name)) + expect(rows[0].name).toEqual("Row 1") + expect(rows[0].description).toEqual("Row 1 description") + expect(rows[1].name).toEqual("Row 2") + expect(rows[1].description).toEqual("Row 2 description") + + await assertRowUsage(isInternal ? rowUsage + 2 : rowUsage) + }) + + // Upserting isn't yet supported in MSSQL, see: + // https://github.com/knex/knex/pull/6050 + !isMSSQL && + it("should be able to update existing rows with bulkImport", async () => { + const table = await config.api.table.save( + saveTableRequest({ + primary: ["userId"], + schema: { + userId: { + type: FieldType.NUMBER, + name: "userId", + constraints: { + presence: true, + }, + }, + name: { + type: FieldType.STRING, + name: "name", + }, + description: { + type: FieldType.STRING, + name: "description", + }, + }, + }) + ) + + const row1 = await config.api.row.save(table._id!, { + userId: 1, + name: "Row 1", + description: "Row 1 description", + }) + + const row2 = await config.api.row.save(table._id!, { + userId: 2, + name: "Row 2", + description: "Row 2 description", + }) + + await config.api.row.bulkImport(table._id!, { + identifierFields: ["userId"], + rows: [ + { + userId: row1.userId, + name: "Row 1 updated", + description: "Row 1 description updated", + }, + { + userId: row2.userId, + name: "Row 2 updated", + description: "Row 2 description updated", + }, + { + userId: 3, + name: "Row 3", + description: "Row 3 description", + }, + ], + }) + + const rows = await config.api.row.fetch(table._id!) 
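// The identifierFields-based upserts in these bulkImport tests map naturally
// onto knex's onConflict/merge, which is also why MSSQL is skipped: knex has
// no MSSQL upsert support yet (the knex PR linked above). A sketch of the
// idea, not necessarily how Budibase implements bulkImport:
async function upsertRows(
  db: Knex,
  tableName: string,
  identifierFields: string[],
  rows: Record<string, any>[]
) {
  // insert new rows, or update the non-key columns when the identifier
  // combination already exists
  await db(tableName).insert(rows).onConflict(identifierFields).merge()
}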
+ expect(rows.length).toEqual(3) + + rows.sort((a, b) => a.name.localeCompare(b.name)) + expect(rows[0].name).toEqual("Row 1 updated") + expect(rows[0].description).toEqual("Row 1 description updated") + expect(rows[1].name).toEqual("Row 2 updated") + expect(rows[1].description).toEqual("Row 2 description updated") + expect(rows[2].name).toEqual("Row 3") + expect(rows[2].description).toEqual("Row 3 description") + }) + + // Upserting isn't yet supported in MSSQL, see: + // https://github.com/knex/knex/pull/6050 + !isMSSQL && + !isInternal && + it("should be able to update existing rows with composite primary keys with bulkImport", async () => { + const tableName = uuid.v4() + await client?.schema.createTable(tableName, table => { + table.integer("companyId") + table.integer("userId") + table.string("name") + table.string("description") + table.primary(["companyId", "userId"]) + }) + + const resp = await config.api.datasource.fetchSchema({ + datasourceId: datasource!._id!, + }) + const table = resp.datasource.entities![tableName] + + const row1 = await config.api.row.save(table._id!, { + companyId: 1, + userId: 1, + name: "Row 1", + description: "Row 1 description", + }) + + const row2 = await config.api.row.save(table._id!, { + companyId: 1, + userId: 2, + name: "Row 2", + description: "Row 2 description", + }) + + await config.api.row.bulkImport(table._id!, { + identifierFields: ["companyId", "userId"], + rows: [ + { + companyId: 1, + userId: row1.userId, + name: "Row 1 updated", + description: "Row 1 description updated", + }, + { + companyId: 1, + userId: row2.userId, + name: "Row 2 updated", + description: "Row 2 description updated", + }, + { + companyId: 1, + userId: 3, + name: "Row 3", + description: "Row 3 description", + }, + ], + }) + + const rows = await config.api.row.fetch(table._id!) + expect(rows.length).toEqual(3) + + rows.sort((a, b) => a.name.localeCompare(b.name)) + expect(rows[0].name).toEqual("Row 1 updated") + expect(rows[0].description).toEqual("Row 1 description updated") + expect(rows[1].name).toEqual("Row 2 updated") + expect(rows[1].description).toEqual("Row 2 description updated") + expect(rows[2].name).toEqual("Row 3") + expect(rows[2].description).toEqual("Row 3 description") + }) + + // Upserting isn't yet supported in MSSQL, see: + // https://github.com/knex/knex/pull/6050 + !isMSSQL && + !isInternal && + it("should be able to update existing rows an autoID primary key", async () => { + const tableName = uuid.v4() + await client!.schema.createTable(tableName, table => { + table.increments("userId").primary() + table.string("name") + }) + + const resp = await config.api.datasource.fetchSchema({ + datasourceId: datasource!._id!, + }) + const table = resp.datasource.entities![tableName] + + const row1 = await config.api.row.save(table._id!, { + name: "Clare", + }) + + const row2 = await config.api.row.save(table._id!, { + name: "Jeff", + }) + + await config.api.row.bulkImport(table._id!, { + identifierFields: ["userId"], + rows: [ + { + userId: row1.userId, + name: "Clare updated", + }, + { + userId: row2.userId, + name: "Jeff updated", + }, + ], + }) + + const rows = await config.api.row.fetch(table._id!) 
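// The create-then-introspect pattern repeated by these external-table tests,
// distilled into a helper (assumes the same `client`, `config` and
// `datasource` fixtures used throughout this file):
async function createExternalTable(
  name: string,
  build: (t: Knex.CreateTableBuilder) => void
): Promise<Table> {
  await client!.schema.createTable(name, build)
  // re-introspect the datasource so Budibase picks up the new table
  const resp = await config.api.datasource.fetchSchema({
    datasourceId: datasource!._id!,
  })
  return resp.datasource.entities![name]
}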
+ expect(rows.length).toEqual(2) + + rows.sort((a, b) => a.name.localeCompare(b.name)) + expect(rows[0].name).toEqual("Clare updated") + expect(rows[1].name).toEqual("Jeff updated") + }) }) describe("enrich", () => { @@ -1637,3 +2004,5 @@ }) }) }) + +// todo: remove me diff --git a/packages/server/src/api/routes/tests/search.spec.ts b/packages/server/src/api/routes/tests/search.spec.ts index aac43874a0..2d01ec0f6d 100644 --- a/packages/server/src/api/routes/tests/search.spec.ts +++ b/packages/server/src/api/routes/tests/search.spec.ts @@ -1,5 +1,9 @@ import { tableForDatasource } from "../../../tests/utilities/structures" -import { DatabaseName, getDatasource } from "../../../integrations/tests/utils" +import { + DatabaseName, + getDatasource, + knexClient, +} from "../../../integrations/tests/utils" import { db as dbCore, utils } from "@budibase/backend-core" import * as setup from "./utilities" @@ -18,34 +22,35 @@ import { User, Row, RelationshipType, + SearchResponse, } from "@budibase/types" import _ from "lodash" import tk from "timekeeper" import { encodeJSBinding } from "@budibase/string-templates" +import { dataFilters } from "@budibase/shared-core" +import { Knex } from "knex" +import { structures } from "@budibase/backend-core/tests" describe.each([ + ["in-memory", undefined], ["lucene", undefined], ["sqs", undefined], [DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)], [DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)], [DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)], [DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)], -])("/api/:sourceId/search (%s)", (name, dsProvider) => { +])("search (%s)", (name, dsProvider) => { const isSqs = name === "sqs" const isLucene = name === "lucene" + const isInMemory = name === "in-memory" const isInternal = isSqs || isLucene const config = setup.getConfig() let envCleanup: (() => void) | undefined let datasource: Datasource | undefined + let client: Knex | undefined let table: Table - - const snippets = [ - { - name: "WeeksAgo", - code: `return function (weeks) {\n const currentTime = new Date(${Date.now()});\n currentTime.setDate(currentTime.getDate()-(7 * (weeks || 1)));\n return currentTime.toISOString();\n}`, - }, - ] + let rows: Row[] beforeAll(async () => { if (isSqs) { @@ -55,13 +60,20 @@ if (config.app?.appId) { config.app = await config.api.application.update(config.app?.appId, { - snippets, + snippets: [ + { + name: "WeeksAgo", + code: `return function (weeks) {\n const currentTime = new Date(${Date.now()});\n currentTime.setDate(currentTime.getDate()-(7 * (weeks || 1)));\n return currentTime.toISOString();\n}`, + }, + ], }) } if (dsProvider) { + const rawDatasource = await dsProvider + client = await knexClient(rawDatasource) datasource = await config.createDatasource({ - datasource: await dsProvider, + datasource: rawDatasource, }) } }) @@ -73,20 +85,34 @@ } }) - async function createTable(schema: TableSchema) { + async function createTable(schema: TableSchema, name?: string) { return await config.api.table.save( - tableForDatasource(datasource, { schema }) + tableForDatasource(datasource, { schema, name }) ) } - async function createRows(rows: Record<string, any>[]) { // Shuffling to avoid false positives given a fixed order - await config.api.row.bulkImport(table._id!, { rows: _.shuffle(rows) }) + async function createRows(arr: Record<string, any>[]) { + await config.api.row.bulkImport(table._id!, { + rows: _.shuffle(arr), + }) + rows = await
config.api.row.fetch(table._id!) } class SearchAssertion { constructor(private readonly query: RowSearchParams) {} + private async performSearch(): Promise<SearchResponse<Row>> { + if (isInMemory) { + return dataFilters.search(_.cloneDeep(rows), this.query) + } else { + return config.api.row.search(table._id!, { + ...this.query, + tableId: table._id!, + }) + } + } + // We originally used _.isMatch to compare rows, but found that when // comparing arrays it would return true if the source array was a subset of // the target array. This would sometimes create false matches. This @@ -157,10 +183,7 @@ describe.each([ // different to the one passed in will cause the assertion to fail. Extra // rows returned by the query will also cause the assertion to fail. async toMatchExactly(expectedRows: any[]) { - const { rows: foundRows } = await config.api.row.search(table._id!, { - ...this.query, - tableId: table._id!, - }) + const { rows: foundRows } = await this.performSearch() // eslint-disable-next-line jest/no-standalone-expect expect(foundRows).toHaveLength(expectedRows.length) @@ -176,10 +199,7 @@ describe.each([ // passed in. The order of the rows is not important, but extra rows will // cause the assertion to fail. async toContainExactly(expectedRows: any[]) { - const { rows: foundRows } = await config.api.row.search(table._id!, { - ...this.query, - tableId: table._id!, - }) + const { rows: foundRows } = await this.performSearch() // eslint-disable-next-line jest/no-standalone-expect expect(foundRows).toHaveLength(expectedRows.length) @@ -193,14 +213,36 @@ describe.each([ ) } + // Asserts that the query returns some property values; this cannot be used + // to check row values, but that shouldn't matter when checking properties. + // Typing for this has to be any, as Jest doesn't expose types for matchers like expect.any(...) + async toMatch(properties: Record<string, any>) { + const response = await this.performSearch() + const keys = Object.keys(properties) as Array<keyof SearchResponse<Row>> + for (let key of keys) { + // eslint-disable-next-line jest/no-standalone-expect + expect(response[key]).toBeDefined() + if (properties[key]) { + // eslint-disable-next-line jest/no-standalone-expect + expect(response[key]).toEqual(properties[key]) + } + } + } + + // Asserts that the query doesn't return a property, e.g. pagination parameters. + async toNotHaveProperty(properties: (keyof SearchResponse<Row>)[]) { + const response = await this.performSearch() + for (let property of properties) { + // eslint-disable-next-line jest/no-standalone-expect + expect(response[property]).toBeUndefined() + } + } + // Asserts that the query returns rows matching the set of rows passed in. // The order of the rows is not important. Extra rows will not cause the // assertion to fail.
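// Concretely, the lodash pitfall the comment above describes: _.isMatch
// performs a partial deep comparison, so a source array matches whenever it
// is a subset of the target array, e.g.
//   _.isMatch({ tags: ["a", "b", "c"] }, { tags: ["a", "c"] }) // => true
// which is a false positive for row comparison, hence the explicit
// toHaveLength plus arrayContaining assertions used by these matchers.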
async toContain(expectedRows: any[]) { - const { rows: foundRows } = await config.api.row.search(table._id!, { - ...this.query, - tableId: table._id!, - }) + const { rows: foundRows } = await this.performSearch() // eslint-disable-next-line jest/no-standalone-expect expect([...foundRows]).toEqual( @@ -217,10 +259,7 @@ describe.each([ } async toHaveLength(length: number) { - const { rows: foundRows } = await config.api.row.search(table._id!, { - ...this.query, - tableId: table._id!, - }) + const { rows: foundRows } = await this.performSearch() // eslint-disable-next-line jest/no-standalone-expect expect(foundRows).toHaveLength(length) @@ -244,266 +283,276 @@ describe.each([ }) describe("equal", () => { - it("successfully finds true row", () => - expectQuery({ equal: { isTrue: true } }).toMatchExactly([ + it("successfully finds true row", async () => { + await expectQuery({ equal: { isTrue: true } }).toMatchExactly([ { isTrue: true }, - ])) + ]) + }) - it("successfully finds false row", () => - expectQuery({ equal: { isTrue: false } }).toMatchExactly([ + it("successfully finds false row", async () => { + await expectQuery({ equal: { isTrue: false } }).toMatchExactly([ { isTrue: false }, - ])) + ]) + }) }) describe("notEqual", () => { - it("successfully finds false row", () => - expectQuery({ notEqual: { isTrue: true } }).toContainExactly([ + it("successfully finds false row", async () => { + await expectQuery({ notEqual: { isTrue: true } }).toContainExactly([ { isTrue: false }, - ])) + ]) + }) - it("successfully finds true row", () => - expectQuery({ notEqual: { isTrue: false } }).toContainExactly([ + it("successfully finds true row", async () => { + await expectQuery({ notEqual: { isTrue: false } }).toContainExactly([ { isTrue: true }, - ])) + ]) + }) }) describe("oneOf", () => { - it("successfully finds true row", () => - expectQuery({ oneOf: { isTrue: [true] } }).toContainExactly([ + it("successfully finds true row", async () => { + await expectQuery({ oneOf: { isTrue: [true] } }).toContainExactly([ { isTrue: true }, - ])) + ]) + }) - it("successfully finds false row", () => - expectQuery({ oneOf: { isTrue: [false] } }).toContainExactly([ + it("successfully finds false row", async () => { + await expectQuery({ oneOf: { isTrue: [false] } }).toContainExactly([ { isTrue: false }, - ])) + ]) + }) }) describe("sort", () => { - it("sorts ascending", () => - expectSearch({ + it("sorts ascending", async () => { + await expectSearch({ query: {}, sort: "isTrue", sortOrder: SortOrder.ASCENDING, - }).toMatchExactly([{ isTrue: false }, { isTrue: true }])) + }).toMatchExactly([{ isTrue: false }, { isTrue: true }]) + }) - it("sorts descending", () => - expectSearch({ + it("sorts descending", async () => { + await expectSearch({ query: {}, sort: "isTrue", sortOrder: SortOrder.DESCENDING, - }).toMatchExactly([{ isTrue: true }, { isTrue: false }])) + }).toMatchExactly([{ isTrue: true }, { isTrue: false }]) + }) }) }) - // Ensure all bindings resolve and perform as expected - describe("bindings", () => { - let globalUsers: any = [] + // We've decided not to try and support binding for in-memory search just now. + !isInMemory && + describe("bindings", () => { + let globalUsers: any = [] - const serverTime = new Date() + const serverTime = new Date() - // In MariaDB and MySQL we only store dates to second precision, so we need - // to remove milliseconds from the server time to ensure searches work as - // expected. 
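// Background for the truncation below: MySQL and MariaDB DATETIME columns
// default to zero fractional-second digits (DATETIME(3) would be needed to
// keep milliseconds), so a value saved as 2024-01-01T10:00:00.123Z reads back
// as 2024-01-01T10:00:00.000Z. Zeroing milliseconds up front keeps equality
// comparisons against stored rows honest:
//   const t = new Date("2024-01-01T10:00:00.123Z")
//   t.setMilliseconds(0)
//   t.toISOString() // => "2024-01-01T10:00:00.000Z"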
- serverTime.setMilliseconds(0) + // In MariaDB and MySQL we only store dates to second precision, so we need + // to remove milliseconds from the server time to ensure searches work as + // expected. + serverTime.setMilliseconds(0) - const future = new Date(serverTime.getTime() + 1000 * 60 * 60 * 24 * 30) + const future = new Date(serverTime.getTime() + 1000 * 60 * 60 * 24 * 30) - const rows = (currentUser: User) => { - return [ - { name: "foo", appointment: "1982-01-05T00:00:00.000Z" }, - { name: "bar", appointment: "1995-05-06T00:00:00.000Z" }, - { name: currentUser.firstName, appointment: future.toISOString() }, - { name: "serverDate", appointment: serverTime.toISOString() }, - { - name: "single user, session user", - single_user: JSON.stringify(currentUser), - }, - { - name: "single user", - single_user: JSON.stringify(globalUsers[0]), - }, - { - name: "deprecated single user, session user", - deprecated_single_user: JSON.stringify([currentUser]), - }, - { - name: "deprecated single user", - deprecated_single_user: JSON.stringify([globalUsers[0]]), - }, - { - name: "multi user", - multi_user: JSON.stringify(globalUsers), - }, - { - name: "multi user with session user", - multi_user: JSON.stringify([...globalUsers, currentUser]), - }, - { - name: "deprecated multi user", - deprecated_multi_user: JSON.stringify(globalUsers), - }, - { - name: "deprecated multi user with session user", - deprecated_multi_user: JSON.stringify([...globalUsers, currentUser]), - }, - ] - } - - beforeAll(async () => { - // Set up some global users - globalUsers = await Promise.all( - Array(2) - .fill(0) - .map(async () => { - const globalUser = await config.globalUser() - const userMedataId = globalUser._id - ? dbCore.generateUserMetadataID(globalUser._id) - : null - return { - _id: globalUser._id, - _meta: userMedataId, - } - }) - ) - - table = await createTable({ - name: { name: "name", type: FieldType.STRING }, - appointment: { name: "appointment", type: FieldType.DATETIME }, - single_user: { - name: "single_user", - type: FieldType.BB_REFERENCE_SINGLE, - subtype: BBReferenceFieldSubType.USER, - }, - deprecated_single_user: { - name: "deprecated_single_user", - type: FieldType.BB_REFERENCE, - subtype: BBReferenceFieldSubType.USER, - }, - multi_user: { - name: "multi_user", - type: FieldType.BB_REFERENCE, - subtype: BBReferenceFieldSubType.USER, - constraints: { - type: "array", + const rows = (currentUser: User) => { + return [ + { name: "foo", appointment: "1982-01-05T00:00:00.000Z" }, + { name: "bar", appointment: "1995-05-06T00:00:00.000Z" }, + { name: currentUser.firstName, appointment: future.toISOString() }, + { name: "serverDate", appointment: serverTime.toISOString() }, + { + name: "single user, session user", + single_user: JSON.stringify(currentUser), }, - }, - deprecated_multi_user: { - name: "deprecated_multi_user", - type: FieldType.BB_REFERENCE, - subtype: BBReferenceFieldSubType.USERS, - constraints: { - type: "array", + { + name: "single user", + single_user: JSON.stringify(globalUsers[0]), }, - }, - }) - await createRows(rows(config.getUser())) - }) + { + name: "deprecated single user, session user", + deprecated_single_user: JSON.stringify([currentUser]), + }, + { + name: "deprecated single user", + deprecated_single_user: JSON.stringify([globalUsers[0]]), + }, + { + name: "multi user", + multi_user: JSON.stringify(globalUsers), + }, + { + name: "multi user with session user", + multi_user: JSON.stringify([...globalUsers, currentUser]), + }, + { + name: "deprecated multi user", + 
deprecated_multi_user: JSON.stringify(globalUsers), + }, + { + name: "deprecated multi user with session user", + deprecated_multi_user: JSON.stringify([ + ...globalUsers, + currentUser, + ]), + }, + ] + } - // !! Current User is auto generated per run - it("should return all rows matching the session user firstname", async () => { - await expectQuery({ - equal: { name: "{{ [user].firstName }}" }, - }).toContainExactly([ - { - name: config.getUser().firstName, - appointment: future.toISOString(), - }, - ]) - }) + beforeAll(async () => { + // Set up some global users + globalUsers = await Promise.all( + Array(2) + .fill(0) + .map(async () => { + const globalUser = await config.globalUser() + const userMedataId = globalUser._id + ? dbCore.generateUserMetadataID(globalUser._id) + : null + return { + _id: globalUser._id, + _meta: userMedataId, + } + }) + ) - it("should parse the date binding and return all rows after the resolved value", async () => { - await tk.withFreeze(serverTime, async () => { - await expectQuery({ - range: { - appointment: { - low: "{{ [now] }}", - high: "9999-00-00T00:00:00.000Z", + table = await createTable({ + name: { name: "name", type: FieldType.STRING }, + appointment: { name: "appointment", type: FieldType.DATETIME }, + single_user: { + name: "single_user", + type: FieldType.BB_REFERENCE_SINGLE, + subtype: BBReferenceFieldSubType.USER, + }, + deprecated_single_user: { + name: "deprecated_single_user", + type: FieldType.BB_REFERENCE, + subtype: BBReferenceFieldSubType.USER, + }, + multi_user: { + name: "multi_user", + type: FieldType.BB_REFERENCE, + subtype: BBReferenceFieldSubType.USER, + constraints: { + type: "array", }, }, + deprecated_multi_user: { + name: "deprecated_multi_user", + type: FieldType.BB_REFERENCE, + subtype: BBReferenceFieldSubType.USERS, + constraints: { + type: "array", + }, + }, + }) + await createRows(rows(config.getUser())) + }) + + // !! Current User is auto generated per run + it("should return all rows matching the session user firstname", async () => { + await expectQuery({ + equal: { name: "{{ [user].firstName }}" }, }).toContainExactly([ { name: config.getUser().firstName, appointment: future.toISOString(), }, + ]) + }) + + it("should parse the date binding and return all rows after the resolved value", async () => { + await tk.withFreeze(serverTime, async () => { + await expectQuery({ + range: { + appointment: { + low: "{{ [now] }}", + high: "9999-00-00T00:00:00.000Z", + }, + }, + }).toContainExactly([ + { + name: config.getUser().firstName, + appointment: future.toISOString(), + }, + { name: "serverDate", appointment: serverTime.toISOString() }, + ]) + }) + }) + + it("should parse the date binding and return all rows before the resolved value", async () => { + await expectQuery({ + range: { + appointment: { + low: "0000-00-00T00:00:00.000Z", + high: "{{ [now] }}", + }, + }, + }).toContainExactly([ + { name: "foo", appointment: "1982-01-05T00:00:00.000Z" }, + { name: "bar", appointment: "1995-05-06T00:00:00.000Z" }, { name: "serverDate", appointment: serverTime.toISOString() }, ]) }) - }) - it("should parse the date binding and return all rows before the resolved value", async () => { - await expectQuery({ - range: { - appointment: { - low: "0000-00-00T00:00:00.000Z", - high: "{{ [now] }}", + it("should parse the encoded js snippet. 
Return rows with appointments up to 1 week in the past", async () => { + const jsBinding = "return snippets.WeeksAgo();" + const encodedBinding = encodeJSBinding(jsBinding) + + await expectQuery({ + range: { + appointment: { + low: "0000-00-00T00:00:00.000Z", + high: encodedBinding, + }, }, - }, - }).toContainExactly([ - { name: "foo", appointment: "1982-01-05T00:00:00.000Z" }, - { name: "bar", appointment: "1995-05-06T00:00:00.000Z" }, - { name: "serverDate", appointment: serverTime.toISOString() }, - ]) - }) + }).toContainExactly([ + { name: "foo", appointment: "1982-01-05T00:00:00.000Z" }, + { name: "bar", appointment: "1995-05-06T00:00:00.000Z" }, + ]) + }) - it("should parse the encoded js snippet. Return rows with appointments up to 1 week in the past", async () => { - const jsBinding = "return snippets.WeeksAgo();" - const encodedBinding = encodeJSBinding(jsBinding) + it("should parse the encoded js binding. Return rows with appointments 2 weeks in the past", async () => { + const jsBinding = `const currentTime = new Date(${Date.now()})\ncurrentTime.setDate(currentTime.getDate()-14);\nreturn currentTime.toISOString();` + const encodedBinding = encodeJSBinding(jsBinding) - await expectQuery({ - range: { - appointment: { - low: "0000-00-00T00:00:00.000Z", - high: encodedBinding, + await expectQuery({ + range: { + appointment: { + low: "0000-00-00T00:00:00.000Z", + high: encodedBinding, + }, }, - }, - }).toContainExactly([ - { name: "foo", appointment: "1982-01-05T00:00:00.000Z" }, - { name: "bar", appointment: "1995-05-06T00:00:00.000Z" }, - ]) - }) + }).toContainExactly([ + { name: "foo", appointment: "1982-01-05T00:00:00.000Z" }, + { name: "bar", appointment: "1995-05-06T00:00:00.000Z" }, + ]) + }) - it("should parse the encoded js binding. Return rows with appointments 2 weeks in the past", async () => { - const jsBinding = `const currentTime = new Date(${Date.now()})\ncurrentTime.setDate(currentTime.getDate()-14);\nreturn currentTime.toISOString();` - const encodedBinding = encodeJSBinding(jsBinding) - - await expectQuery({ - range: { - appointment: { - low: "0000-00-00T00:00:00.000Z", - high: encodedBinding, + it("should match a single user row by the session user id", async () => { + await expectQuery({ + equal: { single_user: "{{ [user]._id }}" }, + }).toContainExactly([ + { + name: "single user, session user", + single_user: { _id: config.getUser()._id }, }, - }, - }).toContainExactly([ - { name: "foo", appointment: "1982-01-05T00:00:00.000Z" }, - { name: "bar", appointment: "1995-05-06T00:00:00.000Z" }, - ]) - }) + ]) + }) - it("should match a single user row by the session user id", async () => { - await expectQuery({ - equal: { single_user: "{{ [user]._id }}" }, - }).toContainExactly([ - { - name: "single user, session user", - single_user: { _id: config.getUser()._id }, - }, - ]) - }) + it("should match a deprecated single user row by the session user id", async () => { + await expectQuery({ + equal: { deprecated_single_user: "{{ [user]._id }}" }, + }).toContainExactly([ + { + name: "deprecated single user, session user", + deprecated_single_user: [{ _id: config.getUser()._id }], + }, + ]) + }) - it("should match a deprecated single user row by the session user id", async () => { - await expectQuery({ - equal: { deprecated_single_user: "{{ [user]._id }}" }, - }).toContainExactly([ - { - name: "deprecated single user, session user", - deprecated_single_user: [{ _id: config.getUser()._id }], - }, - ]) - }) - - // TODO(samwho): fix for SQS - !isSqs && it("should match the 
session user id in a multi user field", async () => { const allUsers = [...globalUsers, config.getUser()].map((user: any) => { return { _id: user._id } @@ -519,8 +568,6 @@ describe.each([ ]) }) - // TODO(samwho): fix for SQS - !isSqs && it("should match the session user id in a deprecated multi user field", async () => { const allUsers = [...globalUsers, config.getUser()].map((user: any) => { return { _id: user._id } @@ -536,8 +583,6 @@ describe.each([ ]) }) - // TODO(samwho): fix for SQS - !isSqs && it("should not match the session user id in a multi user field", async () => { await expectQuery({ notContains: { multi_user: ["{{ [user]._id }}"] }, @@ -552,8 +597,6 @@ describe.each([ ]) }) - // TODO(samwho): fix for SQS - !isSqs && it("should not match the session user id in a deprecated multi user field", async () => { await expectQuery({ notContains: { deprecated_multi_user: ["{{ [user]._id }}"] }, @@ -568,78 +611,78 @@ describe.each([ ]) }) - it("should match the session user id and a user table row id using helpers, user binding and a static user id.", async () => { - await expectQuery({ - oneOf: { - single_user: [ - "{{ default [user]._id '_empty_' }}", - globalUsers[0]._id, - ], - }, - }).toContainExactly([ - { - name: "single user, session user", - single_user: { _id: config.getUser()._id }, - }, - { - name: "single user", - single_user: { _id: globalUsers[0]._id }, - }, - ]) - }) + it("should match the session user id and a user table row id using helpers, user binding and a static user id.", async () => { + await expectQuery({ + oneOf: { + single_user: [ + "{{ default [user]._id '_empty_' }}", + globalUsers[0]._id, + ], + }, + }).toContainExactly([ + { + name: "single user, session user", + single_user: { _id: config.getUser()._id }, + }, + { + name: "single user", + single_user: { _id: globalUsers[0]._id }, + }, + ]) + }) - it("should match the session user id and a user table row id using helpers, user binding and a static user id. (deprecated single user)", async () => { - await expectQuery({ - oneOf: { - deprecated_single_user: [ - "{{ default [user]._id '_empty_' }}", - globalUsers[0]._id, - ], - }, - }).toContainExactly([ - { - name: "deprecated single user, session user", - deprecated_single_user: [{ _id: config.getUser()._id }], - }, - { - name: "deprecated single user", - deprecated_single_user: [{ _id: globalUsers[0]._id }], - }, - ]) - }) + it("should match the session user id and a user table row id using helpers, user binding and a static user id. 
(deprecated single user)", async () => { + await expectQuery({ + oneOf: { + deprecated_single_user: [ + "{{ default [user]._id '_empty_' }}", + globalUsers[0]._id, + ], + }, + }).toContainExactly([ + { + name: "deprecated single user, session user", + deprecated_single_user: [{ _id: config.getUser()._id }], + }, + { + name: "deprecated single user", + deprecated_single_user: [{ _id: globalUsers[0]._id }], + }, + ]) + }) - it("should resolve 'default' helper to '_empty_' when binding resolves to nothing", async () => { - await expectQuery({ - oneOf: { - single_user: [ - "{{ default [user]._idx '_empty_' }}", - globalUsers[0]._id, - ], - }, - }).toContainExactly([ - { - name: "single user", - single_user: { _id: globalUsers[0]._id }, - }, - ]) - }) + it("should resolve 'default' helper to '_empty_' when binding resolves to nothing", async () => { + await expectQuery({ + oneOf: { + single_user: [ + "{{ default [user]._idx '_empty_' }}", + globalUsers[0]._id, + ], + }, + }).toContainExactly([ + { + name: "single user", + single_user: { _id: globalUsers[0]._id }, + }, + ]) + }) - it("should resolve 'default' helper to '_empty_' when binding resolves to nothing (deprecated single user)", async () => { - await expectQuery({ - oneOf: { - deprecated_single_user: [ - "{{ default [user]._idx '_empty_' }}", - globalUsers[0]._id, - ], - }, - }).toContainExactly([ - { - name: "deprecated single user", - deprecated_single_user: [{ _id: globalUsers[0]._id }], - }, - ]) + it("should resolve 'default' helper to '_empty_' when binding resolves to nothing (deprecated single user)", async () => { + await expectQuery({ + oneOf: { + deprecated_single_user: [ + "{{ default [user]._idx '_empty_' }}", + globalUsers[0]._id, + ], + }, + }).toContainExactly([ + { + name: "deprecated single user", + deprecated_single_user: [{ _id: globalUsers[0]._id }], + }, + ]) + }) }) - }) describe.each([FieldType.STRING, FieldType.LONGFORM])("%s", () => { beforeAll(async () => { @@ -650,192 +693,269 @@ describe.each([ }) describe("misc", () => { - it("should return all if no query is passed", () => - expectSearch({} as RowSearchParams).toContainExactly([ + it("should return all if no query is passed", async () => { + await expectSearch({} as RowSearchParams).toContainExactly([ { name: "foo" }, { name: "bar" }, - ])) + ]) + }) - it("should return all if empty query is passed", () => - expectQuery({}).toContainExactly([{ name: "foo" }, { name: "bar" }])) + it("should return all if empty query is passed", async () => { + await expectQuery({}).toContainExactly([ + { name: "foo" }, + { name: "bar" }, + ]) + }) - it("should return all if onEmptyFilter is RETURN_ALL", () => - expectQuery({ + it("should return all if onEmptyFilter is RETURN_ALL", async () => { + await expectQuery({ onEmptyFilter: EmptyFilterOption.RETURN_ALL, - }).toContainExactly([{ name: "foo" }, { name: "bar" }])) + }).toContainExactly([{ name: "foo" }, { name: "bar" }]) + }) - it("should return nothing if onEmptyFilter is RETURN_NONE", () => - expectQuery({ + it("should return nothing if onEmptyFilter is RETURN_NONE", async () => { + await expectQuery({ onEmptyFilter: EmptyFilterOption.RETURN_NONE, - }).toFindNothing()) + }).toFindNothing() + }) - it("should respect limit", () => - expectSearch({ limit: 1, paginate: true, query: {} }).toHaveLength(1)) + it("should respect limit", async () => { + await expectSearch({ + limit: 1, + paginate: true, + query: {}, + }).toHaveLength(1) + }) }) describe("equal", () => { - it("successfully finds a row", () => - expectQuery({ 
equal: { name: "foo" } }).toContainExactly([ + it("successfully finds a row", async () => { + await expectQuery({ equal: { name: "foo" } }).toContainExactly([ { name: "foo" }, - ])) + ]) + }) - it("fails to find nonexistent row", () => - expectQuery({ equal: { name: "none" } }).toFindNothing()) + it("fails to find nonexistent row", async () => { + await expectQuery({ equal: { name: "none" } }).toFindNothing() + }) - it("works as an or condition", () => - expectQuery({ + it("works as an or condition", async () => { + await expectQuery({ allOr: true, equal: { name: "foo" }, oneOf: { name: ["bar"] }, - }).toContainExactly([{ name: "foo" }, { name: "bar" }])) + }).toContainExactly([{ name: "foo" }, { name: "bar" }]) + }) - it("can have multiple values for same column", () => - expectQuery({ + it("can have multiple values for same column", async () => { + await expectQuery({ allOr: true, equal: { "1:name": "foo", "2:name": "bar" }, - }).toContainExactly([{ name: "foo" }, { name: "bar" }])) + }).toContainExactly([{ name: "foo" }, { name: "bar" }]) + }) }) describe("notEqual", () => { - it("successfully finds a row", () => - expectQuery({ notEqual: { name: "foo" } }).toContainExactly([ + it("successfully finds a row", async () => { + await expectQuery({ notEqual: { name: "foo" } }).toContainExactly([ { name: "bar" }, - ])) + ]) + }) - it("fails to find nonexistent row", () => - expectQuery({ notEqual: { name: "bar" } }).toContainExactly([ + it("fails to find nonexistent row", async () => { + await expectQuery({ notEqual: { name: "bar" } }).toContainExactly([ { name: "foo" }, - ])) + ]) + }) }) describe("oneOf", () => { - it("successfully finds a row", () => - expectQuery({ oneOf: { name: ["foo"] } }).toContainExactly([ + it("successfully finds a row", async () => { + await expectQuery({ oneOf: { name: ["foo"] } }).toContainExactly([ { name: "foo" }, - ])) + ]) + }) - it("fails to find nonexistent row", () => - expectQuery({ oneOf: { name: ["none"] } }).toFindNothing()) + it("fails to find nonexistent row", async () => { + await expectQuery({ oneOf: { name: ["none"] } }).toFindNothing() + }) }) describe("fuzzy", () => { - it("successfully finds a row", () => - expectQuery({ fuzzy: { name: "oo" } }).toContainExactly([ + it("successfully finds a row", async () => { + await expectQuery({ fuzzy: { name: "oo" } }).toContainExactly([ { name: "foo" }, - ])) + ]) + }) - it("fails to find nonexistent row", () => - expectQuery({ fuzzy: { name: "none" } }).toFindNothing()) + it("fails to find nonexistent row", async () => { + await expectQuery({ fuzzy: { name: "none" } }).toFindNothing() + }) }) describe("string", () => { - it("successfully finds a row", () => - expectQuery({ string: { name: "fo" } }).toContainExactly([ + it("successfully finds a row", async () => { + await expectQuery({ string: { name: "fo" } }).toContainExactly([ { name: "foo" }, - ])) + ]) + }) - it("fails to find nonexistent row", () => - expectQuery({ string: { name: "none" } }).toFindNothing()) + it("fails to find nonexistent row", async () => { + await expectQuery({ string: { name: "none" } }).toFindNothing() + }) - it("is case-insensitive", () => - expectQuery({ string: { name: "FO" } }).toContainExactly([ + it("is case-insensitive", async () => { + await expectQuery({ string: { name: "FO" } }).toContainExactly([ { name: "foo" }, - ])) + ]) + }) }) describe("range", () => { - it("successfully finds multiple rows", () => - expectQuery({ + it("successfully finds multiple rows", async () => { + await expectQuery({ range: { name: { low: 
"a", high: "z" } }, - }).toContainExactly([{ name: "bar" }, { name: "foo" }])) + }).toContainExactly([{ name: "bar" }, { name: "foo" }]) + }) - it("successfully finds a row with a high bound", () => - expectQuery({ + it("successfully finds a row with a high bound", async () => { + await expectQuery({ range: { name: { low: "a", high: "c" } }, - }).toContainExactly([{ name: "bar" }])) + }).toContainExactly([{ name: "bar" }]) + }) - it("successfully finds a row with a low bound", () => - expectQuery({ + it("successfully finds a row with a low bound", async () => { + await expectQuery({ range: { name: { low: "f", high: "z" } }, - }).toContainExactly([{ name: "foo" }])) + }).toContainExactly([{ name: "foo" }]) + }) - it("successfully finds no rows", () => - expectQuery({ + it("successfully finds no rows", async () => { + await expectQuery({ range: { name: { low: "g", high: "h" } }, - }).toFindNothing()) + }).toFindNothing() + }) !isLucene && - it("ignores low if it's an empty object", () => - expectQuery({ + it("ignores low if it's an empty object", async () => { + await expectQuery({ // @ts-ignore range: { name: { low: {}, high: "z" } }, - }).toContainExactly([{ name: "foo" }, { name: "bar" }])) + }).toContainExactly([{ name: "foo" }, { name: "bar" }]) + }) !isLucene && - it("ignores high if it's an empty object", () => - expectQuery({ + it("ignores high if it's an empty object", async () => { + await expectQuery({ // @ts-ignore range: { name: { low: "a", high: {} } }, - }).toContainExactly([{ name: "foo" }, { name: "bar" }])) + }).toContainExactly([{ name: "foo" }, { name: "bar" }]) + }) }) describe("empty", () => { - it("finds no empty rows", () => - expectQuery({ empty: { name: null } }).toFindNothing()) + it("finds no empty rows", async () => { + await expectQuery({ empty: { name: null } }).toFindNothing() + }) - it("should not be affected by when filter empty behaviour", () => - expectQuery({ + it("should not be affected by when filter empty behaviour", async () => { + await expectQuery({ empty: { name: null }, onEmptyFilter: EmptyFilterOption.RETURN_ALL, - }).toFindNothing()) + }).toFindNothing() + }) }) describe("notEmpty", () => { - it("finds all non-empty rows", () => - expectQuery({ notEmpty: { name: null } }).toContainExactly([ + it("finds all non-empty rows", async () => { + await expectQuery({ notEmpty: { name: null } }).toContainExactly([ { name: "foo" }, { name: "bar" }, - ])) + ]) + }) - it("should not be affected by when filter empty behaviour", () => - expectQuery({ + it("should not be affected by when filter empty behaviour", async () => { + await expectQuery({ notEmpty: { name: null }, onEmptyFilter: EmptyFilterOption.RETURN_NONE, - }).toContainExactly([{ name: "foo" }, { name: "bar" }])) + }).toContainExactly([{ name: "foo" }, { name: "bar" }]) + }) }) describe("sort", () => { - it("sorts ascending", () => - expectSearch({ + it("sorts ascending", async () => { + await expectSearch({ query: {}, sort: "name", sortOrder: SortOrder.ASCENDING, - }).toMatchExactly([{ name: "bar" }, { name: "foo" }])) + }).toMatchExactly([{ name: "bar" }, { name: "foo" }]) + }) - it("sorts descending", () => - expectSearch({ + it("sorts descending", async () => { + await expectSearch({ query: {}, sort: "name", sortOrder: SortOrder.DESCENDING, - }).toMatchExactly([{ name: "foo" }, { name: "bar" }])) + }).toMatchExactly([{ name: "foo" }, { name: "bar" }]) + }) describe("sortType STRING", () => { - it("sorts ascending", () => - expectSearch({ + it("sorts ascending", async () => { + await 
expectSearch({ query: {}, sort: "name", sortType: SortType.STRING, sortOrder: SortOrder.ASCENDING, - }).toMatchExactly([{ name: "bar" }, { name: "foo" }])) + }).toMatchExactly([{ name: "bar" }, { name: "foo" }]) + }) - it("sorts descending", () => - expectSearch({ + it("sorts descending", async () => { + await expectSearch({ query: {}, sort: "name", sortType: SortType.STRING, sortOrder: SortOrder.DESCENDING, - }).toMatchExactly([{ name: "foo" }, { name: "bar" }])) + }).toMatchExactly([{ name: "foo" }, { name: "bar" }]) + }) }) + + !isInternal && + !isInMemory && + // This test was added because we automatically add in a sort by the + // primary key, and we used to do this unconditionally which caused + // problems because it was possible for the primary key to appear twice + // in the resulting SQL ORDER BY clause, resulting in an SQL error. + // We now check first to make sure that the primary key isn't already + // in the sort before adding it. + describe("sort on primary key", () => { + beforeAll(async () => { + const tableName = structures.uuid().substring(0, 10) + await client!.schema.createTable(tableName, t => { + t.string("name").primary() + }) + const resp = await config.api.datasource.fetchSchema({ + datasourceId: datasource!._id!, + }) + + table = resp.datasource.entities![tableName] + + await createRows([{ name: "foo" }, { name: "bar" }]) + }) + + it("should be able to sort by a primary key column ascending", async () => + expectSearch({ + query: {}, + sort: "name", + sortOrder: SortOrder.ASCENDING, + }).toMatchExactly([{ name: "bar" }, { name: "foo" }])) + + it("should be able to sort by a primary key column descending", async () => + expectSearch({ + query: {}, + sort: "name", + sortOrder: SortOrder.DESCENDING, + }).toMatchExactly([{ name: "foo" }, { name: "bar" }])) + }) }) }) @@ -848,97 +968,119 @@ describe.each([ }) describe("equal", () => { - it("successfully finds a row", () => - expectQuery({ equal: { age: 1 } }).toContainExactly([{ age: 1 }])) + it("successfully finds a row", async () => { + await expectQuery({ equal: { age: 1 } }).toContainExactly([{ age: 1 }]) + }) - it("fails to find nonexistent row", () => - expectQuery({ equal: { age: 2 } }).toFindNothing()) + it("fails to find nonexistent row", async () => { + await expectQuery({ equal: { age: 2 } }).toFindNothing() + }) }) describe("notEqual", () => { - it("successfully finds a row", () => - expectQuery({ notEqual: { age: 1 } }).toContainExactly([{ age: 10 }])) + it("successfully finds a row", async () => { + await expectQuery({ notEqual: { age: 1 } }).toContainExactly([ + { age: 10 }, + ]) + }) - it("fails to find nonexistent row", () => - expectQuery({ notEqual: { age: 10 } }).toContainExactly([{ age: 1 }])) + it("fails to find nonexistent row", async () => { + await expectQuery({ notEqual: { age: 10 } }).toContainExactly([ + { age: 1 }, + ]) + }) }) describe("oneOf", () => { - it("successfully finds a row", () => - expectQuery({ oneOf: { age: [1] } }).toContainExactly([{ age: 1 }])) + it("successfully finds a row", async () => { + await expectQuery({ oneOf: { age: [1] } }).toContainExactly([ + { age: 1 }, + ]) + }) - it("fails to find nonexistent row", () => - expectQuery({ oneOf: { age: [2] } }).toFindNothing()) + it("fails to find nonexistent row", async () => { + await expectQuery({ oneOf: { age: [2] } }).toFindNothing() + }) }) describe("range", () => { - it("successfully finds a row", () => - expectQuery({ + it("successfully finds a row", async () => { + await expectQuery({ range: { age: { low: 1, high: 
5 } }, - }).toContainExactly([{ age: 1 }])) + }).toContainExactly([{ age: 1 }]) + }) - it("successfully finds multiple rows", () => - expectQuery({ + it("successfully finds multiple rows", async () => { + await expectQuery({ range: { age: { low: 1, high: 10 } }, - }).toContainExactly([{ age: 1 }, { age: 10 }])) + }).toContainExactly([{ age: 1 }, { age: 10 }]) + }) - it("successfully finds a row with a high bound", () => - expectQuery({ + it("successfully finds a row with a high bound", async () => { + await expectQuery({ range: { age: { low: 5, high: 10 } }, - }).toContainExactly([{ age: 10 }])) + }).toContainExactly([{ age: 10 }]) + }) - it("successfully finds no rows", () => - expectQuery({ + it("successfully finds no rows", async () => { + await expectQuery({ range: { age: { low: 5, high: 9 } }, - }).toFindNothing()) + }).toFindNothing() + }) // We never implemented half-open ranges in Lucene. !isLucene && - it("can search using just a low value", () => - expectQuery({ + it("can search using just a low value", async () => { + await expectQuery({ range: { age: { low: 5 } }, - }).toContainExactly([{ age: 10 }])) + }).toContainExactly([{ age: 10 }]) + }) // We never implemented half-open ranges in Lucene. !isLucene && - it("can search using just a high value", () => - expectQuery({ + it("can search using just a high value", async () => { + await expectQuery({ range: { age: { high: 5 } }, - }).toContainExactly([{ age: 1 }])) + }).toContainExactly([{ age: 1 }]) + }) }) describe("sort", () => { - it("sorts ascending", () => - expectSearch({ + it("sorts ascending", async () => { + await expectSearch({ query: {}, sort: "age", sortOrder: SortOrder.ASCENDING, - }).toMatchExactly([{ age: 1 }, { age: 10 }])) + }).toMatchExactly([{ age: 1 }, { age: 10 }]) + }) - it("sorts descending", () => - expectSearch({ + it("sorts descending", async () => { + await expectSearch({ query: {}, sort: "age", sortOrder: SortOrder.DESCENDING, - }).toMatchExactly([{ age: 10 }, { age: 1 }])) + }).toMatchExactly([{ age: 10 }, { age: 1 }]) + }) }) describe("sortType NUMBER", () => { - it("sorts ascending", () => - expectSearch({ + it("sorts ascending", async () => { + await expectSearch({ query: {}, sort: "age", sortType: SortType.NUMBER, sortOrder: SortOrder.ASCENDING, - }).toMatchExactly([{ age: 1 }, { age: 10 }])) + }).toMatchExactly([{ age: 1 }, { age: 10 }]) + }) - it("sorts descending", () => - expectSearch({ + it("sorts descending", async () => { + await expectSearch({ query: {}, sort: "age", sortType: SortType.NUMBER, sortOrder: SortOrder.DESCENDING, - }).toMatchExactly([{ age: 10 }, { age: 1 }])) + }).toMatchExactly([{ age: 10 }, { age: 1 }]) + }) }) }) @@ -958,117 +1100,133 @@ describe.each([ }) describe("equal", () => { - it("successfully finds a row", () => - expectQuery({ equal: { dob: JAN_1ST } }).toContainExactly([ + it("successfully finds a row", async () => { + await expectQuery({ equal: { dob: JAN_1ST } }).toContainExactly([ { dob: JAN_1ST }, - ])) + ]) + }) - it("fails to find nonexistent row", () => - expectQuery({ equal: { dob: JAN_2ND } }).toFindNothing()) + it("fails to find nonexistent row", async () => { + await expectQuery({ equal: { dob: JAN_2ND } }).toFindNothing() + }) }) describe("notEqual", () => { - it("successfully finds a row", () => - expectQuery({ notEqual: { dob: JAN_1ST } }).toContainExactly([ + it("successfully finds a row", async () => { + await expectQuery({ notEqual: { dob: JAN_1ST } }).toContainExactly([ { dob: JAN_10TH }, - ])) + ]) + }) - it("fails to find nonexistent row", () 
=> - expectQuery({ notEqual: { dob: JAN_10TH } }).toContainExactly([ + it("fails to find nonexistent row", async () => { + await expectQuery({ notEqual: { dob: JAN_10TH } }).toContainExactly([ { dob: JAN_1ST }, - ])) + ]) + }) }) describe("oneOf", () => { - it("successfully finds a row", () => - expectQuery({ oneOf: { dob: [JAN_1ST] } }).toContainExactly([ + it("successfully finds a row", async () => { + await expectQuery({ oneOf: { dob: [JAN_1ST] } }).toContainExactly([ { dob: JAN_1ST }, - ])) + ]) + }) - it("fails to find nonexistent row", () => - expectQuery({ oneOf: { dob: [JAN_2ND] } }).toFindNothing()) + it("fails to find nonexistent row", async () => { + await expectQuery({ oneOf: { dob: [JAN_2ND] } }).toFindNothing() + }) }) describe("range", () => { - it("successfully finds a row", () => - expectQuery({ + it("successfully finds a row", async () => { + await expectQuery({ range: { dob: { low: JAN_1ST, high: JAN_5TH } }, - }).toContainExactly([{ dob: JAN_1ST }])) + }).toContainExactly([{ dob: JAN_1ST }]) + }) - it("successfully finds multiple rows", () => - expectQuery({ + it("successfully finds multiple rows", async () => { + await expectQuery({ range: { dob: { low: JAN_1ST, high: JAN_10TH } }, - }).toContainExactly([{ dob: JAN_1ST }, { dob: JAN_10TH }])) + }).toContainExactly([{ dob: JAN_1ST }, { dob: JAN_10TH }]) + }) - it("successfully finds a row with a high bound", () => - expectQuery({ + it("successfully finds a row with a high bound", async () => { + await expectQuery({ range: { dob: { low: JAN_5TH, high: JAN_10TH } }, - }).toContainExactly([{ dob: JAN_10TH }])) + }).toContainExactly([{ dob: JAN_10TH }]) + }) - it("successfully finds no rows", () => - expectQuery({ + it("successfully finds no rows", async () => { + await expectQuery({ range: { dob: { low: JAN_5TH, high: JAN_9TH } }, - }).toFindNothing()) + }).toFindNothing() + }) // We never implemented half-open ranges in Lucene. !isLucene && - it("can search using just a low value", () => - expectQuery({ + it("can search using just a low value", async () => { + await expectQuery({ range: { dob: { low: JAN_5TH } }, - }).toContainExactly([{ dob: JAN_10TH }])) + }).toContainExactly([{ dob: JAN_10TH }]) + }) // We never implemented half-open ranges in Lucene. 
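// What a half-open range means operationally: only the bound that is present
// becomes a predicate. A sketch of the SQL-side translation, assuming a knex
// query builder (not code from this patch):
function applyRange(
  query: Knex.QueryBuilder,
  column: string,
  range: { low?: string | number; high?: string | number }
) {
  if (range.low !== undefined) {
    query = query.where(column, ">=", range.low)
  }
  if (range.high !== undefined) {
    query = query.where(column, "<=", range.high)
  }
  return query
}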
!isLucene && - it("can search using just a high value", () => - expectQuery({ + it("can search using just a high value", async () => { + await expectQuery({ range: { dob: { high: JAN_5TH } }, - }).toContainExactly([{ dob: JAN_1ST }])) + }).toContainExactly([{ dob: JAN_1ST }]) + }) }) describe("sort", () => { - it("sorts ascending", () => - expectSearch({ + it("sorts ascending", async () => { + await expectSearch({ query: {}, sort: "dob", sortOrder: SortOrder.ASCENDING, - }).toMatchExactly([{ dob: JAN_1ST }, { dob: JAN_10TH }])) + }).toMatchExactly([{ dob: JAN_1ST }, { dob: JAN_10TH }]) + }) - it("sorts descending", () => - expectSearch({ + it("sorts descending", async () => { + await expectSearch({ query: {}, sort: "dob", sortOrder: SortOrder.DESCENDING, - }).toMatchExactly([{ dob: JAN_10TH }, { dob: JAN_1ST }])) + }).toMatchExactly([{ dob: JAN_10TH }, { dob: JAN_1ST }]) + }) describe("sortType STRING", () => { - it("sorts ascending", () => - expectSearch({ + it("sorts ascending", async () => { + await expectSearch({ query: {}, sort: "dob", sortType: SortType.STRING, sortOrder: SortOrder.ASCENDING, - }).toMatchExactly([{ dob: JAN_1ST }, { dob: JAN_10TH }])) + }).toMatchExactly([{ dob: JAN_1ST }, { dob: JAN_10TH }]) + }) - it("sorts descending", () => - expectSearch({ + it("sorts descending", async () => { + await expectSearch({ query: {}, sort: "dob", sortType: SortType.STRING, sortOrder: SortOrder.DESCENDING, - }).toMatchExactly([{ dob: JAN_10TH }, { dob: JAN_1ST }])) + }).toMatchExactly([{ dob: JAN_10TH }, { dob: JAN_1ST }]) + }) }) }) }) !isInternal && describe("datetime - time only", () => { - const T_1000 = "10:00" - const T_1045 = "10:45" - const T_1200 = "12:00" - const T_1530 = "15:30" - const T_0000 = "00:00" + const T_1000 = "10:00:00" + const T_1045 = "10:45:00" + const T_1200 = "12:00:00" + const T_1530 = "15:30:00" + const T_0000 = "00:00:00" - const UNEXISTING_TIME = "10:01" + const UNEXISTING_TIME = "10:01:00" const NULL_TIME__ID = `null_time__id` @@ -1089,72 +1247,85 @@ describe.each([ }) describe("equal", () => { - it("successfully finds a row", () => - expectQuery({ equal: { time: T_1000 } }).toContainExactly([ + it("successfully finds a row", async () => { + await expectQuery({ equal: { time: T_1000 } }).toContainExactly([ { time: "10:00:00" }, - ])) + ]) + }) - it("fails to find nonexistent row", () => - expectQuery({ equal: { time: UNEXISTING_TIME } }).toFindNothing()) + it("fails to find nonexistent row", async () => { + await expectQuery({ + equal: { time: UNEXISTING_TIME }, + }).toFindNothing() + }) }) describe("notEqual", () => { - it("successfully finds a row", () => - expectQuery({ notEqual: { time: T_1000 } }).toContainExactly([ + it("successfully finds a row", async () => { + await expectQuery({ notEqual: { time: T_1000 } }).toContainExactly([ { timeid: NULL_TIME__ID }, { time: "10:45:00" }, { time: "12:00:00" }, { time: "15:30:00" }, { time: "00:00:00" }, - ])) + ]) + }) - it("return all when requesting non-existing", () => - expectQuery({ notEqual: { time: UNEXISTING_TIME } }).toContainExactly( - [ - { timeid: NULL_TIME__ID }, - { time: "10:00:00" }, - { time: "10:45:00" }, - { time: "12:00:00" }, - { time: "15:30:00" }, - { time: "00:00:00" }, - ] - )) + it("return all when requesting non-existing", async () => { + await expectQuery({ + notEqual: { time: UNEXISTING_TIME }, + }).toContainExactly([ + { timeid: NULL_TIME__ID }, + { time: "10:00:00" }, + { time: "10:45:00" }, + { time: "12:00:00" }, + { time: "15:30:00" }, + { time: "00:00:00" }, + ]) + }) }) 
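The change running through this whole spec file is mechanical: tests written as `it("...", () => expectQuery(...).toX())` become `it("...", async () => { await expectQuery(...).toX() })`. A minimal sketch of the two styles, assuming a Jest + TypeScript setup — the `search` and `expectQuery` helpers here are simplified stand-ins for the suite's real helpers, not Budibase APIs:

type Row = Record<string, unknown>

const data: Row[] = [{ age: 1 }, { age: 10 }]

// stub search: a simple equality filter over the in-memory rows
async function search(query: { equal?: Row }): Promise<Row[]> {
  const equal = query.equal ?? {}
  return data.filter(row =>
    Object.entries(equal).every(([key, value]) => row[key] === value)
  )
}

// the matcher is async, so its promise must be returned or awaited
function expectQuery(query: { equal?: Row }) {
  return {
    async toContainExactly(expected: Row[]) {
      const found = await search(query)
      expect(found).toHaveLength(expected.length)
      for (const row of expected) {
        expect(found).toContainEqual(expect.objectContaining(row))
      }
    },
  }
}

describe("async test style", () => {
  // old style: the arrow body returns the promise, which Jest does await,
  // but a forgotten `return` silently swallows failures and the body can
  // never grow a second statement
  it("finds a row (implicit return)", () =>
    expectQuery({ equal: { age: 1 } }).toContainExactly([{ age: 1 }]))

  // new style used throughout this diff: explicit async body with await
  it("finds a row (async/await)", async () => {
    await expectQuery({ equal: { age: 1 } }).toContainExactly([{ age: 1 }])
  })
})
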
describe("oneOf", () => { - it("successfully finds a row", () => - expectQuery({ oneOf: { time: [T_1000] } }).toContainExactly([ + it("successfully finds a row", async () => { + await expectQuery({ oneOf: { time: [T_1000] } }).toContainExactly([ { time: "10:00:00" }, - ])) + ]) + }) - it("fails to find nonexistent row", () => - expectQuery({ oneOf: { time: [UNEXISTING_TIME] } }).toFindNothing()) + it("fails to find nonexistent row", async () => { + await expectQuery({ + oneOf: { time: [UNEXISTING_TIME] }, + }).toFindNothing() + }) }) describe("range", () => { - it("successfully finds a row", () => - expectQuery({ + it("successfully finds a row", async () => { + await expectQuery({ range: { time: { low: T_1045, high: T_1045 } }, - }).toContainExactly([{ time: "10:45:00" }])) + }).toContainExactly([{ time: "10:45:00" }]) + }) - it("successfully finds multiple rows", () => - expectQuery({ + it("successfully finds multiple rows", async () => { + await expectQuery({ range: { time: { low: T_1045, high: T_1530 } }, }).toContainExactly([ { time: "10:45:00" }, { time: "12:00:00" }, { time: "15:30:00" }, - ])) + ]) + }) - it("successfully finds no rows", () => - expectQuery({ + it("successfully finds no rows", async () => { + await expectQuery({ range: { time: { low: UNEXISTING_TIME, high: UNEXISTING_TIME } }, - }).toFindNothing()) + }).toFindNothing() + }) }) describe("sort", () => { - it("sorts ascending", () => - expectSearch({ + it("sorts ascending", async () => { + await expectSearch({ query: {}, sort: "time", sortOrder: SortOrder.ASCENDING, @@ -1165,10 +1336,11 @@ describe.each([ { time: "10:45:00" }, { time: "12:00:00" }, { time: "15:30:00" }, - ])) + ]) + }) - it("sorts descending", () => - expectSearch({ + it("sorts descending", async () => { + await expectSearch({ query: {}, sort: "time", sortOrder: SortOrder.DESCENDING, @@ -1179,11 +1351,12 @@ describe.each([ { time: "10:00:00" }, { time: "00:00:00" }, { timeid: NULL_TIME__ID }, - ])) + ]) + }) describe("sortType STRING", () => { - it("sorts ascending", () => - expectSearch({ + it("sorts ascending", async () => { + await expectSearch({ query: {}, sort: "time", sortType: SortType.STRING, @@ -1195,10 +1368,11 @@ describe.each([ { time: "10:45:00" }, { time: "12:00:00" }, { time: "15:30:00" }, - ])) + ]) + }) - it("sorts descending", () => - expectSearch({ + it("sorts descending", async () => { + await expectSearch({ query: {}, sort: "time", sortType: SortType.STRING, @@ -1210,7 +1384,8 @@ describe.each([ { time: "10:00:00" }, { time: "00:00:00" }, { timeid: NULL_TIME__ID }, - ])) + ]) + }) }) }) }) @@ -1228,64 +1403,78 @@ describe.each([ }) describe("contains", () => { - it("successfully finds a row", () => - expectQuery({ contains: { numbers: ["one"] } }).toContainExactly([ + it("successfully finds a row", async () => { + await expectQuery({ contains: { numbers: ["one"] } }).toContainExactly([ { numbers: ["one", "two"] }, - ])) + ]) + }) - it("fails to find nonexistent row", () => - expectQuery({ contains: { numbers: ["none"] } }).toFindNothing()) + it("fails to find nonexistent row", async () => { + await expectQuery({ contains: { numbers: ["none"] } }).toFindNothing() + }) - it("fails to find row containing all", () => - expectQuery({ + it("fails to find row containing all", async () => { + await expectQuery({ contains: { numbers: ["one", "two", "three"] }, - }).toFindNothing()) + }).toFindNothing() + }) - it("finds all with empty list", () => - expectQuery({ contains: { numbers: [] } }).toContainExactly([ + it("finds all with empty 
list", async () => { + await expectQuery({ contains: { numbers: [] } }).toContainExactly([ { numbers: ["one", "two"] }, { numbers: ["three"] }, - ])) + ]) + }) }) describe("notContains", () => { - it("successfully finds a row", () => - expectQuery({ notContains: { numbers: ["one"] } }).toContainExactly([ - { numbers: ["three"] }, - ])) + it("successfully finds a row", async () => { + await expectQuery({ + notContains: { numbers: ["one"] }, + }).toContainExactly([{ numbers: ["three"] }]) + }) - it("fails to find nonexistent row", () => - expectQuery({ + it("fails to find nonexistent row", async () => { + await expectQuery({ notContains: { numbers: ["one", "two", "three"] }, }).toContainExactly([ { numbers: ["one", "two"] }, { numbers: ["three"] }, - ])) + ]) + }) - it("finds all with empty list", () => - expectQuery({ notContains: { numbers: [] } }).toContainExactly([ + // Not sure if this is correct behaviour but changing it would be a + // breaking change. + it("finds all with empty list", async () => { + await expectQuery({ notContains: { numbers: [] } }).toContainExactly([ { numbers: ["one", "two"] }, { numbers: ["three"] }, - ])) + ]) + }) }) describe("containsAny", () => { - it("successfully finds rows", () => - expectQuery({ + it("successfully finds rows", async () => { + await expectQuery({ containsAny: { numbers: ["one", "two", "three"] }, }).toContainExactly([ { numbers: ["one", "two"] }, { numbers: ["three"] }, - ])) + ]) + }) - it("fails to find nonexistent row", () => - expectQuery({ containsAny: { numbers: ["none"] } }).toFindNothing()) + it("fails to find nonexistent row", async () => { + await expectQuery({ + containsAny: { numbers: ["none"] }, + }).toFindNothing() + }) - it("finds all with empty list", () => - expectQuery({ containsAny: { numbers: [] } }).toContainExactly([ + it("finds all with empty list", async () => { + await expectQuery({ containsAny: { numbers: [] } }).toContainExactly([ { numbers: ["one", "two"] }, { numbers: ["three"] }, - ])) + ]) + }) }) }) @@ -1304,48 +1493,56 @@ describe.each([ }) describe("equal", () => { - it("successfully finds a row", () => - expectQuery({ equal: { num: SMALL } }).toContainExactly([ + it("successfully finds a row", async () => { + await expectQuery({ equal: { num: SMALL } }).toContainExactly([ { num: SMALL }, - ])) + ]) + }) - it("successfully finds a big value", () => - expectQuery({ equal: { num: BIG } }).toContainExactly([{ num: BIG }])) + it("successfully finds a big value", async () => { + await expectQuery({ equal: { num: BIG } }).toContainExactly([ + { num: BIG }, + ]) + }) - it("fails to find nonexistent row", () => - expectQuery({ equal: { num: "2" } }).toFindNothing()) + it("fails to find nonexistent row", async () => { + await expectQuery({ equal: { num: "2" } }).toFindNothing() + }) }) describe("notEqual", () => { - it("successfully finds a row", () => - expectQuery({ notEqual: { num: SMALL } }).toContainExactly([ + it("successfully finds a row", async () => { + await expectQuery({ notEqual: { num: SMALL } }).toContainExactly([ { num: MEDIUM }, { num: BIG }, - ])) + ]) + }) - it("fails to find nonexistent row", () => - expectQuery({ notEqual: { num: 10 } }).toContainExactly([ + it("fails to find nonexistent row", async () => { + await expectQuery({ notEqual: { num: 10 } }).toContainExactly([ { num: SMALL }, { num: MEDIUM }, { num: BIG }, - ])) + ]) + }) }) describe("oneOf", () => { - it("successfully finds a row", () => - expectQuery({ oneOf: { num: [SMALL] } }).toContainExactly([ + it("successfully finds a row", 
async () => { + await expectQuery({ oneOf: { num: [SMALL] } }).toContainExactly([ { num: SMALL }, - ])) + ]) + }) - it("successfully finds all rows", () => - expectQuery({ oneOf: { num: [SMALL, MEDIUM, BIG] } }).toContainExactly([ - { num: SMALL }, - { num: MEDIUM }, - { num: BIG }, - ])) + it("successfully finds all rows", async () => { + await expectQuery({ + oneOf: { num: [SMALL, MEDIUM, BIG] }, + }).toContainExactly([{ num: SMALL }, { num: MEDIUM }, { num: BIG }]) + }) - it("fails to find nonexistent row", () => - expectQuery({ oneOf: { num: [2] } }).toFindNothing()) + it("fails to find nonexistent row", async () => { + await expectQuery({ oneOf: { num: [2] } }).toFindNothing() + }) }) // Range searches against bigints don't seem to work at all in Lucene, and I @@ -1353,35 +1550,41 @@ describe.each([ // we've decided not to spend time on it. !isLucene && describe("range", () => { - it("successfully finds a row", () => - expectQuery({ + it("successfully finds a row", async () => { + await expectQuery({ range: { num: { low: SMALL, high: "5" } }, - }).toContainExactly([{ num: SMALL }])) + }).toContainExactly([{ num: SMALL }]) + }) - it("successfully finds multiple rows", () => - expectQuery({ + it("successfully finds multiple rows", async () => { + await expectQuery({ range: { num: { low: SMALL, high: MEDIUM } }, - }).toContainExactly([{ num: SMALL }, { num: MEDIUM }])) + }).toContainExactly([{ num: SMALL }, { num: MEDIUM }]) + }) - it("successfully finds a row with a high bound", () => - expectQuery({ + it("successfully finds a row with a high bound", async () => { + await expectQuery({ range: { num: { low: MEDIUM, high: BIG } }, - }).toContainExactly([{ num: MEDIUM }, { num: BIG }])) + }).toContainExactly([{ num: MEDIUM }, { num: BIG }]) + }) - it("successfully finds no rows", () => - expectQuery({ + it("successfully finds no rows", async () => { + await expectQuery({ range: { num: { low: "5", high: "5" } }, - }).toFindNothing()) + }).toFindNothing() + }) - it("can search using just a low value", () => - expectQuery({ + it("can search using just a low value", async () => { + await expectQuery({ range: { num: { low: MEDIUM } }, - }).toContainExactly([{ num: MEDIUM }, { num: BIG }])) + }).toContainExactly([{ num: MEDIUM }, { num: BIG }]) + }) - it("can search using just a high value", () => - expectQuery({ + it("can search using just a high value", async () => { + await expectQuery({ range: { num: { high: MEDIUM } }, - }).toContainExactly([{ num: SMALL }, { num: MEDIUM }])) + }).toContainExactly([{ num: SMALL }, { num: MEDIUM }]) + }) }) }) @@ -1400,16 +1603,20 @@ describe.each([ }) describe("equal", () => { - it("successfully finds a row", () => - expectQuery({ equal: { auto: 1 } }).toContainExactly([{ auto: 1 }])) + it("successfully finds a row", async () => { + await expectQuery({ equal: { auto: 1 } }).toContainExactly([ + { auto: 1 }, + ]) + }) - it("fails to find nonexistent row", () => - expectQuery({ equal: { auto: 0 } }).toFindNothing()) + it("fails to find nonexistent row", async () => { + await expectQuery({ equal: { auto: 0 } }).toFindNothing() + }) }) describe("not equal", () => { - it("successfully finds a row", () => - expectQuery({ notEqual: { auto: 1 } }).toContainExactly([ + it("successfully finds a row", async () => { + await expectQuery({ notEqual: { auto: 1 } }).toContainExactly([ { auto: 2 }, { auto: 3 }, { auto: 4 }, @@ -1419,10 +1626,11 @@ describe.each([ { auto: 8 }, { auto: 9 }, { auto: 10 }, - ])) + ]) + }) - it("fails to find nonexistent row", () => - 
expectQuery({ notEqual: { auto: 0 } }).toContainExactly([ + it("fails to find nonexistent row", async () => { + await expectQuery({ notEqual: { auto: 0 } }).toContainExactly([ { auto: 1 }, { auto: 2 }, { auto: 3 }, @@ -1433,55 +1641,66 @@ describe.each([ { auto: 8 }, { auto: 9 }, { auto: 10 }, - ])) + ]) + }) }) describe("oneOf", () => { - it("successfully finds a row", () => - expectQuery({ oneOf: { auto: [1] } }).toContainExactly([{ auto: 1 }])) + it("successfully finds a row", async () => { + await expectQuery({ oneOf: { auto: [1] } }).toContainExactly([ + { auto: 1 }, + ]) + }) - it("fails to find nonexistent row", () => - expectQuery({ oneOf: { auto: [0] } }).toFindNothing()) + it("fails to find nonexistent row", async () => { + await expectQuery({ oneOf: { auto: [0] } }).toFindNothing() + }) }) describe("range", () => { - it("successfully finds a row", () => - expectQuery({ + it("successfully finds a row", async () => { + await expectQuery({ range: { auto: { low: 1, high: 1 } }, - }).toContainExactly([{ auto: 1 }])) + }).toContainExactly([{ auto: 1 }]) + }) - it("successfully finds multiple rows", () => - expectQuery({ + it("successfully finds multiple rows", async () => { + await expectQuery({ range: { auto: { low: 1, high: 2 } }, - }).toContainExactly([{ auto: 1 }, { auto: 2 }])) + }).toContainExactly([{ auto: 1 }, { auto: 2 }]) + }) - it("successfully finds a row with a high bound", () => - expectQuery({ + it("successfully finds a row with a high bound", async () => { + await expectQuery({ range: { auto: { low: 2, high: 2 } }, - }).toContainExactly([{ auto: 2 }])) + }).toContainExactly([{ auto: 2 }]) + }) - it("successfully finds no rows", () => - expectQuery({ + it("successfully finds no rows", async () => { + await expectQuery({ range: { auto: { low: 0, high: 0 } }, - }).toFindNothing()) + }).toFindNothing() + }) isSqs && - it("can search using just a low value", () => - expectQuery({ + it("can search using just a low value", async () => { + await expectQuery({ range: { auto: { low: 9 } }, - }).toContainExactly([{ auto: 9 }, { auto: 10 }])) + }).toContainExactly([{ auto: 9 }, { auto: 10 }]) + }) isSqs && - it("can search using just a high value", () => - expectQuery({ + it("can search using just a high value", async () => { + await expectQuery({ range: { auto: { high: 2 } }, - }).toContainExactly([{ auto: 1 }, { auto: 2 }])) + }).toContainExactly([{ auto: 1 }, { auto: 2 }]) + }) }) isSqs && describe("sort", () => { - it("sorts ascending", () => - expectSearch({ + it("sorts ascending", async () => { + await expectSearch({ query: {}, sort: "auto", sortOrder: SortOrder.ASCENDING, @@ -1496,10 +1715,11 @@ describe.each([ { auto: 8 }, { auto: 9 }, { auto: 10 }, - ])) + ]) + }) - it("sorts descending", () => - expectSearch({ + it("sorts descending", async () => { + await expectSearch({ query: {}, sort: "auto", sortOrder: SortOrder.DESCENDING, @@ -1514,60 +1734,70 @@ describe.each([ { auto: 3 }, { auto: 2 }, { auto: 1 }, - ])) + ]) + }) // This is important for pagination. The order of results must always // be stable or pagination will break. We don't want the user to need // to specify an order for pagination to work. 
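The two tests that follow both drive the same bookmark protocol: request a page, pass the returned bookmark into the next request, and stop when the server reports no further pages. A condensed sketch of that loop, with `fetchPage` as a hypothetical stand-in for `config.api.row.search` and the response shape assumed from the tests below:

type Row = Record<string, unknown>

interface SearchPage {
  rows: Row[]
  bookmark?: string | number
  hasNextPage?: boolean
}

async function collectAllRows(
  fetchPage: (bookmark?: string | number) => Promise<SearchPage>
): Promise<Row[]> {
  const all: Row[] = []
  let bookmark: string | number | undefined = undefined
  let hasNextPage = true
  while (hasNextPage) {
    const page = await fetchPage(bookmark)
    all.push(...page.rows)
    // the server returns an opaque bookmark; feed it into the next request
    bookmark = page.bookmark
    hasNextPage = Boolean(page.bookmark && page.hasNextPage)
  }
  return all
}

Checking `page.bookmark` as well as `hasNextPage` mirrors the `!response.bookmark || !response.hasNextPage` break condition in the pagination test, guarding against a response that claims a next page but supplies no bookmark to fetch it with.
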
it("is stable without a sort specified", async () => { - let { rows } = await config.api.row.search(table._id!, { - tableId: table._id!, - query: {}, - }) + let { rows: fullRowList } = await config.api.row.search( + table._id!, + { + tableId: table._id!, + query: {}, + } + ) - for (let i = 0; i < 10; i++) { + // repeat the search many times to check the first row is always the same + let bookmark: string | number | undefined, + hasNextPage: boolean | undefined = true, + rowCount: number = 0 + do { const response = await config.api.row.search(table._id!, { tableId: table._id!, limit: 1, - query: {}, - }) - expect(response.rows).toEqual(rows) - } - }) - }) - - // TODO(samwho): fix for SQS - !isSqs && - describe("pagination", () => { - it("should paginate through all rows", async () => { - // @ts-ignore - let bookmark: string | number = undefined - let rows: Row[] = [] - - // eslint-disable-next-line no-constant-condition - while (true) { - const response = await config.api.row.search(table._id!, { - tableId: table._id!, - limit: 3, + paginate: true, query: {}, bookmark, - paginate: true, }) - - rows.push(...response.rows) - - if (!response.bookmark || !response.hasNextPage) { - break - } bookmark = response.bookmark - } - - expect(rows).toHaveLength(10) - expect(rows.map(row => row.auto)).toEqual( - expect.arrayContaining([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]) - ) + hasNextPage = response.hasNextPage + expect(response.rows.length).toEqual(1) + const foundRow = response.rows[0] + expect(foundRow).toEqual(fullRowList[rowCount++]) + } while (hasNextPage) }) }) + + describe("pagination", () => { + it("should paginate through all rows", async () => { + // @ts-ignore + let bookmark: string | number = undefined + let rows: Row[] = [] + + // eslint-disable-next-line no-constant-condition + while (true) { + const response = await config.api.row.search(table._id!, { + tableId: table._id!, + limit: 3, + query: {}, + bookmark, + paginate: true, + }) + + rows.push(...response.rows) + + if (!response.bookmark || !response.hasNextPage) { + break + } + bookmark = response.bookmark + } + + const autoValues = rows.map(row => row.auto).sort((a, b) => a - b) + expect(autoValues).toEqual([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]) + }) + }) }) describe("field name 1:name", () => { @@ -1578,13 +1808,15 @@ describe.each([ await createRows([{ "1:name": "bar" }, { "1:name": "foo" }]) }) - it("successfully finds a row", () => - expectQuery({ equal: { "1:1:name": "bar" } }).toContainExactly([ + it("successfully finds a row", async () => { + await expectQuery({ equal: { "1:1:name": "bar" } }).toContainExactly([ { "1:name": "bar" }, - ])) + ]) + }) - it("fails to find nonexistent row", () => - expectQuery({ equal: { "1:1:name": "none" } }).toFindNothing()) + it("fails to find nonexistent row", async () => { + await expectQuery({ equal: { "1:1:name": "none" } }).toFindNothing() + }) }) describe("user", () => { @@ -1611,51 +1843,59 @@ describe.each([ }) describe("equal", () => { - it("successfully finds a row", () => - expectQuery({ equal: { user: user1._id } }).toContainExactly([ + it("successfully finds a row", async () => { + await expectQuery({ equal: { user: user1._id } }).toContainExactly([ { user: { _id: user1._id } }, - ])) + ]) + }) - it("fails to find nonexistent row", () => - expectQuery({ equal: { user: "us_none" } }).toFindNothing()) + it("fails to find nonexistent row", async () => { + await expectQuery({ equal: { user: "us_none" } }).toFindNothing() + }) }) describe("notEqual", () => { - it("successfully finds a row", () => 
- expectQuery({ notEqual: { user: user1._id } }).toContainExactly([ + it("successfully finds a row", async () => { + await expectQuery({ notEqual: { user: user1._id } }).toContainExactly([ { user: { _id: user2._id } }, {}, - ])) + ]) + }) - it("fails to find nonexistent row", () => - expectQuery({ notEqual: { user: "us_none" } }).toContainExactly([ + it("fails to find nonexistent row", async () => { + await expectQuery({ notEqual: { user: "us_none" } }).toContainExactly([ { user: { _id: user1._id } }, { user: { _id: user2._id } }, {}, - ])) + ]) + }) }) describe("oneOf", () => { - it("successfully finds a row", () => - expectQuery({ oneOf: { user: [user1._id] } }).toContainExactly([ + it("successfully finds a row", async () => { + await expectQuery({ oneOf: { user: [user1._id] } }).toContainExactly([ { user: { _id: user1._id } }, - ])) + ]) + }) - it("fails to find nonexistent row", () => - expectQuery({ oneOf: { user: ["us_none"] } }).toFindNothing()) + it("fails to find nonexistent row", async () => { + await expectQuery({ oneOf: { user: ["us_none"] } }).toFindNothing() + }) }) describe("empty", () => { - it("finds empty rows", () => - expectQuery({ empty: { user: null } }).toContainExactly([{}])) + it("finds empty rows", async () => { + await expectQuery({ empty: { user: null } }).toContainExactly([{}]) + }) }) describe("notEmpty", () => { - it("finds non-empty rows", () => - expectQuery({ notEmpty: { user: null } }).toContainExactly([ + it("finds non-empty rows", async () => { + await expectQuery({ notEmpty: { user: null } }).toContainExactly([ { user: { _id: user1._id } }, { user: { _id: user2._id } }, - ])) + ]) + }) }) }) @@ -1689,105 +1929,219 @@ describe.each([ }) describe("contains", () => { - it("successfully finds a row", () => - expectQuery({ contains: { users: [user1._id] } }).toContainExactly([ + it("successfully finds a row", async () => { + await expectQuery({ + contains: { users: [user1._id] }, + }).toContainExactly([ { users: [{ _id: user1._id }] }, { users: [{ _id: user1._id }, { _id: user2._id }] }, - ])) + ]) + }) - it("fails to find nonexistent row", () => - expectQuery({ contains: { users: ["us_none"] } }).toFindNothing()) + it("successfully finds a row searching with a string", async () => { + await expectQuery({ + // @ts-expect-error this test specifically goes against the type to + // test that we coerce the string to an array. 
+ contains: { "1:users": user1._id }, + }).toContainExactly([ + { users: [{ _id: user1._id }] }, + { users: [{ _id: user1._id }, { _id: user2._id }] }, + ]) + }) + + it("fails to find nonexistent row", async () => { + await expectQuery({ contains: { users: ["us_none"] } }).toFindNothing() + }) }) describe("notContains", () => { - it("successfully finds a row", () => - expectQuery({ notContains: { users: [user1._id] } }).toContainExactly([ - { users: [{ _id: user2._id }] }, - {}, - ])) + it("successfully finds a row", async () => { + await expectQuery({ + notContains: { users: [user1._id] }, + }).toContainExactly([{ users: [{ _id: user2._id }] }, {}]) + }) - it("fails to find nonexistent row", () => - expectQuery({ notContains: { users: ["us_none"] } }).toContainExactly([ + it("fails to find nonexistent row", async () => { + await expectQuery({ + notContains: { users: ["us_none"] }, + }).toContainExactly([ { users: [{ _id: user1._id }] }, { users: [{ _id: user2._id }] }, { users: [{ _id: user1._id }, { _id: user2._id }] }, {}, - ])) + ]) + }) }) describe("containsAny", () => { - it("successfully finds rows", () => - expectQuery({ + it("successfully finds rows", async () => { + await expectQuery({ containsAny: { users: [user1._id, user2._id] }, }).toContainExactly([ { users: [{ _id: user1._id }] }, { users: [{ _id: user2._id }] }, { users: [{ _id: user1._id }, { _id: user2._id }] }, - ])) + ]) + }) - it("fails to find nonexistent row", () => - expectQuery({ containsAny: { users: ["us_none"] } }).toFindNothing()) + it("fails to find nonexistent row", async () => { + await expectQuery({ + containsAny: { users: ["us_none"] }, + }).toFindNothing() + }) }) describe("multi-column equals", () => { - it("successfully finds a row", () => - expectQuery({ + it("successfully finds a row", async () => { + await expectQuery({ equal: { number: 1 }, contains: { users: [user1._id] }, - }).toContainExactly([{ users: [{ _id: user1._id }], number: 1 }])) + }).toContainExactly([{ users: [{ _id: user1._id }], number: 1 }]) + }) - it("fails to find nonexistent row", () => - expectQuery({ + it("fails to find nonexistent row", async () => { + await expectQuery({ equal: { number: 2 }, contains: { users: [user1._id] }, - }).toFindNothing()) + }).toFindNothing() + }) }) }) // This will never work for Lucene. !isLucene && + // It also can't work for in-memory searching because the related table name + // isn't available. 
+ !isInMemory && describe("relations", () => { - let otherTable: Table - let rows: Row[] + let productCategoryTable: Table, productCatRows: Row[] beforeAll(async () => { - otherTable = await createTable({ - one: { name: "one", type: FieldType.STRING }, - }) - table = await createTable({ - two: { name: "two", type: FieldType.STRING }, - other: { - type: FieldType.LINK, - relationshipType: RelationshipType.ONE_TO_MANY, - name: "other", - fieldName: "other", - tableId: otherTable._id!, - constraints: { - type: "array", + productCategoryTable = await createTable( + { + name: { name: "name", type: FieldType.STRING }, + }, + "productCategory" + ) + table = await createTable( + { + name: { name: "name", type: FieldType.STRING }, + productCat: { + type: FieldType.LINK, + relationshipType: RelationshipType.ONE_TO_MANY, + name: "productCat", + fieldName: "product", + tableId: productCategoryTable._id!, + constraints: { + type: "array", + }, }, }, - }) + "product" + ) - rows = await Promise.all([ - config.api.row.save(otherTable._id!, { one: "foo" }), - config.api.row.save(otherTable._id!, { one: "bar" }), + productCatRows = await Promise.all([ + config.api.row.save(productCategoryTable._id!, { name: "foo" }), + config.api.row.save(productCategoryTable._id!, { name: "bar" }), ]) await Promise.all([ config.api.row.save(table._id!, { - two: "foo", - other: [rows[0]._id], + name: "foo", + productCat: [productCatRows[0]._id], }), config.api.row.save(table._id!, { - two: "bar", - other: [rows[1]._id], + name: "bar", + productCat: [productCatRows[1]._id], + }), + config.api.row.save(table._id!, { + name: "baz", + productCat: [], }), ]) }) - it("can search through relations", () => - expectQuery({ - equal: { [`${otherTable.name}.one`]: "foo" }, - }).toContainExactly([{ two: "foo", other: [{ _id: rows[0]._id }] }])) + it("should be able to filter by relationship using column name", async () => { + await expectQuery({ + equal: { ["productCat.name"]: "foo" }, + }).toContainExactly([ + { name: "foo", productCat: [{ _id: productCatRows[0]._id }] }, + ]) + }) + + it("should be able to filter by relationship using table name", async () => { + await expectQuery({ + equal: { ["productCategory.name"]: "foo" }, + }).toContainExactly([ + { name: "foo", productCat: [{ _id: productCatRows[0]._id }] }, + ]) + }) + + it("shouldn't return any relationship for last row", async () => { + await expectQuery({ + equal: { ["name"]: "baz" }, + }).toContainExactly([{ name: "baz", productCat: undefined }]) + }) }) + + // lucene can't count the total rows + !isLucene && + describe("row counting", () => { + beforeAll(async () => { + table = await createTable({ + name: { + name: "name", + type: FieldType.STRING, + }, + }) + await createRows([{ name: "a" }, { name: "b" }]) + }) + + it("should be able to count rows when option set", async () => { + await expectSearch({ + countRows: true, + query: { + notEmpty: { + name: true, + }, + }, + }).toMatch({ totalRows: 2, rows: expect.any(Array) }) + }) + + it("shouldn't count rows when option is not set", async () => { + await expectSearch({ + countRows: false, + query: { + notEmpty: { + name: true, + }, + }, + }).toNotHaveProperty(["totalRows"]) + }) + }) + + describe.each(["data_name_test", "name_data_test", "name_test_data_"])( + "special (%s) case", + column => { + beforeAll(async () => { + table = await createTable({ + [column]: { + name: column, + type: FieldType.STRING, + }, + }) + await createRows([{ [column]: "a" }, { [column]: "b" }]) + }) + + it("should be able to query a column with 
data_ in it", async () => { + await expectSearch({ + query: { + equal: { + [`1:${column}`]: "a", + }, + }, + }).toContainExactly([{ [column]: "a" }]) + }) + } + ) }) diff --git a/packages/server/src/api/routes/tests/table.spec.ts b/packages/server/src/api/routes/tests/table.spec.ts index f23e0de6db..8102966ad1 100644 --- a/packages/server/src/api/routes/tests/table.spec.ts +++ b/packages/server/src/api/routes/tests/table.spec.ts @@ -276,6 +276,31 @@ describe.each([ }) }) + isInternal && + it("shouldn't allow duplicate column names", async () => { + const saveTableRequest: SaveTableRequest = { + ...basicTable(), + } + saveTableRequest.schema["Type"] = { + type: FieldType.STRING, + name: "Type", + } + // allow the "Type" column - internal columns aren't case sensitive + await config.api.table.save(saveTableRequest, { + status: 200, + }) + saveTableRequest.schema.foo = { type: FieldType.STRING, name: "foo" } + saveTableRequest.schema.FOO = { type: FieldType.STRING, name: "FOO" } + + await config.api.table.save(saveTableRequest, { + status: 400, + body: { + message: + 'Column(s) "foo" are duplicated - check for other columns with these name (case in-sensitive)', + }, + }) + }) + it("should add a new column for an internal DB table", async () => { const saveTableRequest: SaveTableRequest = { ...basicTable(), diff --git a/packages/server/src/api/routes/tests/utilities/TestFunctions.ts b/packages/server/src/api/routes/tests/utilities/TestFunctions.ts index 8a843551ac..27d8592849 100644 --- a/packages/server/src/api/routes/tests/utilities/TestFunctions.ts +++ b/packages/server/src/api/routes/tests/utilities/TestFunctions.ts @@ -158,15 +158,16 @@ export const getDB = () => { return context.getAppDB() } -export const testAutomation = async (config: any, automation: any) => { +export const testAutomation = async ( + config: any, + automation: any, + triggerInputs: any +) => { return runRequest(automation.appId, async () => { return await config.request .post(`/api/automations/${automation._id}/test`) .send({ - row: { - name: "Test", - description: "TEST", - }, + ...triggerInputs, }) .set(config.defaultHeaders()) .expect("Content-Type", /json/) diff --git a/packages/server/src/api/routes/tests/viewV2.spec.ts b/packages/server/src/api/routes/tests/viewV2.spec.ts index 8069fadf10..99ff4f8db7 100644 --- a/packages/server/src/api/routes/tests/viewV2.spec.ts +++ b/packages/server/src/api/routes/tests/viewV2.spec.ts @@ -7,6 +7,7 @@ import { INTERNAL_TABLE_SOURCE_ID, PermissionLevel, QuotaUsageType, + Row, SaveTableRequest, SearchFilterOperator, SortOrder, @@ -17,28 +18,30 @@ import { UpdateViewRequest, ViewUIFieldMetadata, ViewV2, + SearchResponse, } from "@budibase/types" import { generator, mocks } from "@budibase/backend-core/tests" import { DatabaseName, getDatasource } from "../../../integrations/tests/utils" import merge from "lodash/merge" import { quotas } from "@budibase/pro" -import { roles } from "@budibase/backend-core" -import * as schemaUtils from "../../../utilities/schema" - -jest.mock("../../../utilities/schema") +import { db, roles } from "@budibase/backend-core" describe.each([ - ["internal", undefined], + ["lucene", undefined], + ["sqs", undefined], [DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)], [DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)], [DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)], [DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)], -])("/v2/views (%s)", (_, dsProvider) => { +])("/v2/views (%s)", (name, dsProvider) => { const 
config = setup.getConfig() - const isInternal = !dsProvider + const isSqs = name === "sqs" + const isLucene = name === "lucene" + const isInternal = isSqs || isLucene let table: Table let datasource: Datasource + let envCleanup: (() => void) | undefined function saveTableRequest( ...overrides: Partial>[] @@ -85,6 +88,9 @@ describe.each([ } beforeAll(async () => { + if (isSqs) { + envCleanup = config.setEnv({ SQS_SEARCH_ENABLE: "true" }) + } await config.init() if (dsProvider) { @@ -97,6 +103,9 @@ describe.each([ afterAll(async () => { setup.afterAll() + if (envCleanup) { + envCleanup() + } }) beforeEach(() => { @@ -120,6 +129,9 @@ describe.each([ const newView: CreateViewRequest = { name: generator.name(), tableId: table._id!, + schema: { + id: { visible: true }, + }, } const res = await config.api.viewV2.create(newView) @@ -134,7 +146,7 @@ describe.each([ const newView: Required = { name: generator.name(), tableId: table._id!, - primaryDisplay: generator.word(), + primaryDisplay: "id", query: [ { operator: SearchFilterOperator.EQUAL, @@ -148,6 +160,7 @@ describe.each([ type: SortType.STRING, }, schema: { + id: { visible: true }, Price: { visible: true, }, @@ -158,6 +171,7 @@ describe.each([ expect(res).toEqual({ ...newView, schema: { + id: { visible: true }, Price: { visible: true, }, @@ -172,6 +186,11 @@ describe.each([ name: generator.name(), tableId: table._id!, schema: { + id: { + name: "id", + type: FieldType.NUMBER, + visible: true, + }, Price: { name: "Price", type: FieldType.NUMBER, @@ -193,6 +212,7 @@ describe.each([ expect(createdView).toEqual({ ...newView, schema: { + id: { visible: true }, Price: { visible: true, order: 1, @@ -209,6 +229,12 @@ describe.each([ name: generator.name(), tableId: table._id!, schema: { + id: { + name: "id", + type: FieldType.AUTO, + autocolumn: true, + visible: true, + }, Price: { name: "Price", type: FieldType.NUMBER, @@ -230,8 +256,9 @@ describe.each([ const newView: CreateViewRequest = { name: generator.name(), tableId: table._id!, - primaryDisplay: generator.word(), + primaryDisplay: "id", schema: { + id: { visible: true }, Price: { visible: true }, Category: { visible: false }, }, @@ -241,6 +268,7 @@ describe.each([ expect(res).toEqual({ ...newView, schema: { + id: { visible: true }, Price: { visible: true, }, @@ -255,6 +283,7 @@ describe.each([ name: generator.name(), tableId: table._id!, schema: { + id: { visible: true }, nonExisting: { visible: true, }, @@ -293,6 +322,7 @@ describe.each([ name: generator.name(), tableId: table._id!, schema: { + id: { visible: true }, name: { visible: true, readonly: true, @@ -306,6 +336,7 @@ describe.each([ const res = await config.api.viewV2.create(newView) expect(res.schema).toEqual({ + id: { visible: true }, name: { visible: true, readonly: true, @@ -318,15 +349,13 @@ describe.each([ }) it("required fields cannot be marked as readonly", async () => { - const isRequiredSpy = jest.spyOn(schemaUtils, "isRequired") - isRequiredSpy.mockReturnValueOnce(true) - const table = await config.api.table.save( saveTableRequest({ schema: { name: { name: "name", type: FieldType.STRING, + constraints: { presence: true }, }, description: { name: "description", @@ -340,7 +369,9 @@ describe.each([ name: generator.name(), tableId: table._id!, schema: { + id: { visible: true }, name: { + visible: true, readonly: true, }, }, @@ -350,7 +381,7 @@ describe.each([ status: 400, body: { message: - 'Field "name" cannot be readonly as it is a required field', + 'You can\'t make "name" readonly because it is a required field.', status: 
400, }, }) @@ -376,6 +407,7 @@ describe.each([ name: generator.name(), tableId: table._id!, schema: { + id: { visible: true }, name: { visible: false, readonly: true, @@ -414,6 +446,7 @@ describe.each([ name: generator.name(), tableId: table._id!, schema: { + id: { visible: true }, name: { visible: true, readonly: true, @@ -424,12 +457,84 @@ describe.each([ await config.api.viewV2.create(newView, { status: 400, body: { - message: "Readonly fields are not enabled for your tenant", + message: "Readonly fields are not enabled", status: 400, }, }) }) }) + + it("display fields must be visible", async () => { + const table = await config.api.table.save( + saveTableRequest({ + schema: { + name: { + name: "name", + type: FieldType.STRING, + }, + description: { + name: "description", + type: FieldType.STRING, + }, + }, + }) + ) + + const newView: CreateViewRequest = { + name: generator.name(), + tableId: table._id!, + primaryDisplay: "name", + schema: { + id: { visible: true }, + name: { + visible: false, + }, + }, + } + + await config.api.viewV2.create(newView, { + status: 400, + body: { + message: 'You can\'t hide "name" because it is the display column.', + status: 400, + }, + }) + }) + + it("display fields can be readonly", async () => { + mocks.licenses.useViewReadonlyColumns() + const table = await config.api.table.save( + saveTableRequest({ + schema: { + name: { + name: "name", + type: FieldType.STRING, + }, + description: { + name: "description", + type: FieldType.STRING, + }, + }, + }) + ) + + const newView: CreateViewRequest = { + name: generator.name(), + tableId: table._id!, + primaryDisplay: "name", + schema: { + id: { visible: true }, + name: { + visible: true, + readonly: true, + }, + }, + } + + await config.api.viewV2.create(newView, { + status: 201, + }) + }) }) describe("update", () => { @@ -441,6 +546,9 @@ describe.each([ view = await config.api.viewV2.create({ tableId: table._id!, name: generator.guid(), + schema: { + id: { visible: true }, + }, }) }) @@ -475,7 +583,7 @@ describe.each([ id: view.id, tableId, name: view.name, - primaryDisplay: generator.word(), + primaryDisplay: "Price", query: [ { operator: SearchFilterOperator.EQUAL, @@ -489,6 +597,7 @@ describe.each([ type: SortType.STRING, }, schema: { + id: { visible: true }, Category: { visible: false, }, @@ -506,7 +615,7 @@ describe.each([ schema: { ...table.schema, id: expect.objectContaining({ - visible: false, + visible: true, }), Category: expect.objectContaining({ visible: false, @@ -603,6 +712,9 @@ describe.each([ const anotherView = await config.api.viewV2.create({ tableId: table._id!, name: generator.guid(), + schema: { + id: { visible: true }, + }, }) const result = await config .request!.put(`/api/v2/views/${anotherView.id}`) @@ -621,6 +733,7 @@ describe.each([ const updatedView = await config.api.viewV2.update({ ...view, schema: { + ...view.schema, Price: { name: "Price", type: FieldType.NUMBER, @@ -640,6 +753,7 @@ describe.each([ expect(updatedView).toEqual({ ...view, schema: { + id: { visible: true }, Price: { visible: true, order: 1, @@ -656,6 +770,7 @@ describe.each([ { ...view, schema: { + ...view.schema, Price: { name: "Price", type: FieldType.NUMBER, @@ -679,6 +794,7 @@ describe.each([ view = await config.api.viewV2.update({ ...view, schema: { + id: { visible: true }, Price: { visible: true, readonly: true, @@ -690,7 +806,7 @@ describe.each([ await config.api.viewV2.update(view, { status: 400, body: { - message: "Readonly fields are not enabled for your tenant", + message: "Readonly fields are not 
enabled", }, }) }) @@ -701,6 +817,7 @@ describe.each([ view = await config.api.viewV2.update({ ...view, schema: { + id: { visible: true }, Price: { visible: true, readonly: true, @@ -715,6 +832,7 @@ describe.each([ const res = await config.api.viewV2.update({ ...view, schema: { + id: { visible: true }, Price: { visible: true, readonly: false, @@ -725,6 +843,7 @@ describe.each([ expect.objectContaining({ ...view, schema: { + id: { visible: true }, Price: { visible: true, readonly: false, @@ -733,6 +852,53 @@ describe.each([ }) ) }) + + isInternal && + it("updating schema will only validate modified field", async () => { + let view = await config.api.viewV2.create({ + tableId: table._id!, + name: generator.guid(), + schema: { + id: { visible: true }, + Price: { + visible: true, + }, + Category: { visible: true }, + }, + }) + + // Update the view to an invalid state + const tableToUpdate = await config.api.table.get(table._id!) + ;(tableToUpdate.views![view.name] as ViewV2).schema!.id.visible = false + await db.getDB(config.appId!).put(tableToUpdate) + + view = await config.api.viewV2.get(view.id) + await config.api.viewV2.update({ + ...view, + schema: { + ...view.schema, + Price: { + visible: false, + }, + }, + }) + + expect(await config.api.viewV2.get(view.id)).toEqual( + expect.objectContaining({ + schema: { + id: expect.objectContaining({ + visible: false, + }), + Price: expect.objectContaining({ + visible: false, + }), + Category: expect.objectContaining({ + visible: true, + }), + }, + }) + ) + }) }) describe("delete", () => { @@ -742,6 +908,9 @@ describe.each([ view = await config.api.viewV2.create({ tableId: table._id!, name: generator.guid(), + schema: { + id: { visible: true }, + }, }) }) @@ -764,6 +933,7 @@ describe.each([ name: generator.name(), tableId: table._id!, schema: { + id: { visible: true }, Price: { visible: false }, Category: { visible: true }, }, @@ -786,6 +956,7 @@ describe.each([ name: generator.name(), tableId: table._id!, schema: { + id: { visible: true }, Price: { visible: true, readonly: true }, }, }) @@ -821,6 +992,7 @@ describe.each([ tableId: table._id!, name: generator.guid(), schema: { + id: { visible: true }, Country: { visible: true, }, @@ -855,6 +1027,7 @@ describe.each([ tableId: table._id!, name: generator.guid(), schema: { + id: { visible: true }, two: { visible: true }, }, }) @@ -880,6 +1053,7 @@ describe.each([ tableId: table._id!, name: generator.guid(), schema: { + id: { visible: true }, one: { visible: true, readonly: true }, two: { visible: true }, }, @@ -921,6 +1095,7 @@ describe.each([ tableId: table._id!, name: generator.guid(), schema: { + id: { visible: true }, one: { visible: true, readonly: true }, two: { visible: true }, }, @@ -948,7 +1123,7 @@ describe.each([ const createdRow = await config.api.row.save(table._id!, {}) const rowUsage = await getRowUsage() await config.api.row.bulkDelete(view.id, { rows: [createdRow] }) - await assertRowUsage(rowUsage - 1) + await assertRowUsage(isInternal ? rowUsage - 1 : rowUsage) await config.api.row.get(table._id!, createdRow._id!, { status: 404, }) @@ -964,7 +1139,7 @@ describe.each([ await config.api.row.bulkDelete(view.id, { rows: [rows[0], rows[2]] }) - await assertRowUsage(rowUsage - 2) + await assertRowUsage(isInternal ? 
rowUsage - 2 : rowUsage) await config.api.row.get(table._id!, rows[0]._id!, { status: 404, @@ -988,6 +1163,7 @@ describe.each([ rows.map(r => ({ _viewId: view.id, tableId: table._id, + id: r.id, _id: r._id, _rev: r._rev, ...(isInternal @@ -1028,6 +1204,7 @@ describe.each([ }, ], schema: { + id: { visible: true }, two: { visible: true }, }, }) @@ -1039,6 +1216,7 @@ describe.each([ { _viewId: view.id, tableId: table._id, + id: two.id, two: two.two, _id: two._id, _rev: two._rev, @@ -1086,12 +1264,13 @@ describe.each([ paginate: true, limit: 4, query: {}, + countRows: true, }) expect(page1).toEqual({ rows: expect.arrayContaining(rows.slice(0, 4)), - totalRows: isInternal ? 10 : undefined, hasNextPage: true, bookmark: expect.anything(), + totalRows: 10, }) const page2 = await config.api.viewV2.search(view.id, { @@ -1099,12 +1278,13 @@ describe.each([ limit: 4, bookmark: page1.bookmark, query: {}, + countRows: true, }) expect(page2).toEqual({ rows: expect.arrayContaining(rows.slice(4, 8)), - totalRows: isInternal ? 10 : undefined, hasNextPage: true, bookmark: expect.anything(), + totalRows: 10, }) const page3 = await config.api.viewV2.search(view.id, { @@ -1112,13 +1292,17 @@ describe.each([ limit: 4, bookmark: page2.bookmark, query: {}, + countRows: true, }) - expect(page3).toEqual({ + const expectation: SearchResponse = { rows: expect.arrayContaining(rows.slice(8)), - totalRows: isInternal ? 10 : undefined, hasNextPage: false, - bookmark: expect.anything(), - }) + totalRows: 10, + } + if (isLucene) { + expectation.bookmark = expect.anything() + } + expect(page3).toEqual(expectation) }) const sortTestOptions: [ @@ -1192,7 +1376,11 @@ describe.each([ describe("sorting", () => { let table: Table - const viewSchema = { age: { visible: true }, name: { visible: true } } + const viewSchema = { + id: { visible: true }, + age: { visible: true }, + name: { visible: true }, + } beforeAll(async () => { table = await config.api.table.save( @@ -1348,4 +1536,123 @@ describe.each([ }) }) }) + + describe("updating table schema", () => { + describe("existing columns changed to required", () => { + beforeEach(async () => { + table = await config.api.table.save( + saveTableRequest({ + schema: { + id: { + name: "id", + type: FieldType.AUTO, + autocolumn: true, + }, + name: { + name: "name", + type: FieldType.STRING, + }, + }, + }) + ) + }) + + it("allows updating when no views constrains the field", async () => { + await config.api.viewV2.create({ + name: "view a", + tableId: table._id!, + schema: { + id: { visible: true }, + name: { visible: true }, + }, + }) + + table = await config.api.table.get(table._id!) + await config.api.table.save( + { + ...table, + schema: { + ...table.schema, + name: { + name: "name", + type: FieldType.STRING, + constraints: { presence: { allowEmpty: false } }, + }, + }, + }, + { status: 200 } + ) + }) + + it("rejects if field is readonly in any view", async () => { + mocks.licenses.useViewReadonlyColumns() + + await config.api.viewV2.create({ + name: "view a", + tableId: table._id!, + schema: { + id: { visible: true }, + name: { + visible: true, + readonly: true, + }, + }, + }) + + table = await config.api.table.get(table._id!) 
+ await config.api.table.save( + { + ...table, + schema: { + ...table.schema, + name: { + name: "name", + type: FieldType.STRING, + constraints: { presence: true }, + }, + }, + }, + { + status: 400, + body: { + status: 400, + message: + 'To make field "name" required, this field must be present and writable in views: view a.', + }, + } + ) + }) + + it("rejects if field is hidden in any view", async () => { + await config.api.viewV2.create({ + name: "view a", + tableId: table._id!, + schema: { id: { visible: true } }, + }) + + table = await config.api.table.get(table._id!) + await config.api.table.save( + { + ...table, + schema: { + ...table.schema, + name: { + name: "name", + type: FieldType.STRING, + constraints: { presence: true }, + }, + }, + }, + { + status: 400, + body: { + status: 400, + message: + 'To make field "name" required, this field must be present and writable in views: view a.', + }, + } + ) + }) + }) + }) }) diff --git a/packages/server/src/api/routes/utils/validators.ts b/packages/server/src/api/routes/utils/validators.ts index 424d0d6c79..671ce95038 100644 --- a/packages/server/src/api/routes/utils/validators.ts +++ b/packages/server/src/api/routes/utils/validators.ts @@ -1,51 +1,89 @@ import { auth, permissions } from "@budibase/backend-core" import { DataSourceOperation } from "../../../constants" -import { WebhookActionType } from "@budibase/types" -import Joi from "joi" -import { ValidSnippetNameRegex } from "@budibase/shared-core" +import { Table, WebhookActionType } from "@budibase/types" +import Joi, { CustomValidator } from "joi" +import { ValidSnippetNameRegex, helpers } from "@budibase/shared-core" +import sdk from "../../../sdk" + +const { isRequired } = helpers.schema const OPTIONAL_STRING = Joi.string().optional().allow(null).allow("") const OPTIONAL_NUMBER = Joi.number().optional().allow(null) const OPTIONAL_BOOLEAN = Joi.boolean().optional().allow(null) const APP_NAME_REGEX = /^[\w\s]+$/ +const validateViewSchemas: CustomValidator = (table, helpers) => { + if (table.views && Object.entries(table.views).length) { + const requiredFields = Object.entries(table.schema) + .filter(([_, v]) => isRequired(v.constraints)) + .map(([key]) => key) + if (requiredFields.length) { + for (const view of Object.values(table.views)) { + if (!sdk.views.isV2(view)) { + continue + } + + const editableViewFields = Object.entries(view.schema || {}) + .filter(([_, f]) => f.visible && !f.readonly) + .map(([key]) => key) + const missingField = requiredFields.find( + f => !editableViewFields.includes(f) + ) + if (missingField) { + return helpers.message({ + custom: `To make field "${missingField}" required, this field must be present and writable in views: ${view.name}.`, + }) + } + } + } + } + return table +} + export function tableValidator() { - // prettier-ignore - return auth.joiValidator.body(Joi.object({ - _id: OPTIONAL_STRING, - _rev: OPTIONAL_STRING, - type: OPTIONAL_STRING.valid("table", "internal", "external"), - primaryDisplay: OPTIONAL_STRING, - schema: Joi.object().required(), - name: Joi.string().required(), - views: Joi.object(), - rows: Joi.array(), - }).unknown(true)) + return auth.joiValidator.body( + Joi.object({ + _id: OPTIONAL_STRING, + _rev: OPTIONAL_STRING, + type: OPTIONAL_STRING.valid("table", "internal", "external"), + primaryDisplay: OPTIONAL_STRING, + schema: Joi.object().required(), + name: Joi.string().required(), + views: Joi.object(), + rows: Joi.array(), + }) + .custom(validateViewSchemas) + .unknown(true), + { errorPrefix: "" } + ) } export function 
nameValidator() { - // prettier-ignore - return auth.joiValidator.body(Joi.object({ - name: OPTIONAL_STRING, - })) + return auth.joiValidator.body( + Joi.object({ + name: OPTIONAL_STRING, + }) + ) } export function datasourceValidator() { - // prettier-ignore - return auth.joiValidator.body(Joi.object({ - _id: Joi.string(), - _rev: Joi.string(), - type: OPTIONAL_STRING.allow("datasource_plus"), - relationships: Joi.array().items(Joi.object({ - from: Joi.string().required(), - to: Joi.string().required(), - cardinality: Joi.valid("1:N", "1:1", "N:N").required() - })), - }).unknown(true)) + return auth.joiValidator.body( + Joi.object({ + _id: Joi.string(), + _rev: Joi.string(), + type: OPTIONAL_STRING.allow("datasource_plus"), + relationships: Joi.array().items( + Joi.object({ + from: Joi.string().required(), + to: Joi.string().required(), + cardinality: Joi.valid("1:N", "1:1", "N:N").required(), + }) + ), + }).unknown(true) + ) } function filterObject() { - // prettier-ignore return Joi.object({ string: Joi.object().optional(), fuzzy: Joi.object().optional(), @@ -62,17 +100,21 @@ function filterObject() { } export function internalSearchValidator() { - // prettier-ignore - return auth.joiValidator.body(Joi.object({ - tableId: OPTIONAL_STRING, - query: filterObject(), - limit: OPTIONAL_NUMBER, - sort: OPTIONAL_STRING, - sortOrder: OPTIONAL_STRING, - sortType: OPTIONAL_STRING, - paginate: Joi.boolean(), - bookmark: Joi.alternatives().try(OPTIONAL_STRING, OPTIONAL_NUMBER).optional(), - })) + return auth.joiValidator.body( + Joi.object({ + tableId: OPTIONAL_STRING, + query: filterObject(), + limit: OPTIONAL_NUMBER, + sort: OPTIONAL_STRING, + sortOrder: OPTIONAL_STRING, + sortType: OPTIONAL_STRING, + paginate: Joi.boolean(), + countRows: Joi.boolean(), + bookmark: Joi.alternatives() + .try(OPTIONAL_STRING, OPTIONAL_NUMBER) + .optional(), + }) + ) } export function externalSearchValidator() { @@ -94,92 +136,110 @@ export function externalSearchValidator() { } export function datasourceQueryValidator() { - // prettier-ignore - return auth.joiValidator.body(Joi.object({ - endpoint: Joi.object({ - datasourceId: Joi.string().required(), - operation: Joi.string().required().valid(...Object.values(DataSourceOperation)), - entityId: Joi.string().required(), - }).required(), - resource: Joi.object({ - fields: Joi.array().items(Joi.string()).optional(), - }).optional(), - body: Joi.object().optional(), - sort: Joi.object().optional(), - filters: filterObject().optional(), - paginate: Joi.object({ - page: Joi.string().alphanum().optional(), - limit: Joi.number().optional(), - }).optional(), - })) + return auth.joiValidator.body( + Joi.object({ + endpoint: Joi.object({ + datasourceId: Joi.string().required(), + operation: Joi.string() + .required() + .valid(...Object.values(DataSourceOperation)), + entityId: Joi.string().required(), + }).required(), + resource: Joi.object({ + fields: Joi.array().items(Joi.string()).optional(), + }).optional(), + body: Joi.object().optional(), + sort: Joi.object().optional(), + filters: filterObject().optional(), + paginate: Joi.object({ + page: Joi.string().alphanum().optional(), + limit: Joi.number().optional(), + }).optional(), + }) + ) } export function webhookValidator() { - // prettier-ignore - return auth.joiValidator.body(Joi.object({ - live: Joi.bool(), - _id: OPTIONAL_STRING, - _rev: OPTIONAL_STRING, - name: Joi.string().required(), - bodySchema: Joi.object().optional(), - action: Joi.object({ - type: Joi.string().required().valid(WebhookActionType.AUTOMATION), - 
target: Joi.string().required(), - }).required(), - }).unknown(true)) + return auth.joiValidator.body( + Joi.object({ + live: Joi.bool(), + _id: OPTIONAL_STRING, + _rev: OPTIONAL_STRING, + name: Joi.string().required(), + bodySchema: Joi.object().optional(), + action: Joi.object({ + type: Joi.string().required().valid(WebhookActionType.AUTOMATION), + target: Joi.string().required(), + }).required(), + }).unknown(true) + ) } export function roleValidator() { const permLevelArray = Object.values(permissions.PermissionLevel) - // prettier-ignore - return auth.joiValidator.body(Joi.object({ - _id: OPTIONAL_STRING, - _rev: OPTIONAL_STRING, - name: Joi.string().regex(/^[a-zA-Z0-9_]*$/).required(), - // this is the base permission ID (for now a built in) - permissionId: Joi.string().valid(...Object.values(permissions.BuiltinPermissionID)).required(), - permissions: Joi.object() - .pattern(/.*/, [Joi.string().valid(...permLevelArray)]) - .optional(), - inherits: OPTIONAL_STRING, - }).unknown(true)) + + return auth.joiValidator.body( + Joi.object({ + _id: OPTIONAL_STRING, + _rev: OPTIONAL_STRING, + name: Joi.string() + .regex(/^[a-zA-Z0-9_]*$/) + .required(), + // this is the base permission ID (for now a built in) + permissionId: Joi.string() + .valid(...Object.values(permissions.BuiltinPermissionID)) + .required(), + permissions: Joi.object() + .pattern(/.*/, [Joi.string().valid(...permLevelArray)]) + .optional(), + inherits: OPTIONAL_STRING, + }).unknown(true) + ) } export function permissionValidator() { const permLevelArray = Object.values(permissions.PermissionLevel) - // prettier-ignore - return auth.joiValidator.params(Joi.object({ - level: Joi.string().valid(...permLevelArray).required(), - resourceId: Joi.string(), - roleId: Joi.string(), - }).unknown(true)) + + return auth.joiValidator.params( + Joi.object({ + level: Joi.string() + .valid(...permLevelArray) + .required(), + resourceId: Joi.string(), + roleId: Joi.string(), + }).unknown(true) + ) } export function screenValidator() { - // prettier-ignore - return auth.joiValidator.body(Joi.object({ - name: Joi.string().required(), - showNavigation: OPTIONAL_BOOLEAN, - width: OPTIONAL_STRING, - routing: Joi.object({ - route: Joi.string().required(), - roleId: Joi.string().required().allow(""), - homeScreen: OPTIONAL_BOOLEAN, - }).required().unknown(true), - props: Joi.object({ - _id: Joi.string().required(), - _component: Joi.string().required(), - _children: Joi.array().required(), - _styles: Joi.object().required(), - type: OPTIONAL_STRING, - table: OPTIONAL_STRING, - layoutId: OPTIONAL_STRING, - }).required().unknown(true), - }).unknown(true)) + return auth.joiValidator.body( + Joi.object({ + name: Joi.string().required(), + showNavigation: OPTIONAL_BOOLEAN, + width: OPTIONAL_STRING, + routing: Joi.object({ + route: Joi.string().required(), + roleId: Joi.string().required().allow(""), + homeScreen: OPTIONAL_BOOLEAN, + }) + .required() + .unknown(true), + props: Joi.object({ + _id: Joi.string().required(), + _component: Joi.string().required(), + _children: Joi.array().required(), + _styles: Joi.object().required(), + type: OPTIONAL_STRING, + table: OPTIONAL_STRING, + layoutId: OPTIONAL_STRING, + }) + .required() + .unknown(true), + }).unknown(true) + ) } function generateStepSchema(allowStepTypes: string[]) { - // prettier-ignore return Joi.object({ stepId: Joi.string().required(), id: Joi.string().required(), @@ -189,33 +249,39 @@ function generateStepSchema(allowStepTypes: string[]) { icon: Joi.string().required(), params: 
Joi.object(),
     args: Joi.object(),
-    type: Joi.string().required().valid(...allowStepTypes),
+    type: Joi.string()
+      .required()
+      .valid(...allowStepTypes),
   }).unknown(true)
 }
 
 export function automationValidator(existing = false) {
-  // prettier-ignore
-  return auth.joiValidator.body(Joi.object({
-    _id: existing ? Joi.string().required() : OPTIONAL_STRING,
-    _rev: existing ? Joi.string().required() : OPTIONAL_STRING,
-    name: Joi.string().required(),
-    type: Joi.string().valid("automation").required(),
-    definition: Joi.object({
-      steps: Joi.array().required().items(generateStepSchema(["ACTION", "LOGIC"])),
-      trigger: generateStepSchema(["TRIGGER"]).allow(null),
-    }).required().unknown(true),
-  }).unknown(true))
+  return auth.joiValidator.body(
+    Joi.object({
+      _id: existing ? Joi.string().required() : OPTIONAL_STRING,
+      _rev: existing ? Joi.string().required() : OPTIONAL_STRING,
+      name: Joi.string().required(),
+      type: Joi.string().valid("automation").required(),
+      definition: Joi.object({
+        steps: Joi.array()
+          .required()
+          .items(generateStepSchema(["ACTION", "LOGIC"])),
+        trigger: generateStepSchema(["TRIGGER"]).allow(null),
+      })
+        .required()
+        .unknown(true),
+    }).unknown(true)
+  )
 }
 
 export function applicationValidator(opts = { isCreate: true }) {
-  // prettier-ignore
   const base: any = {
     _id: OPTIONAL_STRING,
     _rev: OPTIONAL_STRING,
     url: OPTIONAL_STRING,
     template: Joi.object({
       templateString: OPTIONAL_STRING,
-    })
+    }),
   }
 
   const appNameValidator = Joi.string()
diff --git a/packages/server/src/appMigrations/appMigrationMetadata.ts b/packages/server/src/appMigrations/appMigrationMetadata.ts
index 202e78d964..fb3310619e 100644
--- a/packages/server/src/appMigrations/appMigrationMetadata.ts
+++ b/packages/server/src/appMigrations/appMigrationMetadata.ts
@@ -1,4 +1,4 @@
-import { Duration, cache, context, db, env } from "@budibase/backend-core"
+import { Duration, cache, db, env } from "@budibase/backend-core"
 import { Database, DocumentType, Document } from "@budibase/types"
 
 export interface AppMigrationDoc extends Document {
@@ -23,17 +23,16 @@ const getCacheKey = (appId: string) => `appmigrations_${env.VERSION}_${appId}`
 
 export async function getAppMigrationVersion(appId: string): Promise<string> {
   const cacheKey = getCacheKey(appId)
 
-  let metadata: AppMigrationDoc | undefined = await cache.get(cacheKey)
+  let version: string | undefined = await cache.get(cacheKey)
 
-  // We don't want to cache in dev, in order to be able to tweak it
-  if (metadata && !env.isDev()) {
-    return metadata.version
+  // return the cached version if we found one
+  if (version) {
+    return version
   }
 
-  let version
   try {
-    metadata = await getFromDB(appId)
-    version = metadata.version
+    const metadata = await getFromDB(appId)
+    version = metadata.version || ""
   } catch (err: any) {
     if (err.status !== 404) {
       throw err
@@ -42,7 +41,10 @@ export async function getAppMigrationVersion(appId: string): Promise<string> {
     version = ""
   }
 
-  await cache.store(cacheKey, version, EXPIRY_SECONDS)
+  // only cache if we have a valid version
+  if (version) {
+    await cache.store(cacheKey, version, EXPIRY_SECONDS)
+  }
 
   return version
 }
@@ -54,8 +56,7 @@ export async function updateAppMigrationMetadata({
   appId: string
   version: string
 }): Promise<void> {
-  const db = context.getAppDB()
-
+  const appDb = db.getDB(appId)
   let appMigrationDoc: AppMigrationDoc
 
   try {
@@ -70,7 +71,7 @@ export async function updateAppMigrationMetadata({
       version: "",
       history: {},
     }
-    await db.put(appMigrationDoc)
+    await appDb.put(appMigrationDoc)
     appMigrationDoc = await getFromDB(appId)
   }
 
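The hunk above changes getAppMigrationVersion to cache the bare version string, and only when it is non-empty, so an app with no migration history keeps re-checking the database until a first version exists. For illustration, a minimal sketch of that read-through pattern, with a hypothetical Cache interface standing in for the real cache from @budibase/backend-core:

type Cache = {
  get(key: string): Promise<string | undefined>
  store(key: string, value: string, ttlSeconds: number): Promise<void>
}

// cache-aside read: serve from cache when possible, otherwise fetch, and
// cache only truthy versions so "no version yet" is never pinned
async function readThroughVersion(
  cache: Cache,
  key: string,
  ttlSeconds: number,
  fetchVersion: () => Promise<string>
): Promise<string> {
  const cached = await cache.get(key)
  if (cached) {
    return cached
  }
  const version = await fetchVersion()
  if (version) {
    await cache.store(key, version, ttlSeconds)
  }
  return version
}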
@@ -82,7 +83,7 @@ export async function updateAppMigrationMetadata({
       [version]: { runAt: new Date().toISOString() },
     },
   }
-  await db.put(updatedMigrationDoc)
+  await appDb.put(updatedMigrationDoc)
 
   const cacheKey = getCacheKey(appId)
 
diff --git a/packages/server/src/appMigrations/index.ts b/packages/server/src/appMigrations/index.ts
index 0758b9f324..de15666215 100644
--- a/packages/server/src/appMigrations/index.ts
+++ b/packages/server/src/appMigrations/index.ts
@@ -1,4 +1,4 @@
-import queue from "./queue"
+import { getAppMigrationQueue } from "./queue"
 import { Next } from "koa"
 import { getAppMigrationVersion } from "./appMigrationMetadata"
 import { MIGRATIONS } from "./migrations"
@@ -10,32 +10,55 @@ export * from "./appMigrationMetadata"
 
 export type AppMigration = {
   id: string
   func: () => Promise<void>
+  // disabled so that by default all migrations listed are enabled
+  disabled?: boolean
 }
 
-export const getLatestMigrationId = () =>
-  MIGRATIONS.map(m => m.id)
-    .sort()
-    .reverse()[0]
+export function getLatestEnabledMigrationId(migrations?: AppMigration[]) {
+  let latestMigrationId: string | undefined
+  if (!migrations) {
+    migrations = MIGRATIONS
+  }
+  for (let migration of migrations) {
+    // if a migration is disabled, all migrations after it are disabled
+    if (migration.disabled) {
+      break
+    }
+    latestMigrationId = migration.id
+  }
+  return latestMigrationId
+}
 
-const getTimestamp = (versionId: string) => versionId?.split("_")[0] || ""
+function getTimestamp(versionId: string) {
+  return versionId?.split("_")[0] || ""
+}
 
 export async function checkMissingMigrations(
   ctx: UserCtx,
   next: Next,
   appId: string
 ) {
-  const currentVersion = await getAppMigrationVersion(appId)
-  const latestMigration = getLatestMigrationId()
+  const latestMigration = getLatestEnabledMigrationId()
 
-  if (getTimestamp(currentVersion) < getTimestamp(latestMigration)) {
+  // no migrations set - edge case, don't try to do anything
+  if (!latestMigration) {
+    return next()
+  }
+
+  const currentVersion = await getAppMigrationVersion(appId)
+  const queue = getAppMigrationQueue()
+
+  if (
+    queue &&
+    latestMigration &&
+    getTimestamp(currentVersion) < getTimestamp(latestMigration)
+  ) {
     await queue.add(
       {
         appId,
       },
       {
         jobId: `${appId}_${latestMigration}`,
-        removeOnComplete: true,
-        removeOnFail: true,
       }
     )
diff --git a/packages/server/src/appMigrations/migrations.ts b/packages/server/src/appMigrations/migrations.ts
index d66e2e8895..14eb9d0923 100644
--- a/packages/server/src/appMigrations/migrations.ts
+++ b/packages/server/src/appMigrations/migrations.ts
@@ -1,7 +1,15 @@
 // This file should never be manually modified, use `yarn add-app-migration` in order to add a new one
 
+import env from "../environment"
 import { AppMigration } from "."
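To make the disabled flag added to AppMigration above concrete: getLatestEnabledMigrationId scans in list order and stops at the first disabled entry, so every migration after a disabled one is effectively disabled too. A small usage sketch (the IDs are invented):

const example: AppMigration[] = [
  { id: "20240101000000_first", func: async () => {} },
  { id: "20240102000000_second", func: async () => {}, disabled: true },
  { id: "20240103000000_third", func: async () => {} },
]

// the scan breaks at "second", so "third" is never considered
getLatestEnabledMigrationId(example) // => "20240101000000_first"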
+import m20240604153647_initial_sqs from "./migrations/20240604153647_initial_sqs"
+
+// Migrations will be executed sorted by ID
 export const MIGRATIONS: AppMigration[] = [
-  // Migrations will be executed sorted by id
+  {
+    id: "20240604153647_initial_sqs",
+    func: m20240604153647_initial_sqs,
+    disabled: !env.SQS_SEARCH_ENABLE,
+  },
 ]
diff --git a/packages/server/src/appMigrations/migrations/20240604153647_initial_sqs.ts b/packages/server/src/appMigrations/migrations/20240604153647_initial_sqs.ts
new file mode 100644
index 0000000000..800de1418a
--- /dev/null
+++ b/packages/server/src/appMigrations/migrations/20240604153647_initial_sqs.ts
@@ -0,0 +1,52 @@
+import { context } from "@budibase/backend-core"
+import { allLinkDocs } from "../../db/utils"
+import LinkDocumentImpl from "../../db/linkedRows/LinkDocument"
+import sdk from "../../sdk"
+import env from "../../environment"
+
+const migration = async () => {
+  const linkDocs = await allLinkDocs()
+
+  const docsToUpdate = []
+  for (const linkDoc of linkDocs) {
+    if (linkDoc.tableId) {
+      // it already has the junction table ID - no need to migrate
+      continue
+    }
+
+    // rebuild the link doc so the constructor computes the junction table ID
+    if (!linkDoc.tableId) {
+      const newLink = new LinkDocumentImpl(
+        linkDoc.doc1.tableId,
+        linkDoc.doc1.fieldName,
+        linkDoc.doc1.rowId,
+        linkDoc.doc2.tableId,
+        linkDoc.doc2.fieldName,
+        linkDoc.doc2.rowId
+      )
+      newLink._id = linkDoc._id!
+      newLink._rev = linkDoc._rev
+      docsToUpdate.push(newLink)
+    }
+  }
+
+  const db = context.getAppDB()
+  if (docsToUpdate.length) {
+    await db.bulkDocs(docsToUpdate)
+  }
+
+  // at the end make sure design doc is ready
+  await sdk.tables.sqs.syncDefinition()
+  // only do initial search if environment is using SQS already
+  // initial search makes sure that all the indexes have been created
+  // and are ready to use, avoiding any initial waits for large tables
+  if (env.SQS_SEARCH_ENABLE) {
+    const tables = await sdk.tables.getAllInternalTables()
+    // do these one by one - running in parallel could cause problems
+    for (let table of tables) {
+      await db.sql(`select * from ${table._id} limit 1`)
+    }
+  }
+}
+
+export default migration
diff --git a/packages/server/src/appMigrations/migrations/tests/20240604153647_initial_sqs.spec.ts b/packages/server/src/appMigrations/migrations/tests/20240604153647_initial_sqs.spec.ts
new file mode 100644
index 0000000000..572e694855
--- /dev/null
+++ b/packages/server/src/appMigrations/migrations/tests/20240604153647_initial_sqs.spec.ts
@@ -0,0 +1,129 @@
+import * as setup from "../../../api/routes/tests/utilities"
+import { basicTable } from "../../../tests/utilities/structures"
+import { db as dbCore, SQLITE_DESIGN_DOC_ID } from "@budibase/backend-core"
+import {
+  LinkDocument,
+  DocumentType,
+  SQLiteDefinition,
+  SQLiteType,
+} from "@budibase/types"
+import {
+  generateJunctionTableID,
+  generateLinkID,
+  generateRowID,
+} from "../../../db/utils"
+import { processMigrations } from "../../migrationsProcessor"
+import migration from "../20240604153647_initial_sqs"
+import { AppMigration } from "src/appMigrations"
+import sdk from "../../../sdk"
+
+const MIGRATIONS: AppMigration[] = [
+  {
+    id: "20240604153647_initial_sqs",
+    func: migration,
+    disabled: false,
+  },
+]
+
+const config = setup.getConfig()
+let tableId: string
+
+const prefix = sdk.tables.sqs.mapToUserColumn
+
+function oldLinkDocInfo() {
+  const tableId1 = `${DocumentType.TABLE}_a`,
+    tableId2 = `${DocumentType.TABLE}_b`
+  return {
+    tableId1,
+    tableId2,
+    rowId1: generateRowID(tableId1, "b"),
+    rowId2: generateRowID(tableId2, "a"),
+    col1: "columnB",
+    col2: "columnA",
+  }
+}
+
+function oldLinkDocID() {
+  const { tableId1, tableId2, rowId1, rowId2, col1, col2 } = oldLinkDocInfo()
+  return generateLinkID(tableId1, tableId2, rowId1, rowId2, col1, col2)
+}
+
+function oldLinkDocument(): Omit<LinkDocument, "tableId"> {
+  const { tableId1, tableId2, rowId1, rowId2, col1, col2 } = oldLinkDocInfo()
+  return {
+    type: "link",
+    _id: oldLinkDocID(),
+    doc1: {
+      tableId: tableId1,
+      fieldName: col1,
+      rowId: rowId1,
+    },
+    doc2: {
+      tableId: tableId2,
+      fieldName: col2,
+      rowId: rowId2,
+    },
+  }
+}
+
+async function sqsDisabled(cb: () => Promise<void>) {
+  await config.withEnv({ SQS_SEARCH_ENABLE: "" }, cb)
+}
+
+async function sqsEnabled(cb: () => Promise<void>) {
+  await config.withEnv({ SQS_SEARCH_ENABLE: "1" }, cb)
+}
+
+beforeAll(async () => {
+  await sqsDisabled(async () => {
+    await config.init()
+    const table = await config.api.table.save(basicTable())
+    tableId = table._id!
+    const db = dbCore.getDB(config.appId!)
+    // old link document
+    await db.put(oldLinkDocument())
+  })
+})
+
+describe("SQS migration", () => {
+  it("test migration runs as expected against an older DB", async () => {
+    const db = dbCore.getDB(config.appId!)
+    // confirm nothing exists initially
+    await sqsDisabled(async () => {
+      let error: any | undefined
+      try {
+        await db.get(SQLITE_DESIGN_DOC_ID)
+      } catch (err: any) {
+        error = err
+      }
+      expect(error).toBeDefined()
+      expect(error.status).toBe(404)
+    })
+    await sqsEnabled(async () => {
+      await processMigrations(config.appId!, MIGRATIONS)
+      const designDoc = await db.get<SQLiteDefinition>(SQLITE_DESIGN_DOC_ID)
+      expect(designDoc.sql.tables).toBeDefined()
+      const mainTableDef = designDoc.sql.tables[tableId]
+      expect(mainTableDef).toBeDefined()
+      expect(mainTableDef.fields[prefix("name")]).toEqual({
+        field: "name",
+        type: SQLiteType.TEXT,
+      })
+      expect(mainTableDef.fields[prefix("description")]).toEqual({
+        field: "description",
+        type: SQLiteType.TEXT,
+      })
+
+      const { tableId1, tableId2, rowId1, rowId2 } = oldLinkDocInfo()
+      const linkDoc = await db.get<LinkDocument>(oldLinkDocID())
+      expect(linkDoc.tableId).toEqual(
+        generateJunctionTableID(tableId1, tableId2)
+      )
+      // should have swapped the documents
+      expect(linkDoc.doc1.tableId).toEqual(tableId2)
+      expect(linkDoc.doc1.rowId).toEqual(rowId2)
+      expect(linkDoc.doc2.tableId).toEqual(tableId1)
+      expect(linkDoc.doc2.rowId).toEqual(rowId1)
+    })
+  })
+})
diff --git a/packages/server/src/appMigrations/migrationsProcessor.ts b/packages/server/src/appMigrations/migrationsProcessor.ts
index 8d119f3864..1441388564 100644
--- a/packages/server/src/appMigrations/migrationsProcessor.ts
+++ b/packages/server/src/appMigrations/migrationsProcessor.ts
@@ -1,4 +1,4 @@
-import { context, locks } from "@budibase/backend-core"
+import { context, locks, logging } from "@budibase/backend-core"
 import { LockName, LockType } from "@budibase/types"
 
 import {
@@ -12,47 +12,58 @@ export async function processMigrations(
   migrations: AppMigration[]
 ) {
   console.log(`Processing app migration for "${appId}"`)
+  try {
+    // first step - setup full context - tenancy, app and guards
+    await context.doInAppMigrationContext(appId, async () => {
+      console.log(`Acquiring app migration lock for "${appId}"`)
+      await locks.doWithLock(
+        {
+          name: LockName.APP_MIGRATION,
+          type: LockType.AUTO_EXTEND,
+          resource: appId,
+        },
+        async () => {
+          console.log(`Lock acquired starting app migration for "${appId}"`)
+          let currentVersion = await getAppMigrationVersion(appId)
 
-  await locks.doWithLock(
-    {
-      name: LockName.APP_MIGRATION,
-      type: LockType.AUTO_EXTEND,
-      resource: appId,
-    },
-    async () => {
-      await context.doInAppMigrationContext(appId, async () => {
-        let currentVersion = await getAppMigrationVersion(appId)
+          const pendingMigrations = migrations
+            .filter(m => m.id > currentVersion)
+            .sort((a, b) => a.id.localeCompare(b.id))
 
-        const pendingMigrations = migrations
-          .filter(m => m.id > currentVersion)
-          .sort((a, b) => a.id.localeCompare(b.id))
+          const migrationIds = migrations.map(m => m.id).sort()
+          console.log(
+            `App migrations to run for "${appId}" - ${migrationIds.join(",")}`
+          )
 
-        const migrationIds = migrations.map(m => m.id).sort()
+          let index = 0
+          for (const { id, func } of pendingMigrations) {
+            const expectedMigration =
+              migrationIds[migrationIds.indexOf(currentVersion) + 1]
 
-        let index = 0
-        for (const { id, func } of pendingMigrations) {
-          const expectedMigration =
-            migrationIds[migrationIds.indexOf(currentVersion) + 1]
+            if (expectedMigration !== id) {
+              throw new Error(
+                `Migration ${id} could not run, update for "${id}" is running but ${expectedMigration} is expected`
+              )
+            }
 
-          if (expectedMigration !== id) {
-            throw `Migration ${id} could not run, update for "${id}" is running but ${expectedMigration} is expected`
+            const counter = `(${++index}/${pendingMigrations.length})`
+            console.info(`Running migration ${id}... ${counter}`, {
+              migrationId: id,
+              appId,
+            })
+            await func()
+            await updateAppMigrationMetadata({
+              appId,
+              version: id,
+            })
+            currentVersion = id
           }
-
-          const counter = `(${++index}/${pendingMigrations.length})`
-          console.info(`Running migration ${id}... ${counter}`, {
-            migrationId: id,
-            appId,
-          })
-          await func()
-          await updateAppMigrationMetadata({
-            appId,
-            version: id,
-          })
-          currentVersion = id
         }
-      })
-    }
-  )
-
-  console.log(`App migration for "${appId}" processed`)
+      )
+    })
+    console.log(`App migration for "${appId}" processed`)
+  } catch (err) {
+    logging.logAlert("Failed to run app migration", err)
+    throw err
+  }
 }
diff --git a/packages/server/src/appMigrations/queue.ts b/packages/server/src/appMigrations/queue.ts
index 72bb2f9b12..e2bc4406f1 100644
--- a/packages/server/src/appMigrations/queue.ts
+++ b/packages/server/src/appMigrations/queue.ts
@@ -1,15 +1,45 @@
-import { queue } from "@budibase/backend-core"
+import { queue, logging } from "@budibase/backend-core"
 import { Job } from "bull"
 import { MIGRATIONS } from "./migrations"
 import { processMigrations } from "./migrationsProcessor"
 
-const appMigrationQueue = queue.createQueue(queue.JobQueue.APP_MIGRATION)
-appMigrationQueue.process(processMessage)
+const MAX_ATTEMPTS = 3
+// max number of migrations to run at same time, per node
+const MIGRATION_CONCURRENCY = 5
 
-async function processMessage(job: Job) {
+export type AppMigrationJob = {
+  appId: string
+}
+
+let appMigrationQueue: queue.Queue<AppMigrationJob> | undefined
+
+export function init() {
+  appMigrationQueue = queue.createQueue<AppMigrationJob>(
+    queue.JobQueue.APP_MIGRATION,
+    {
+      jobOptions: {
+        attempts: MAX_ATTEMPTS,
+        removeOnComplete: true,
+        removeOnFail: true,
+      },
+      maxStalledCount: MAX_ATTEMPTS,
+      removeStalledCb: async (job: Job) => {
+        logging.logAlert(
+          `App migration failed, queue job ID: ${job.id} - reason: ${job.failedReason}`
+        )
+      },
+    }
+  )
+
+  return appMigrationQueue.process(MIGRATION_CONCURRENCY, processMessage)
+}
+
+async function processMessage(job: Job<AppMigrationJob>) {
   const { appId } = job.data
   await processMigrations(appId, MIGRATIONS)
 }
 
-export default appMigrationQueue
+export function getAppMigrationQueue() {
+  return
appMigrationQueue +} diff --git a/packages/server/src/appMigrations/tests/migrations.spec.ts b/packages/server/src/appMigrations/tests/migrations.spec.ts index 7af2346934..1da94f503f 100644 --- a/packages/server/src/appMigrations/tests/migrations.spec.ts +++ b/packages/server/src/appMigrations/tests/migrations.spec.ts @@ -1,6 +1,7 @@ import { Header } from "@budibase/backend-core" import * as setup from "../../api/routes/tests/utilities" import * as migrations from "../migrations" +import { AppMigration, getLatestEnabledMigrationId } from "../index" import { getAppMigrationVersion } from "../appMigrationMetadata" jest.mock("../migrations", () => ({ @@ -52,4 +53,29 @@ describe("migrations", () => { }, }) }) + + it("should disable all migrations after one that is disabled", () => { + const MIGRATION_ID1 = "20231211105810_new-test", + MIGRATION_ID2 = "20231211105812_new-test", + MIGRATION_ID3 = "20231211105814_new-test" + // create some migrations to test with + const migrations: AppMigration[] = [ + { + id: MIGRATION_ID1, + func: async () => {}, + }, + { + id: MIGRATION_ID2, + func: async () => {}, + }, + { + id: MIGRATION_ID3, + func: async () => {}, + }, + ] + + expect(getLatestEnabledMigrationId(migrations)).toBe(MIGRATION_ID3) + migrations[1].disabled = true + expect(getLatestEnabledMigrationId(migrations)).toBe(MIGRATION_ID1) + }) }) diff --git a/packages/server/src/automations/automationUtils.ts b/packages/server/src/automations/automationUtils.ts index de6e1b3d88..5467e0757c 100644 --- a/packages/server/src/automations/automationUtils.ts +++ b/packages/server/src/automations/automationUtils.ts @@ -99,6 +99,15 @@ export function getError(err: any) { return typeof err !== "string" ? err.toString() : err } +export function guardAttachment(attachmentObject: any) { + if (!("url" in attachmentObject) || !("filename" in attachmentObject)) { + const providedKeys = Object.keys(attachmentObject).join(", ") + throw new Error( + `Attachments must have both "url" and "filename" keys. 
You have provided: ${providedKeys}`
+    )
+  }
+}
+
 export async function sendAutomationAttachmentsToStorage(
   tableId: string,
   row: Row
@@ -116,9 +125,15 @@ export async function sendAutomationAttachmentsToStorage(
       schema?.type === FieldType.ATTACHMENT_SINGLE ||
       schema?.type === FieldType.SIGNATURE_SINGLE
     ) {
+      if (Array.isArray(value)) {
+        value.forEach(item => guardAttachment(item))
+      } else {
+        guardAttachment(value)
+      }
       attachmentRows[prop] = value
     }
   }
+
   for (const [prop, attachments] of Object.entries(attachmentRows)) {
     if (Array.isArray(attachments)) {
       if (attachments.length) {
@@ -133,7 +148,6 @@ export async function sendAutomationAttachmentsToStorage(
 
   return row
 }
-
 async function generateAttachmentRow(attachment: AutomationAttachment) {
   const prodAppId = context.getProdAppId()
 
diff --git a/packages/server/src/automations/bullboard.ts b/packages/server/src/automations/bullboard.ts
index 34f18754a2..aa4287b2d0 100644
--- a/packages/server/src/automations/bullboard.ts
+++ b/packages/server/src/automations/bullboard.ts
@@ -3,6 +3,7 @@ import { KoaAdapter } from "@bull-board/koa"
 import { queue } from "@budibase/backend-core"
 import * as automation from "../threads/automation"
 import { backups } from "@budibase/pro"
+import { getAppMigrationQueue } from "../appMigrations/queue"
 import { createBullBoard } from "@bull-board/api"
 import BullQueue from "bull"
 
@@ -16,10 +17,14 @@ const PATH_PREFIX = "/bulladmin"
 export async function init() {
   // Set up queues for bull board admin
   const backupQueue = backups.getBackupQueue()
+  const appMigrationQueue = getAppMigrationQueue()
   const queues = [automationQueue]
   if (backupQueue) {
     queues.push(backupQueue)
   }
+  if (appMigrationQueue) {
+    queues.push(appMigrationQueue)
+  }
   const adapters = []
   const serverAdapter: any = new KoaAdapter()
   for (let queue of queues) {
diff --git a/packages/server/src/automations/loopUtils.ts b/packages/server/src/automations/loopUtils.ts
new file mode 100644
index 0000000000..5ee2559050
--- /dev/null
+++ b/packages/server/src/automations/loopUtils.ts
@@ -0,0 +1,36 @@
+import * as automationUtils from "./automationUtils"
+
+type ObjValue = {
+  [key: string]: string | ObjValue
+}
+
+export function replaceFakeBindings(
+  originalStepInput: Record<string, any>,
+  loopStepNumber: number
+) {
+  for (const [key, value] of Object.entries(originalStepInput)) {
+    originalStepInput[key] = replaceBindingsRecursive(value, loopStepNumber)
+  }
+  return originalStepInput
+}
+
+function replaceBindingsRecursive(
+  value: string | ObjValue,
+  loopStepNumber: number
+) {
+  if (typeof value === "object") {
+    for (const [innerKey, innerValue] of Object.entries(value)) {
+      if (typeof innerValue === "string") {
+        value[innerKey] = automationUtils.substituteLoopStep(
+          innerValue,
+          `steps.${loopStepNumber}`
+        )
+      } else if (typeof innerValue === "object") {
+        value[innerKey] = replaceBindingsRecursive(innerValue, loopStepNumber)
+      }
+    }
+  } else if (typeof value === "string") {
+    value = automationUtils.substituteLoopStep(value, `steps.${loopStepNumber}`)
+  }
+  return value
+}
diff --git a/packages/server/src/automations/steps/createRow.ts b/packages/server/src/automations/steps/createRow.ts
index 5b5084b465..c7f5fcff3b 100644
--- a/packages/server/src/automations/steps/createRow.ts
+++ b/packages/server/src/automations/steps/createRow.ts
@@ -90,7 +90,6 @@ export async function run({ inputs, appId, emitter }: AutomationStepInput) {
       tableId: inputs.row.tableId,
     },
   })
-
   try {
     inputs.row = await cleanUpRow(inputs.row.tableId, inputs.row)
     inputs.row = await
sendAutomationAttachmentsToStorage( diff --git a/packages/server/src/automations/steps/filter.ts b/packages/server/src/automations/steps/filter.ts index 6867809500..624619bb95 100644 --- a/packages/server/src/automations/steps/filter.ts +++ b/packages/server/src/automations/steps/filter.ts @@ -73,7 +73,12 @@ export async function run({ inputs }: AutomationStepInput) { try { let { field, condition, value } = inputs // coerce types so that we can use them - if (!isNaN(value) && !isNaN(field)) { + if ( + !isNaN(value) && + !isNaN(field) && + typeof field !== "boolean" && + typeof value !== "boolean" + ) { value = parseFloat(value) field = parseFloat(field) } else if (!isNaN(Date.parse(value)) && !isNaN(Date.parse(field))) { diff --git a/packages/server/src/automations/steps/openai.ts b/packages/server/src/automations/steps/openai.ts index bc926de7b7..45ef5ef703 100644 --- a/packages/server/src/automations/steps/openai.ts +++ b/packages/server/src/automations/steps/openai.ts @@ -1,4 +1,5 @@ -import { Configuration, OpenAIApi } from "openai" +import { OpenAI } from "openai" + import { AutomationActionStepId, AutomationStepSchema, @@ -75,13 +76,11 @@ export async function run({ inputs }: AutomationStepInput) { } try { - const configuration = new Configuration({ + const openai = new OpenAI({ apiKey: environment.OPENAI_API_KEY, }) - const openai = new OpenAIApi(configuration) - - const completion = await openai.createChatCompletion({ + const completion = await openai.chat.completions.create({ model: inputs.model, messages: [ { @@ -90,8 +89,7 @@ export async function run({ inputs }: AutomationStepInput) { }, ], }) - - const response = completion?.data?.choices[0]?.message?.content + const response = completion?.choices[0]?.message?.content return { response, diff --git a/packages/server/src/automations/steps/queryRows.ts b/packages/server/src/automations/steps/queryRows.ts index f3db410d8c..aa3efa5425 100644 --- a/packages/server/src/automations/steps/queryRows.ts +++ b/packages/server/src/automations/steps/queryRows.ts @@ -14,14 +14,10 @@ import { EmptyFilterOption, SearchFilters, Table, + SortOrder, } from "@budibase/types" import { db as dbCore } from "@budibase/backend-core" -enum SortOrder { - ASCENDING = "ascending", - DESCENDING = "descending", -} - const SortOrderPretty = { [SortOrder.ASCENDING]: "Ascending", [SortOrder.DESCENDING]: "Descending", diff --git a/packages/server/src/automations/steps/sendSmtpEmail.ts b/packages/server/src/automations/steps/sendSmtpEmail.ts index 31a7759dea..bcb1699c6b 100644 --- a/packages/server/src/automations/steps/sendSmtpEmail.ts +++ b/packages/server/src/automations/steps/sendSmtpEmail.ts @@ -118,6 +118,14 @@ export async function run({ inputs }: AutomationStepInput) { } to = to || undefined + if (attachments) { + if (Array.isArray(attachments)) { + attachments.forEach(item => automationUtils.guardAttachment(item)) + } else { + automationUtils.guardAttachment(attachments) + } + } + try { let response = await sendSmtpEmail({ to, diff --git a/packages/server/src/automations/tests/createRow.spec.ts b/packages/server/src/automations/tests/createRow.spec.ts index e78236c5ac..62e9e24f9e 100644 --- a/packages/server/src/automations/tests/createRow.spec.ts +++ b/packages/server/src/automations/tests/createRow.spec.ts @@ -128,4 +128,31 @@ describe("test the create row action", () => { expect(objectData).toBeDefined() expect(objectData.ContentLength).toBeGreaterThan(0) }) + + it("should check that attachment without the correct keys throws an error", async () => { 
+    let attachmentTable = await config.createTable(
+      basicTableWithAttachmentField()
+    )
+
+    let attachmentRow: any = {
+      tableId: attachmentTable._id,
+    }
+
+    let filename = "test2.txt"
+    let presignedUrl = await uploadTestFile(filename)
+    let attachmentObject = {
+      wrongKey: presignedUrl,
+      anotherWrongKey: filename,
+    }
+
+    attachmentRow.single_file_attachment = attachmentObject
+    const res = await setup.runStep(setup.actions.CREATE_ROW.stepId, {
+      row: attachmentRow,
+    })
+
+    expect(res.success).toEqual(false)
+    expect(res.response).toEqual(
+      'Error: Attachments must have both "url" and "filename" keys. You have provided: wrongKey, anotherWrongKey'
+    )
+  })
 })
diff --git a/packages/server/src/automations/tests/openai.spec.ts b/packages/server/src/automations/tests/openai.spec.ts
index bd73827cce..618c2d7754 100644
--- a/packages/server/src/automations/tests/openai.spec.ts
+++ b/packages/server/src/automations/tests/openai.spec.ts
@@ -1,15 +1,13 @@
 const setup = require("./utilities")
 import environment from "../../environment"
-import openai from "openai"
+import { OpenAI } from "openai"
 
-jest.mock(
-  "openai",
-  jest.fn(() => ({
-    Configuration: jest.fn(),
-    OpenAIApi: jest.fn(() => ({
-      createChatCompletion: jest.fn(() => ({
-        data: {
+jest.mock("openai", () => ({
+  OpenAI: jest.fn().mockImplementation(() => ({
+    chat: {
+      completions: {
+        create: jest.fn(() => ({
           choices: [
             {
               message: {
@@ -17,15 +15,13 @@ jest.mock(
               },
             },
           ],
-        },
-      })),
-    })),
-  }))
-)
+        })),
+      },
+    },
+  })),
+}))
 
-const mockedOpenAIApi = openai.OpenAIApi as jest.MockedClass<
-  typeof openai.OpenAIApi
->
+const mockedOpenAI = OpenAI as jest.MockedClass<typeof OpenAI>
 
 const OPENAI_PROMPT = "What is the meaning of life?"
 
@@ -73,14 +69,18 @@ describe("test the openai action", () => {
   })
 
   it("should present the correct error message when an error is thrown from the createChatCompletion call", async () => {
-    mockedOpenAIApi.mockImplementation(
+    mockedOpenAI.mockImplementation(
      () =>
        ({
-          createChatCompletion: jest.fn(() => {
-            throw new Error(
-              "An error occurred while calling createChatCompletion"
-            )
-          }),
+          chat: {
+            completions: {
+              create: jest.fn(() => {
+                throw new Error(
+                  "An error occurred while calling createChatCompletion"
+                )
+              }),
+            },
+          },
        } as any)
    )
diff --git a/packages/server/src/automations/triggerInfo/rowUpdated.ts b/packages/server/src/automations/triggerInfo/rowUpdated.ts
index 5e60015808..eab7c40a09 100644
--- a/packages/server/src/automations/triggerInfo/rowUpdated.ts
+++ b/packages/server/src/automations/triggerInfo/rowUpdated.ts
@@ -27,10 +27,17 @@ export const definition: AutomationTriggerSchema = {
   },
   outputs: {
     properties: {
-      row: {
+      oldRow: {
         type: AutomationIOType.OBJECT,
         customType: AutomationCustomIOType.ROW,
-        description: "The row that was updated",
+        description: "The row before it was updated",
+        title: "Old Row",
+      },
+      row: {
+        type: AutomationIOType.OBJECT,
+        customType: AutomationCustomIOType.ROW,
+        description: "The row that was updated",
+        title: "Row",
       },
       id: {
         type: AutomationIOType.STRING,
diff --git a/packages/server/src/automations/triggers.ts b/packages/server/src/automations/triggers.ts
index 223b8d2eb6..9aa80035bd 100644
--- a/packages/server/src/automations/triggers.ts
+++ b/packages/server/src/automations/triggers.ts
@@ -8,7 +8,13 @@ import { checkTestFlag } from "../utilities/redis"
 import * as utils from "./utils"
 import env from "../environment"
 import { context, db as dbCore } from "@budibase/backend-core"
-import { Automation, Row, AutomationData, AutomationJob } from "@budibase/types"
+import {
+  Automation,
+  Row,
+  AutomationData,
+  AutomationJob,
+  UpdatedRowEventEmitter,
+} from "@budibase/types"
 import { executeInThread } from "../threads/automation"
 
 export const TRIGGER_DEFINITIONS = definitions
@@ -65,7 +71,7 @@ async function queueRelevantRowAutomations(
   })
 }
 
-emitter.on("row:save", async function (event) {
+emitter.on("row:save", async function (event: UpdatedRowEventEmitter) {
   /* istanbul ignore next */
   if (!event || !event.row || !event.row.tableId) {
     return
diff --git a/packages/server/src/constants/index.ts b/packages/server/src/constants/index.ts
index bc255ecb2a..60875b3daa 100644
--- a/packages/server/src/constants/index.ts
+++ b/packages/server/src/constants/index.ts
@@ -70,11 +70,6 @@ export enum DatasourceAuthTypes {
   GOOGLE = "google",
 }
 
-export enum SortDirection {
-  ASCENDING = "ASCENDING",
-  DESCENDING = "DESCENDING",
-}
-
 export const USERS_TABLE_SCHEMA: Table = {
   _id: "ta_users",
   type: "table",
diff --git a/packages/server/src/db/linkedRows/LinkDocument.ts b/packages/server/src/db/linkedRows/LinkDocument.ts
index 8d25bbe93c..a67d21599c 100644
--- a/packages/server/src/db/linkedRows/LinkDocument.ts
+++ b/packages/server/src/db/linkedRows/LinkDocument.ts
@@ -59,6 +59,9 @@ class LinkDocumentImpl implements LinkDocument {
     this.doc1 = docA.tableId > docB.tableId ? docA : docB
     this.doc2 = docA.tableId > docB.tableId ? docB : docA
   }
+  _rev?: string | undefined
+  createdAt?: string | number | undefined
+  updatedAt?: string | undefined
 }
 
 export default LinkDocumentImpl
diff --git a/packages/server/src/db/utils.ts b/packages/server/src/db/utils.ts
index b8221d208d..3bd1749d77 100644
--- a/packages/server/src/db/utils.ts
+++ b/packages/server/src/db/utils.ts
@@ -1,5 +1,5 @@
 import newid from "./newid"
-import { db as dbCore } from "@budibase/backend-core"
+import { context, db as dbCore } from "@budibase/backend-core"
 import {
   DatabaseQueryOpts,
   Datasource,
@@ -10,6 +10,7 @@ import {
   RelationshipFieldMetadata,
   SourceName,
   VirtualDocumentType,
+  LinkDocument,
 } from "@budibase/types"
 
 export { DocumentType, VirtualDocumentType } from "@budibase/types"
@@ -137,10 +138,24 @@ export function generateLinkID(
 /**
  * Gets parameters for retrieving link docs, this is a utility function for the getDocParams function.
  */
-export function getLinkParams(otherProps: any = {}) {
+function getLinkParams(otherProps: Partial<DatabaseQueryOpts> = {}) {
   return getDocParams(DocumentType.LINK, null, otherProps)
 }
 
+/**
+ * Gets all the link documents from the current app db.
+ */
+export async function allLinkDocs() {
+  const db = context.getAppDB()
+
+  const response = await db.allDocs<LinkDocument>(
+    getLinkParams({
+      include_docs: true,
+    })
+  )
+  return response.rows.map(row => row.doc!)
+}
+
 /**
  * Generates a new layout ID.
 * @returns The new layout ID which the layout doc can be stored under.
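For context on the allLinkDocs helper above: getDocParams builds a CouchDB allDocs key range from a document-type prefix, so the scan returns only IDs of that type. A rough sketch of the convention under the usual assumptions (an underscore separator and a high-unicode sentinel as the upper bound; the real getDocParams lives earlier in db/utils.ts):

const SEPARATOR = "_"
const UNICODE_MAX = "\ufff0"

// e.g. prefixScanParams("li", { include_docs: true }) bounds the scan to
// IDs like "li_...", which is what the link-document fetch relies on
function prefixScanParams(
  docType: string,
  otherProps: Record<string, any> = {}
) {
  return {
    startkey: `${docType}${SEPARATOR}`,
    endkey: `${docType}${SEPARATOR}${UNICODE_MAX}`,
    ...otherProps,
  }
}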
diff --git a/packages/server/src/environment.ts b/packages/server/src/environment.ts index 2c91f1cb48..341483d861 100644 --- a/packages/server/src/environment.ts +++ b/packages/server/src/environment.ts @@ -48,6 +48,7 @@ const environment = { MINIO_URL: process.env.MINIO_URL, WORKER_URL: process.env.WORKER_URL, AWS_REGION: process.env.AWS_REGION, + AWS_SESSION_TOKEN: process.env.AWS_SESSION_TOKEN, MINIO_ACCESS_KEY: process.env.MINIO_ACCESS_KEY, MINIO_SECRET_KEY: process.env.MINIO_SECRET_KEY, REDIS_URL: process.env.REDIS_URL, @@ -96,6 +97,7 @@ const environment = { DISABLE_THREADING: process.env.DISABLE_THREADING, DISABLE_AUTOMATION_LOGS: process.env.DISABLE_AUTOMATION_LOGS, DISABLE_RATE_LIMITING: process.env.DISABLE_RATE_LIMITING, + DISABLE_APP_MIGRATIONS: process.env.SKIP_APP_MIGRATIONS || false, MULTI_TENANCY: process.env.MULTI_TENANCY, ENABLE_ANALYTICS: process.env.ENABLE_ANALYTICS, SELF_HOSTED: process.env.SELF_HOSTED, diff --git a/packages/server/src/events/BudibaseEmitter.ts b/packages/server/src/events/BudibaseEmitter.ts index 43871d8754..8feb36bbf5 100644 --- a/packages/server/src/events/BudibaseEmitter.ts +++ b/packages/server/src/events/BudibaseEmitter.ts @@ -13,8 +13,14 @@ import { Table, Row } from "@budibase/types" * This is specifically quite important for template strings used in automations. */ class BudibaseEmitter extends EventEmitter { - emitRow(eventName: string, appId: string, row: Row, table?: Table) { - rowEmission({ emitter: this, eventName, appId, row, table }) + emitRow( + eventName: string, + appId: string, + row: Row, + table?: Table, + oldRow?: Row + ) { + rowEmission({ emitter: this, eventName, appId, row, table, oldRow }) } emitTable(eventName: string, appId: string, table?: Table) { diff --git a/packages/server/src/events/utils.ts b/packages/server/src/events/utils.ts index 20efb453f2..b972c8e473 100644 --- a/packages/server/src/events/utils.ts +++ b/packages/server/src/events/utils.ts @@ -7,6 +7,7 @@ type BBEventOpts = { appId: string table?: Table row?: Row + oldRow?: Row metadata?: any } @@ -18,6 +19,7 @@ type BBEvent = { appId: string tableId?: string row?: Row + oldRow?: Row table?: BBEventTable id?: string revision?: string @@ -31,9 +33,11 @@ export function rowEmission({ row, table, metadata, + oldRow, }: BBEventOpts) { let event: BBEvent = { row, + oldRow, appId, tableId: row?.tableId, } diff --git a/packages/server/src/integration-test/mysql.spec.ts b/packages/server/src/integration-test/mysql.spec.ts index 8cf4fb8212..eb6c840abc 100644 --- a/packages/server/src/integration-test/mysql.spec.ts +++ b/packages/server/src/integration-test/mysql.spec.ts @@ -1,24 +1,12 @@ -import fetch from "node-fetch" -import { - generateMakeRequest, - MakeRequestResponse, -} from "../api/routes/public/tests/utils" import * as setup from "../api/routes/tests/utilities" -import { - Datasource, - FieldType, - Table, - TableRequest, - TableSourceType, -} from "@budibase/types" +import { Datasource, FieldType } from "@budibase/types" import { DatabaseName, getDatasource, - rawQuery, + knexClient, } from "../integrations/tests/utils" import { generator } from "@budibase/backend-core/tests" -// @ts-ignore -fetch.mockSearch() +import { Knex } from "knex" function uniqueTableName(length?: number): string { return generator @@ -29,256 +17,75 @@ function uniqueTableName(length?: number): string { const config = setup.getConfig()! 
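The mysql test rewrite below swaps hand-written SQL strings (the old rawQuery helper) for a Knex client, so DDL goes through the schema builder instead. An illustrative equivalence, with made-up table and column names:

// before: raw SQL against the connection
await client.raw("CREATE TABLE `t1` (id1 SERIAL PRIMARY KEY);")

// after: the same DDL through Knex's schema builder
await client.schema.createTable("t1", table => {
  table.increments("id1").primary()
})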
-jest.mock("../websockets", () => ({ - clientAppSocket: jest.fn(), - gridAppSocket: jest.fn(), - initialise: jest.fn(), - builderSocket: { - emitTableUpdate: jest.fn(), - emitTableDeletion: jest.fn(), - emitDatasourceUpdate: jest.fn(), - emitDatasourceDeletion: jest.fn(), - emitScreenUpdate: jest.fn(), - emitAppMetadataUpdate: jest.fn(), - emitAppPublish: jest.fn(), - }, -})) - describe("mysql integrations", () => { - let makeRequest: MakeRequestResponse, - rawDatasource: Datasource, - datasource: Datasource, - primaryMySqlTable: Table + let datasource: Datasource + let client: Knex beforeAll(async () => { await config.init() - const apiKey = await config.generateApiKey() - - makeRequest = generateMakeRequest(apiKey, true) - - rawDatasource = await getDatasource(DatabaseName.MYSQL) + const rawDatasource = await getDatasource(DatabaseName.MYSQL) datasource = await config.api.datasource.create(rawDatasource) - }) - - beforeEach(async () => { - primaryMySqlTable = await config.createTable({ - name: uniqueTableName(), - type: "table", - primary: ["id"], - schema: { - id: { - name: "id", - type: FieldType.AUTO, - autocolumn: true, - }, - name: { - name: "name", - type: FieldType.STRING, - }, - description: { - name: "description", - type: FieldType.STRING, - }, - value: { - name: "value", - type: FieldType.NUMBER, - }, - }, - sourceId: datasource._id, - sourceType: TableSourceType.EXTERNAL, - }) + client = await knexClient(rawDatasource) }) afterAll(config.end) - it("validate table schema", async () => { - const res = await makeRequest("get", `/api/datasources/${datasource._id}`) - - expect(res.status).toBe(200) - expect(res.body).toEqual({ - config: { - database: expect.any(String), - host: datasource.config!.host, - password: "--secret-value--", - port: datasource.config!.port, - user: "root", - }, - plus: true, - source: "MYSQL", - type: "datasource_plus", - isSQL: true, - _id: expect.any(String), - _rev: expect.any(String), - createdAt: expect.any(String), - updatedAt: expect.any(String), - entities: expect.any(Object), - }) - }) - - describe("POST /api/datasources/verify", () => { - it("should be able to verify the connection", async () => { - await config.api.datasource.verify( - { - datasource: rawDatasource, - }, - { - body: { - connected: true, - }, - } - ) - }) - - it("should state an invalid datasource cannot connect", async () => { - await config.api.datasource.verify( - { - datasource: { - ...rawDatasource, - config: { - ...rawDatasource.config, - password: "wrongpassword", - }, - }, - }, - { - body: { - connected: false, - error: - "Access denied for the specified user. User does not have the necessary privileges or the provided credentials are incorrect. 
Please verify the credentials, and ensure that the user has appropriate permissions.", - }, - } - ) - }) - }) - - describe("POST /api/datasources/info", () => { - it("should fetch information about mysql datasource", async () => { - const primaryName = primaryMySqlTable.name - const response = await makeRequest("post", "/api/datasources/info", { - datasource: datasource, - }) - expect(response.status).toBe(200) - expect(response.body.tableNames).toBeDefined() - expect(response.body.tableNames.indexOf(primaryName)).not.toBe(-1) - }) - }) - describe("Integration compatibility with mysql search_path", () => { - let datasource: Datasource, rawDatasource: Datasource + let datasource: Datasource + let rawDatasource: Datasource + let client: Knex const database = generator.guid() const database2 = generator.guid() beforeAll(async () => { rawDatasource = await getDatasource(DatabaseName.MYSQL) + client = await knexClient(rawDatasource) - await rawQuery(rawDatasource, `CREATE DATABASE \`${database}\`;`) - await rawQuery(rawDatasource, `CREATE DATABASE \`${database2}\`;`) + await client.raw(`CREATE DATABASE \`${database}\`;`) + await client.raw(`CREATE DATABASE \`${database2}\`;`) - const pathConfig: any = { - ...rawDatasource, - config: { - ...rawDatasource.config!, - database, - }, - } - datasource = await config.api.datasource.create(pathConfig) + rawDatasource.config!.database = database + datasource = await config.api.datasource.create(rawDatasource) }) afterAll(async () => { - await rawQuery(rawDatasource, `DROP DATABASE \`${database}\`;`) - await rawQuery(rawDatasource, `DROP DATABASE \`${database2}\`;`) + await client.raw(`DROP DATABASE \`${database}\`;`) + await client.raw(`DROP DATABASE \`${database2}\`;`) }) it("discovers tables from any schema in search path", async () => { - await rawQuery( - rawDatasource, - `CREATE TABLE \`${database}\`.table1 (id1 SERIAL PRIMARY KEY);` - ) - const response = await makeRequest("post", "/api/datasources/info", { - datasource: datasource, + await client.schema.createTable(`${database}.table1`, table => { + table.increments("id1").primary() }) - expect(response.status).toBe(200) - expect(response.body.tableNames).toBeDefined() - expect(response.body.tableNames).toEqual( - expect.arrayContaining(["table1"]) - ) + const res = await config.api.datasource.info(datasource) + expect(res.tableNames).toBeDefined() + expect(res.tableNames).toEqual(expect.arrayContaining(["table1"])) }) it("does not mix columns from different tables", async () => { const repeated_table_name = "table_same_name" - await rawQuery( - rawDatasource, - `CREATE TABLE \`${database}\`.${repeated_table_name} (id SERIAL PRIMARY KEY, val1 TEXT);` - ) - await rawQuery( - rawDatasource, - `CREATE TABLE \`${database2}\`.${repeated_table_name} (id2 SERIAL PRIMARY KEY, val2 TEXT);` - ) - const response = await makeRequest( - "post", - `/api/datasources/${datasource._id}/schema`, - { - tablesFilter: [repeated_table_name], + await client.schema.createTable( + `${database}.${repeated_table_name}`, + table => { + table.increments("id").primary() + table.string("val1") } ) - expect(response.status).toBe(200) - expect( - response.body.datasource.entities[repeated_table_name].schema - ).toBeDefined() - const schema = - response.body.datasource.entities[repeated_table_name].schema - expect(Object.keys(schema).sort()).toEqual(["id", "val1"]) - }) - }) - - describe("POST /api/tables/", () => { - it("will rename a column", async () => { - await makeRequest("post", "/api/tables/", primaryMySqlTable) - - let 
renameColumnOnTable: TableRequest = { - ...primaryMySqlTable, - schema: { - id: { - name: "id", - type: FieldType.AUTO, - autocolumn: true, - externalType: "unsigned integer", - }, - name: { - name: "name", - type: FieldType.STRING, - externalType: "text", - }, - description: { - name: "description", - type: FieldType.STRING, - externalType: "text", - }, - age: { - name: "age", - type: FieldType.NUMBER, - externalType: "float(8,2)", - }, - }, - } - - const response = await makeRequest( - "post", - "/api/tables/", - renameColumnOnTable + await client.schema.createTable( + `${database2}.${repeated_table_name}`, + table => { + table.increments("id2").primary() + table.string("val2") + } ) - const ds = ( - await makeRequest("post", `/api/datasources/${datasource._id}/schema`) - ).body.datasource - - expect(response.status).toEqual(200) - expect(Object.keys(ds.entities![primaryMySqlTable.name].schema)).toEqual([ - "id", - "name", - "description", - "age", - ]) + const res = await config.api.datasource.fetchSchema({ + datasourceId: datasource._id!, + tablesFilter: [repeated_table_name], + }) + expect(res.datasource.entities![repeated_table_name].schema).toBeDefined() + const schema = res.datasource.entities![repeated_table_name].schema + expect(Object.keys(schema).sort()).toEqual(["id", "val1"]) }) }) @@ -290,28 +97,27 @@ describe("mysql integrations", () => { }) afterEach(async () => { - await rawQuery(rawDatasource, `DROP TABLE IF EXISTS \`${tableName}\``) + await client.schema.dropTableIfExists(tableName) }) it("recognises enum columns as options", async () => { const enumColumnName = "status" - const createTableQuery = ` - CREATE TABLE \`${tableName}\` ( - \`order_id\` INT AUTO_INCREMENT PRIMARY KEY, - \`customer_name\` VARCHAR(100) NOT NULL, - \`${enumColumnName}\` ENUM('pending', 'processing', 'shipped', 'delivered', 'cancelled') - ); - ` + await client.schema.createTable(tableName, table => { + table.increments("order_id").primary() + table.string("customer_name", 100).notNullable() + table.enum( + enumColumnName, + ["pending", "processing", "shipped", "delivered", "cancelled"], + { useNative: true, enumName: `${tableName}_${enumColumnName}` } + ) + }) - await rawQuery(rawDatasource, createTableQuery) + const res = await config.api.datasource.fetchSchema({ + datasourceId: datasource._id!, + }) - const response = await makeRequest( - "post", - `/api/datasources/${datasource._id}/schema` - ) - - const table = response.body.datasource.entities[tableName] + const table = res.datasource.entities![tableName] expect(table).toBeDefined() expect(table.schema[enumColumnName].type).toEqual(FieldType.OPTIONS) diff --git a/packages/server/src/integration-test/postgres.spec.ts b/packages/server/src/integration-test/postgres.spec.ts index ccf63d0820..f2e0382deb 100644 --- a/packages/server/src/integration-test/postgres.spec.ts +++ b/packages/server/src/integration-test/postgres.spec.ts @@ -1,1088 +1,29 @@ -import fetch from "node-fetch" -import { - generateMakeRequest, - MakeRequestResponse, -} from "../api/routes/public/tests/utils" - import * as setup from "../api/routes/tests/utilities" -import { - Datasource, - FieldType, - RelationshipType, - Row, - Table, - TableSourceType, -} from "@budibase/types" +import { Datasource, FieldType } from "@budibase/types" import _ from "lodash" import { generator } from "@budibase/backend-core/tests" -import { utils } from "@budibase/backend-core" import { DatabaseName, getDatasource, - rawQuery, + knexClient, } from "../integrations/tests/utils" - -// @ts-ignore 
-fetch.mockSearch() +import { Knex } from "knex" const config = setup.getConfig()! -jest.mock("../websockets") - describe("postgres integrations", () => { - let makeRequest: MakeRequestResponse, - rawDatasource: Datasource, - datasource: Datasource, - primaryPostgresTable: Table, - oneToManyRelationshipInfo: ForeignTableInfo, - manyToOneRelationshipInfo: ForeignTableInfo, - manyToManyRelationshipInfo: ForeignTableInfo + let datasource: Datasource + let client: Knex beforeAll(async () => { await config.init() - const apiKey = await config.generateApiKey() - - makeRequest = generateMakeRequest(apiKey, true) - - rawDatasource = await getDatasource(DatabaseName.POSTGRES) + const rawDatasource = await getDatasource(DatabaseName.POSTGRES) datasource = await config.api.datasource.create(rawDatasource) - }) - - beforeEach(async () => { - async function createAuxTable(prefix: string) { - return await config.createTable({ - name: `${prefix}_${generator - .guid() - .replaceAll("-", "") - .substring(0, 6)}`, - type: "table", - primary: ["id"], - primaryDisplay: "title", - schema: { - id: { - name: "id", - type: FieldType.AUTO, - autocolumn: true, - }, - title: { - name: "title", - type: FieldType.STRING, - }, - }, - sourceId: datasource._id, - sourceType: TableSourceType.EXTERNAL, - }) - } - - oneToManyRelationshipInfo = { - table: await createAuxTable("o2m"), - fieldName: "oneToManyRelation", - relationshipType: RelationshipType.ONE_TO_MANY, - } - manyToOneRelationshipInfo = { - table: await createAuxTable("m2o"), - fieldName: "manyToOneRelation", - relationshipType: RelationshipType.MANY_TO_ONE, - } - manyToManyRelationshipInfo = { - table: await createAuxTable("m2m"), - fieldName: "manyToManyRelation", - relationshipType: RelationshipType.MANY_TO_MANY, - } - - primaryPostgresTable = await config.createTable({ - name: `p_${generator.guid().replaceAll("-", "").substring(0, 6)}`, - type: "table", - primary: ["id"], - schema: { - id: { - name: "id", - type: FieldType.AUTO, - autocolumn: true, - }, - name: { - name: "name", - type: FieldType.STRING, - }, - description: { - name: "description", - type: FieldType.STRING, - }, - value: { - name: "value", - type: FieldType.NUMBER, - }, - oneToManyRelation: { - type: FieldType.LINK, - constraints: { - type: "array", - }, - fieldName: oneToManyRelationshipInfo.fieldName, - name: "oneToManyRelation", - relationshipType: RelationshipType.ONE_TO_MANY, - tableId: oneToManyRelationshipInfo.table._id!, - main: true, - }, - manyToOneRelation: { - type: FieldType.LINK, - constraints: { - type: "array", - }, - fieldName: manyToOneRelationshipInfo.fieldName, - name: "manyToOneRelation", - relationshipType: RelationshipType.MANY_TO_ONE, - tableId: manyToOneRelationshipInfo.table._id!, - main: true, - }, - manyToManyRelation: { - type: FieldType.LINK, - constraints: { - type: "array", - }, - fieldName: manyToManyRelationshipInfo.fieldName, - name: "manyToManyRelation", - relationshipType: RelationshipType.MANY_TO_MANY, - tableId: manyToManyRelationshipInfo.table._id!, - main: true, - }, - }, - sourceId: datasource._id, - sourceType: TableSourceType.EXTERNAL, - }) + client = await knexClient(rawDatasource) }) afterAll(config.end) - function generateRandomPrimaryRowData() { - return { - name: generator.name(), - description: generator.paragraph(), - value: generator.age(), - } - } - - type PrimaryRowData = { - name: string - description: string - value: number - } - - type ForeignTableInfo = { - table: Table - fieldName: string - relationshipType: RelationshipType - } - - 
type ForeignRowsInfo = { - row: Row - relationshipType: RelationshipType - } - - async function createPrimaryRow(opts: { - rowData: PrimaryRowData - createForeignRows?: { - createOneToMany?: boolean - createManyToOne?: number - createManyToMany?: number - } - }) { - let { rowData } = opts as any - let foreignRows: ForeignRowsInfo[] = [] - - if (opts?.createForeignRows?.createOneToMany) { - const foreignKey = `fk_${oneToManyRelationshipInfo.table.name}_${oneToManyRelationshipInfo.fieldName}` - - const foreignRow = await config.createRow({ - tableId: oneToManyRelationshipInfo.table._id, - title: generator.name(), - }) - - rowData = { - ...rowData, - [foreignKey]: foreignRow.id, - } - foreignRows.push({ - row: foreignRow, - relationshipType: oneToManyRelationshipInfo.relationshipType, - }) - } - - for (let i = 0; i < (opts?.createForeignRows?.createManyToOne || 0); i++) { - const foreignRow = await config.createRow({ - tableId: manyToOneRelationshipInfo.table._id, - title: generator.name(), - }) - - rowData = { - ...rowData, - [manyToOneRelationshipInfo.fieldName]: - rowData[manyToOneRelationshipInfo.fieldName] || [], - } - rowData[manyToOneRelationshipInfo.fieldName].push(foreignRow._id) - foreignRows.push({ - row: foreignRow, - relationshipType: RelationshipType.MANY_TO_ONE, - }) - } - - for (let i = 0; i < (opts?.createForeignRows?.createManyToMany || 0); i++) { - const foreignRow = await config.createRow({ - tableId: manyToManyRelationshipInfo.table._id, - title: generator.name(), - }) - - rowData = { - ...rowData, - [manyToManyRelationshipInfo.fieldName]: - rowData[manyToManyRelationshipInfo.fieldName] || [], - } - rowData[manyToManyRelationshipInfo.fieldName].push(foreignRow._id) - foreignRows.push({ - row: foreignRow, - relationshipType: RelationshipType.MANY_TO_MANY, - }) - } - - const row = await config.createRow({ - tableId: primaryPostgresTable._id, - ...rowData, - }) - - return { row, foreignRows } - } - - async function createDefaultPgTable() { - return await config.createTable({ - name: generator.guid().replaceAll("-", "").substring(0, 10), - type: "table", - primary: ["id"], - schema: { - id: { - name: "id", - type: FieldType.AUTO, - autocolumn: true, - }, - }, - sourceId: datasource._id, - sourceType: TableSourceType.EXTERNAL, - }) - } - - const createRandomTableWithRows = async () => { - const tableId = (await createDefaultPgTable())._id! 
- return await config.api.row.save(tableId, { - tableId, - title: generator.name(), - }) - } - - async function populatePrimaryRows( - count: number, - opts?: { - createOneToMany?: boolean - createManyToOne?: number - createManyToMany?: number - } - ) { - return await Promise.all( - Array(count) - .fill({}) - .map(async () => { - const rowData = generateRandomPrimaryRowData() - return { - rowData, - ...(await createPrimaryRow({ - rowData, - createForeignRows: opts, - })), - } - }) - ) - } - - it("validate table schema", async () => { - const res = await makeRequest("get", `/api/datasources/${datasource._id}`) - - expect(res.status).toBe(200) - expect(res.body).toEqual({ - config: { - ca: false, - database: expect.any(String), - host: datasource.config!.host, - password: "--secret-value--", - port: datasource.config!.port, - rejectUnauthorized: false, - schema: "public", - ssl: false, - user: "postgres", - }, - plus: true, - source: "POSTGRES", - isSQL: true, - type: "datasource_plus", - _id: expect.any(String), - _rev: expect.any(String), - createdAt: expect.any(String), - updatedAt: expect.any(String), - entities: expect.any(Object), - }) - }) - - describe("POST /api/:tableId/rows", () => { - const createRow = (tableId: string | undefined, body: object) => - makeRequest("post", `/api/${tableId}/rows`, body) - - describe("given than no row exists", () => { - it("adding a new one persists it", async () => { - const newRow = generateRandomPrimaryRowData() - - const res = await createRow(primaryPostgresTable._id, newRow) - - expect(res.status).toBe(200) - - const persistedRows = await config.getRows(primaryPostgresTable._id!) - expect(persistedRows).toHaveLength(1) - - const expected = { - ...res.body, - ...newRow, - } - - expect(persistedRows).toEqual([expect.objectContaining(expected)]) - }) - - it("multiple rows can be persisted", async () => { - const numberOfRows = 10 - const newRows: Row[] = Array(numberOfRows).fill( - generateRandomPrimaryRowData() - ) - - await Promise.all( - newRows.map(async newRow => { - const res = await createRow(primaryPostgresTable._id, newRow) - expect(res.status).toBe(200) - }) - ) - - const persistedRows = await config.getRows(primaryPostgresTable._id!) - expect(persistedRows).toHaveLength(numberOfRows) - expect(persistedRows).toEqual( - expect.arrayContaining(newRows.map(expect.objectContaining)) - ) - }) - }) - }) - - describe("PATCH /api/:tableId/rows", () => { - const updateRow = (tableId: string | undefined, body: Row) => - makeRequest("patch", `/api/${tableId}/rows`, body) - - describe("given than a row exists", () => { - let row: Row - beforeEach(async () => { - let rowResponse = _.sample(await populatePrimaryRows(1))! 
- row = rowResponse.row - }) - - it("updating it persists it", async () => { - const newName = generator.name() - const newValue = generator.age() - const updatedRow = { - ...row, - name: newName, - value: newValue, - } - - const res = await updateRow(primaryPostgresTable._id, updatedRow) - - expect(res.status).toBe(200) - expect(res.body).toEqual(updatedRow) - - const persistedRow = await config.api.row.get( - primaryPostgresTable._id!, - row.id - ) - - expect(persistedRow).toEqual( - expect.objectContaining({ - id: row.id, - name: newName, - value: newValue, - }) - ) - }) - }) - }) - - describe("DELETE /api/:tableId/rows", () => { - const deleteRow = ( - tableId: string | undefined, - body: Row | { rows: Row[] } - ) => makeRequest("delete", `/api/${tableId}/rows`, body) - - describe("given than multiple row exist", () => { - const numberOfInitialRows = 5 - let rows: Row[] - beforeEach(async () => { - rows = (await populatePrimaryRows(numberOfInitialRows)).map(x => x.row) - }) - - it("delete request removes it", async () => { - const row = _.sample(rows)! - const res = await deleteRow(primaryPostgresTable._id, row) - - expect(res.status).toBe(200) - - const persistedRows = await config.getRows(primaryPostgresTable._id!) - expect(persistedRows).toHaveLength(numberOfInitialRows - 1) - - expect(row.id).toBeDefined() - expect(persistedRows).not.toContain( - expect.objectContaining({ _id: row.id }) - ) - }) - - it("multiple rows can be removed at once", async () => { - let rowsToDelete = _.sampleSize(rows, 3)! - - const res = await deleteRow(primaryPostgresTable._id, { - rows: rowsToDelete, - }) - - expect(res.status).toBe(200) - - const persistedRows = await config.getRows(primaryPostgresTable._id!) - expect(persistedRows).toHaveLength(numberOfInitialRows - 3) - - for (const row of rowsToDelete) { - expect(persistedRows).not.toContain( - expect.objectContaining({ _id: row.id }) - ) - } - }) - }) - }) - - describe("GET /api/:tableId/rows/:rowId", () => { - const getRow = (tableId: string | undefined, rowId?: string | undefined) => - makeRequest("get", `/api/${tableId}/rows/${rowId}`) - - describe("given than a table have a single row", () => { - let rowData: PrimaryRowData, row: Row - beforeEach(async () => { - const [createdRow] = await populatePrimaryRows(1) - rowData = createdRow.rowData - row = createdRow.row - }) - - it("the row can be retrieved successfully", async () => { - const res = await getRow(primaryPostgresTable._id, row.id) - - expect(res.status).toBe(200) - - expect(res.body).toEqual(expect.objectContaining(rowData)) - }) - }) - - describe("given than a table have a multiple rows", () => { - let rows: { row: Row; rowData: PrimaryRowData }[] - - beforeEach(async () => { - rows = await populatePrimaryRows(5) - }) - - it("a single row can be retrieved successfully", async () => { - const { rowData, row } = _.sample(rows)! 
- - const res = await getRow(primaryPostgresTable._id, row.id) - - expect(res.status).toBe(200) - - expect(res.body).toEqual(expect.objectContaining(rowData)) - }) - }) - - describe("given a row with relation data", () => { - let row: Row - let rowData: { - name: string - description: string - value: number - } - let foreignRows: ForeignRowsInfo[] - - describe("with all relationship types", () => { - beforeEach(async () => { - let [createdRow] = await populatePrimaryRows(1, { - createOneToMany: true, - createManyToOne: 3, - createManyToMany: 2, - }) - row = createdRow.row - rowData = createdRow.rowData - foreignRows = createdRow.foreignRows - }) - - it("only one to primary keys are retrieved", async () => { - const res = await getRow(primaryPostgresTable._id, row.id) - - expect(res.status).toBe(200) - - const one2ManyForeignRows = foreignRows.filter( - x => x.relationshipType === RelationshipType.ONE_TO_MANY - ) - const many2OneForeignRows = foreignRows.filter( - x => x.relationshipType === RelationshipType.MANY_TO_ONE - ) - const many2ManyForeignRows = foreignRows.filter( - x => x.relationshipType === RelationshipType.MANY_TO_MANY - ) - expect(one2ManyForeignRows).toHaveLength(1) - - expect(res.body).toEqual({ - ...rowData, - id: row.id, - tableId: row.tableId, - _id: expect.any(String), - _rev: expect.any(String), - [`fk_${oneToManyRelationshipInfo.table.name}_${oneToManyRelationshipInfo.fieldName}`]: - one2ManyForeignRows[0].row.id, - [oneToManyRelationshipInfo.fieldName]: expect.arrayContaining( - one2ManyForeignRows.map(r => ({ - _id: r.row._id, - primaryDisplay: r.row.title, - })) - ), - [manyToOneRelationshipInfo.fieldName]: expect.arrayContaining( - many2OneForeignRows.map(r => ({ - _id: r.row._id, - primaryDisplay: r.row.title, - })) - ), - [manyToManyRelationshipInfo.fieldName]: expect.arrayContaining( - many2ManyForeignRows.map(r => ({ - _id: r.row._id, - primaryDisplay: r.row.title, - })) - ), - }) - }) - }) - - describe("with only one to many", () => { - beforeEach(async () => { - let [createdRow] = await populatePrimaryRows(1, { - createOneToMany: true, - }) - row = createdRow.row - rowData = createdRow.rowData - foreignRows = createdRow.foreignRows - }) - - it("only one to many foreign keys are retrieved", async () => { - const res = await getRow(primaryPostgresTable._id, row.id) - - expect(res.status).toBe(200) - - expect(foreignRows).toHaveLength(1) - - expect(res.body).toEqual({ - ...rowData, - id: row.id, - tableId: row.tableId, - _id: expect.any(String), - _rev: expect.any(String), - [`fk_${oneToManyRelationshipInfo.table.name}_${oneToManyRelationshipInfo.fieldName}`]: - foreignRows[0].row.id, - [oneToManyRelationshipInfo.fieldName]: expect.arrayContaining( - foreignRows.map(r => ({ - _id: r.row._id, - primaryDisplay: r.row.title, - })) - ), - }) - }) - }) - - describe("with only many to one", () => { - beforeEach(async () => { - let [createdRow] = await populatePrimaryRows(1, { - createManyToOne: 3, - }) - row = createdRow.row - rowData = createdRow.rowData - foreignRows = createdRow.foreignRows - }) - - it("only one to many foreign keys are retrieved", async () => { - const res = await getRow(primaryPostgresTable._id, row.id) - - expect(res.status).toBe(200) - - expect(foreignRows).toHaveLength(3) - - expect(res.body).toEqual({ - ...rowData, - id: row.id, - tableId: row.tableId, - _id: expect.any(String), - _rev: expect.any(String), - [manyToOneRelationshipInfo.fieldName]: expect.arrayContaining( - foreignRows.map(r => ({ - _id: r.row._id, - primaryDisplay: 
r.row.title, - })) - ), - }) - }) - }) - - describe("with only many to many", () => { - beforeEach(async () => { - let [createdRow] = await populatePrimaryRows(1, { - createManyToMany: 2, - }) - row = createdRow.row - rowData = createdRow.rowData - foreignRows = createdRow.foreignRows - }) - - it("only one to many foreign keys are retrieved", async () => { - const res = await getRow(primaryPostgresTable._id, row.id) - - expect(res.status).toBe(200) - - expect(foreignRows).toHaveLength(2) - - expect(res.body).toEqual({ - ...rowData, - id: row.id, - tableId: row.tableId, - _id: expect.any(String), - _rev: expect.any(String), - [manyToManyRelationshipInfo.fieldName]: expect.arrayContaining( - foreignRows.map(r => ({ - _id: r.row._id, - primaryDisplay: r.row.title, - })) - ), - }) - }) - }) - }) - }) - - describe("POST /api/:tableId/search", () => { - const search = (tableId: string | undefined, body?: object) => - makeRequest("post", `/api/${tableId}/search`, body) - - describe("search without parameters", () => { - describe("given than a table has no rows", () => { - it("search without query returns empty", async () => { - const res = await search(primaryPostgresTable._id) - - expect(res.status).toBe(200) - - expect(res.body).toEqual({ - rows: [], - hasNextPage: false, - }) - }) - }) - - describe("given than a table has multiple rows", () => { - const rowsCount = 6 - let rows: { - row: Row - rowData: PrimaryRowData - }[] - beforeEach(async () => { - rows = await populatePrimaryRows(rowsCount) - }) - - it("search without query returns all of them", async () => { - const res = await search(primaryPostgresTable._id) - - expect(res.status).toBe(200) - - expect(res.body).toEqual({ - rows: expect.arrayContaining( - rows.map(r => expect.objectContaining(r.rowData)) - ), - hasNextPage: false, - }) - expect(res.body.rows).toHaveLength(rowsCount) - }) - }) - - describe("given than multiple tables have multiple rows", () => { - const rowsCount = 6 - beforeEach(async () => { - await createRandomTableWithRows() - await createRandomTableWithRows() - - await populatePrimaryRows(rowsCount) - - await createRandomTableWithRows() - }) - it("search only return the requested ones", async () => { - const res = await search(primaryPostgresTable._id) - - expect(res.status).toBe(200) - - expect(res.body.rows).toHaveLength(rowsCount) - }) - }) - }) - - it("Querying by a string field returns the rows with field containing or starting by that value", async () => { - const name = generator.name() - const rowsToFilter = [ - ...Array(2).fill({ - name, - description: generator.paragraph(), - value: generator.age(), - }), - ...Array(2).fill({ - name: `${name}${utils.newid()}`, - description: generator.paragraph(), - value: generator.age(), - }), - ] - - await populatePrimaryRows(3) - for (const row of rowsToFilter) { - await createPrimaryRow({ - rowData: row, - }) - } - await populatePrimaryRows(1) - - const res = await search(primaryPostgresTable._id, { - query: { - string: { - name, - }, - }, - }) - - expect(res.status).toBe(200) - - expect(res.body).toEqual({ - rows: expect.arrayContaining(rowsToFilter.map(expect.objectContaining)), - hasNextPage: false, - }) - expect(res.body.rows).toHaveLength(4) - }) - - it("Querying respects the limit fields", async () => { - await populatePrimaryRows(6) - - const res = await search(primaryPostgresTable._id, { - limit: 2, - }) - - expect(res.status).toBe(200) - - expect(res.body.rows).toHaveLength(2) - }) - - describe("sort", () => { - beforeEach(async () => { - const defaultValue = 
generateRandomPrimaryRowData() - - await createPrimaryRow({ - rowData: { - ...defaultValue, - name: "d", - value: 3, - }, - }) - await createPrimaryRow({ - rowData: { ...defaultValue, name: "aaa", value: 40 }, - }) - await createPrimaryRow({ - rowData: { ...defaultValue, name: "ccccc", value: -5 }, - }) - await createPrimaryRow({ - rowData: { ...defaultValue, name: "bb", value: 0 }, - }) - }) - - it("Querying respects the sort order when sorting ascending by a string value", async () => { - const res = await search(primaryPostgresTable._id, { - sort: "name", - sortOrder: "ascending", - sortType: "string", - }) - - expect(res.status).toBe(200) - expect(res.body.rows).toEqual([ - expect.objectContaining({ name: "aaa" }), - expect.objectContaining({ name: "bb" }), - expect.objectContaining({ name: "ccccc" }), - expect.objectContaining({ name: "d" }), - ]) - }) - - it("Querying respects the sort order when sorting descending by a string value", async () => { - const res = await search(primaryPostgresTable._id, { - sort: "name", - sortOrder: "descending", - sortType: "string", - }) - - expect(res.status).toBe(200) - expect(res.body.rows).toEqual([ - expect.objectContaining({ name: "d" }), - expect.objectContaining({ name: "ccccc" }), - expect.objectContaining({ name: "bb" }), - expect.objectContaining({ name: "aaa" }), - ]) - }) - - it("Querying respects the sort order when sorting ascending by a numeric value", async () => { - const res = await search(primaryPostgresTable._id, { - sort: "value", - sortOrder: "ascending", - sortType: "number", - }) - - expect(res.status).toBe(200) - expect(res.body.rows).toEqual([ - expect.objectContaining({ value: -5 }), - expect.objectContaining({ value: 0 }), - expect.objectContaining({ value: 3 }), - expect.objectContaining({ value: 40 }), - ]) - }) - - it("Querying respects the sort order when sorting descending by a numeric value", async () => { - const res = await search(primaryPostgresTable._id, { - sort: "value", - sortOrder: "descending", - sortType: "number", - }) - - expect(res.status).toBe(200) - expect(res.body.rows).toEqual([ - expect.objectContaining({ value: 40 }), - expect.objectContaining({ value: 3 }), - expect.objectContaining({ value: 0 }), - expect.objectContaining({ value: -5 }), - ]) - }) - }) - }) - - describe("GET /api/:tableId/:rowId/enrich", () => { - const getAll = (tableId: string | undefined, rowId: string | undefined) => - makeRequest("get", `/api/${tableId}/${rowId}/enrich`) - describe("given a row with relation data", () => { - let row: Row, rowData: PrimaryRowData, foreignRows: ForeignRowsInfo[] - - describe("with all relationship types", () => { - beforeEach(async () => { - rowData = generateRandomPrimaryRowData() - const rowsInfo = await createPrimaryRow({ - rowData, - createForeignRows: { - createOneToMany: true, - createManyToOne: 3, - createManyToMany: 2, - }, - }) - - row = rowsInfo.row - foreignRows = rowsInfo.foreignRows - }) - - it("enrich populates the foreign fields", async () => { - const res = await getAll(primaryPostgresTable._id, row.id) - - expect(res.status).toBe(200) - - const foreignRowsByType = _.groupBy( - foreignRows, - x => x.relationshipType - ) - const m2mFieldName = manyToManyRelationshipInfo.fieldName, - o2mFieldName = oneToManyRelationshipInfo.fieldName, - m2oFieldName = manyToOneRelationshipInfo.fieldName - const m2mRow1 = res.body[m2mFieldName].find( - (row: Row) => row.id === 1 - ) - const m2mRow2 = res.body[m2mFieldName].find( - (row: Row) => row.id === 2 - ) - expect(m2mRow1).toEqual({ - 
...foreignRowsByType[RelationshipType.MANY_TO_MANY][0].row, - [m2mFieldName]: [ - { - _id: row._id, - }, - ], - }) - expect(m2mRow2).toEqual({ - ...foreignRowsByType[RelationshipType.MANY_TO_MANY][1].row, - [m2mFieldName]: [ - { - _id: row._id, - }, - ], - }) - const m2oRel = { - [m2oFieldName]: [ - { - _id: row._id, - }, - ], - } - expect(res.body[m2oFieldName]).toEqual([ - { - ...m2oRel, - ...foreignRowsByType[RelationshipType.MANY_TO_ONE][0].row, - [`fk_${manyToOneRelationshipInfo.table.name}_${manyToOneRelationshipInfo.fieldName}`]: - row.id, - }, - { - ...m2oRel, - ...foreignRowsByType[RelationshipType.MANY_TO_ONE][1].row, - [`fk_${manyToOneRelationshipInfo.table.name}_${manyToOneRelationshipInfo.fieldName}`]: - row.id, - }, - { - ...m2oRel, - ...foreignRowsByType[RelationshipType.MANY_TO_ONE][2].row, - [`fk_${manyToOneRelationshipInfo.table.name}_${manyToOneRelationshipInfo.fieldName}`]: - row.id, - }, - ]) - const o2mRel = { - [o2mFieldName]: [ - { - _id: row._id, - }, - ], - } - expect(res.body[o2mFieldName]).toEqual([ - { - ...o2mRel, - ...foreignRowsByType[RelationshipType.ONE_TO_MANY][0].row, - _id: expect.any(String), - _rev: expect.any(String), - }, - ]) - }) - }) - }) - }) - - describe("GET /api/:tableId/rows", () => { - const getAll = (tableId: string | undefined) => - makeRequest("get", `/api/${tableId}/rows`) - - describe("given a table with no rows", () => { - it("get request returns empty", async () => { - const res = await getAll(primaryPostgresTable._id) - - expect(res.status).toBe(200) - - expect(res.body).toHaveLength(0) - }) - }) - describe("given a table with multiple rows", () => { - const rowsCount = 6 - let rows: { - row: Row - foreignRows: ForeignRowsInfo[] - rowData: PrimaryRowData - }[] - beforeEach(async () => { - rows = await populatePrimaryRows(rowsCount) - }) - - it("get request returns all of them", async () => { - const res = await getAll(primaryPostgresTable._id) - - expect(res.status).toBe(200) - - expect(res.body).toHaveLength(rowsCount) - expect(res.body).toEqual( - expect.arrayContaining( - rows.map(r => expect.objectContaining(r.rowData)) - ) - ) - }) - }) - - describe("given multiple tables with multiple rows", () => { - const rowsCount = 6 - - beforeEach(async () => { - await createRandomTableWithRows() - await populatePrimaryRows(rowsCount) - await createRandomTableWithRows() - }) - - it("get returns the requested ones", async () => { - const res = await getAll(primaryPostgresTable._id) - - expect(res.status).toBe(200) - - expect(res.body).toHaveLength(rowsCount) - }) - }) - }) - - describe("POST /api/datasources/verify", () => { - it("should be able to verify the connection", async () => { - await config.api.datasource.verify( - { - datasource: await getDatasource(DatabaseName.POSTGRES), - }, - { - body: { - connected: true, - }, - } - ) - }) - - it("should state an invalid datasource cannot connect", async () => { - const dbConfig = await getDatasource(DatabaseName.POSTGRES) - await config.api.datasource.verify( - { - datasource: { - ...dbConfig, - config: { - ...dbConfig.config, - password: "wrongpassword", - }, - }, - }, - { - body: { - connected: false, - error: 'password authentication failed for user "postgres"', - }, - } - ) - }) - }) - - describe("POST /api/datasources/info", () => { - it("should fetch information about postgres datasource", async () => { - const primaryName = primaryPostgresTable.name - const response = await makeRequest("post", "/api/datasources/info", { - datasource: datasource, - }) - 
expect(response.status).toBe(200) - expect(response.body.tableNames).toBeDefined() - expect(response.body.tableNames.indexOf(primaryName)).not.toBe(-1) - }) - }) - describe("POST /api/datasources/:datasourceId/schema", () => { let tableName: string @@ -1091,34 +32,33 @@ describe("postgres integrations", () => { }) afterEach(async () => { - await rawQuery(rawDatasource, `DROP TABLE IF EXISTS "${tableName}"`) + await client.schema.dropTableIfExists(tableName) }) it("recognises when a table has no primary key", async () => { - await rawQuery(rawDatasource, `CREATE TABLE "${tableName}" (id SERIAL)`) + await client.schema.createTable(tableName, table => { + table.increments("id", { primaryKey: false }) + }) - const response = await makeRequest( - "post", - `/api/datasources/${datasource._id}/schema` - ) + const response = await config.api.datasource.fetchSchema({ + datasourceId: datasource._id!, + }) - expect(response.body.errors).toEqual({ + expect(response.errors).toEqual({ [tableName]: "Table must have a primary key.", }) }) it("recognises when a table is using a reserved column name", async () => { - await rawQuery( - rawDatasource, - `CREATE TABLE "${tableName}" (_id SERIAL PRIMARY KEY) ` - ) + await client.schema.createTable(tableName, table => { + table.increments("_id").primary() + }) - const response = await makeRequest( - "post", - `/api/datasources/${datasource._id}/schema` - ) + const response = await config.api.datasource.fetchSchema({ + datasourceId: datasource._id!, + }) - expect(response.body.errors).toEqual({ + expect(response.errors).toEqual({ [tableName]: "Table contains invalid columns.", }) }) @@ -1128,134 +68,119 @@ describe("postgres integrations", () => { .guid() .replaceAll("-", "") .substring(0, 6)}` - const enumColumnName = "status" - await rawQuery( - rawDatasource, - ` - CREATE TYPE order_status AS ENUM ('pending', 'processing', 'shipped', 'delivered', 'cancelled'); - - CREATE TABLE ${tableName} ( - order_id SERIAL PRIMARY KEY, - customer_name VARCHAR(100) NOT NULL, - ${enumColumnName} order_status - ); - ` - ) + await client.schema.createTable(tableName, table => { + table.increments("order_id").primary() + table.string("customer_name").notNullable() + table.enum("status", ["pending", "processing", "shipped"], { + useNative: true, + enumName: `${tableName}_status`, + }) + }) - const response = await makeRequest( - "post", - `/api/datasources/${datasource._id}/schema` - ) + const response = await config.api.datasource.fetchSchema({ + datasourceId: datasource._id!, + }) - const table = response.body.datasource.entities[tableName] + const table = response.datasource.entities?.[tableName] expect(table).toBeDefined() - expect(table.schema[enumColumnName].type).toEqual(FieldType.OPTIONS) + expect(table?.schema["status"].type).toEqual(FieldType.OPTIONS) }) }) describe("Integration compatibility with postgres search_path", () => { - let rawDatasource: Datasource, - datasource: Datasource, - schema1: string, - schema2: string + let datasource: Datasource + let client: Knex + let schema1: string + let schema2: string beforeEach(async () => { schema1 = generator.guid().replaceAll("-", "") schema2 = generator.guid().replaceAll("-", "") - rawDatasource = await getDatasource(DatabaseName.POSTGRES) - const dbConfig = rawDatasource.config! 
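The hunks above swap hand-written SQL strings for knex's schema builder. A minimal self-contained sketch of the same pattern, assuming a reachable Postgres instance (the table name and connection details are illustrative, not from this PR):

    import knex from "knex"

    const client = knex({
      client: "pg",
      connection: { host: "localhost", port: 5432, user: "postgres", password: "password" },
    })

    // before: rawQuery(rawDatasource, `CREATE TABLE "${tableName}" (id SERIAL)`)
    await client.schema.createTable("no_pk_table", table => {
      table.increments("id", { primaryKey: false }) // auto-incrementing column, no PK constraint
    })

    // before: rawQuery(rawDatasource, `DROP TABLE IF EXISTS "${tableName}"`)
    await client.schema.dropTableIfExists("no_pk_table")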
+ const rawDatasource = await getDatasource(DatabaseName.POSTGRES) + client = await knexClient(rawDatasource) - await rawQuery(rawDatasource, `CREATE SCHEMA "${schema1}";`) - await rawQuery(rawDatasource, `CREATE SCHEMA "${schema2}";`) + await client.schema.createSchema(schema1) + await client.schema.createSchema(schema2) - const pathConfig: any = { - ...rawDatasource, - config: { - ...dbConfig, - schema: `${schema1}, ${schema2}`, - }, - } - datasource = await config.api.datasource.create(pathConfig) + rawDatasource.config!.schema = `${schema1}, ${schema2}` + + client = await knexClient(rawDatasource) + datasource = await config.api.datasource.create(rawDatasource) }) afterEach(async () => { - await rawQuery(rawDatasource, `DROP SCHEMA "${schema1}" CASCADE;`) - await rawQuery(rawDatasource, `DROP SCHEMA "${schema2}" CASCADE;`) + await client.schema.dropSchema(schema1, true) + await client.schema.dropSchema(schema2, true) }) it("discovers tables from any schema in search path", async () => { - await rawQuery( - rawDatasource, - `CREATE TABLE "${schema1}".table1 (id1 SERIAL PRIMARY KEY);` - ) - await rawQuery( - rawDatasource, - `CREATE TABLE "${schema2}".table2 (id2 SERIAL PRIMARY KEY);` - ) - const response = await makeRequest("post", "/api/datasources/info", { - datasource: datasource, + await client.schema.createTable(`${schema1}.table1`, table => { + table.increments("id1").primary() }) - expect(response.status).toBe(200) - expect(response.body.tableNames).toBeDefined() - expect(response.body.tableNames).toEqual( + + await client.schema.createTable(`${schema2}.table2`, table => { + table.increments("id2").primary() + }) + + const response = await config.api.datasource.info(datasource) + expect(response.tableNames).toBeDefined() + expect(response.tableNames).toEqual( expect.arrayContaining(["table1", "table2"]) ) }) it("does not mix columns from different tables", async () => { const repeated_table_name = "table_same_name" - await rawQuery( - rawDatasource, - `CREATE TABLE "${schema1}".${repeated_table_name} (id SERIAL PRIMARY KEY, val1 TEXT);` - ) - await rawQuery( - rawDatasource, - `CREATE TABLE "${schema2}".${repeated_table_name} (id2 SERIAL PRIMARY KEY, val2 TEXT);` - ) - const response = await makeRequest( - "post", - `/api/datasources/${datasource._id}/schema`, - { - tablesFilter: [repeated_table_name], + + await client.schema.createTable( + `${schema1}.${repeated_table_name}`, + table => { + table.increments("id").primary() + table.string("val1") } ) - expect(response.status).toBe(200) + + await client.schema.createTable( + `${schema2}.${repeated_table_name}`, + table => { + table.increments("id2").primary() + table.string("val2") + } + ) + + const response = await config.api.datasource.fetchSchema({ + datasourceId: datasource._id!, + tablesFilter: [repeated_table_name], + }) expect( - response.body.datasource.entities[repeated_table_name].schema + response.datasource.entities?.[repeated_table_name].schema ).toBeDefined() - const schema = - response.body.datasource.entities[repeated_table_name].schema - expect(Object.keys(schema).sort()).toEqual(["id", "val1"]) + const schema = response.datasource.entities?.[repeated_table_name].schema + expect(Object.keys(schema || {}).sort()).toEqual(["id", "val1"]) }) }) describe("check custom column types", () => { beforeAll(async () => { - await rawQuery( - rawDatasource, - `CREATE TABLE binaryTable ( - id BYTEA PRIMARY KEY, - column1 TEXT, - column2 INT - ); - ` - ) + await client.schema.createTable("binaryTable", table => { + 
table.binary("id").primary() + table.string("column1") + table.integer("column2") + }) }) it("should handle binary columns", async () => { - const response = await makeRequest( - "post", - `/api/datasources/${datasource._id}/schema` - ) - expect(response.body).toBeDefined() - expect(response.body.datasource.entities).toBeDefined() - const table = response.body.datasource.entities["binarytable"] + const response = await config.api.datasource.fetchSchema({ + datasourceId: datasource._id!, + }) + expect(response.datasource.entities).toBeDefined() + const table = response.datasource.entities?.["binaryTable"] expect(table).toBeDefined() - expect(table.schema.id.externalType).toBe("bytea") - const row = await config.api.row.save(table._id, { + expect(table?.schema.id.externalType).toBe("bytea") + const row = await config.api.row.save(table?._id!, { id: "1111", column1: "hello", column2: 222, @@ -1265,4 +190,43 @@ describe("postgres integrations", () => { expect(JSON.parse(decoded)[0]).toBe("1111") }) }) + + describe("check fetching null/not null table", () => { + beforeAll(async () => { + await client.schema.createTable("nullableTable", table => { + table.increments("order_id").primary() + table.integer("order_number").notNullable() + }) + }) + + it("should be able to change the table to allow nullable and refetch this", async () => { + const response = await config.api.datasource.fetchSchema({ + datasourceId: datasource._id!, + }) + const entities = response.datasource.entities + expect(entities).toBeDefined() + const nullableTable = entities?.["nullableTable"] + expect(nullableTable).toBeDefined() + expect( + nullableTable?.schema["order_number"].constraints?.presence + ).toEqual(true) + + // need to perform these calls raw to the DB so that the external state of the DB differs to what Budibase + // is aware of - therefore we can try to fetch and make sure BB updates correctly + await client.schema.alterTable("nullableTable", table => { + table.setNullable("order_number") + }) + + const responseAfter = await config.api.datasource.fetchSchema({ + datasourceId: datasource._id!, + }) + const entitiesAfter = responseAfter.datasource.entities + expect(entitiesAfter).toBeDefined() + const nullableTableAfter = entitiesAfter?.["nullableTable"] + expect(nullableTableAfter).toBeDefined() + expect( + nullableTableAfter?.schema["order_number"].constraints?.presence + ).toBeUndefined() + }) + }) }) diff --git a/packages/server/src/integrations/base/query.ts b/packages/server/src/integrations/base/query.ts index 371592bece..55886cd20f 100644 --- a/packages/server/src/integrations/base/query.ts +++ b/packages/server/src/integrations/base/query.ts @@ -22,6 +22,9 @@ export async function makeExternalQuery( ) { throw new Error("Entity ID and table metadata do not align") } + if (!datasource) { + throw new Error("No datasource provided for external query") + } datasource = await sdk.datasources.enrich(datasource) const Integration = await getIntegration(datasource.source) // query is the opinionated function diff --git a/packages/server/src/integrations/googlesheets.ts b/packages/server/src/integrations/googlesheets.ts index 2da0c2c201..9dee9b2a53 100644 --- a/packages/server/src/integrations/googlesheets.ts +++ b/packages/server/src/integrations/googlesheets.ts @@ -566,7 +566,7 @@ class GoogleSheetsIntegration implements DatasourcePlus { query.filters.equal[`_${GOOGLE_SHEETS_PRIMARY_KEY}`] = id } } - let filtered = dataFilters.runLuceneQuery(rows, query.filters) + let filtered = dataFilters.runQuery(rows, 
query.filters || {}) if (hasFilters && query.paginate) { filtered = filtered.slice(offset, offset + limit) } @@ -585,7 +585,7 @@ class GoogleSheetsIntegration implements DatasourcePlus { }) } const [sortField, sortInfo] = Object.entries(query.sort)[0] - response = dataFilters.luceneSort( + response = dataFilters.sort( response, sortField, sortInfo.direction, diff --git a/packages/server/src/integrations/rest.ts b/packages/server/src/integrations/rest.ts index 451c319aa9..86c059bc82 100644 --- a/packages/server/src/integrations/rest.ts +++ b/packages/server/src/integrations/rest.ts @@ -149,13 +149,12 @@ class RestIntegration implements IntegrationBase { { downloadImages: this.config.downloadImages } ) let contentLength = response.headers.get("content-length") - if (!contentLength && raw) { - contentLength = Buffer.byteLength(raw, "utf8").toString() - } + let isSuccess = response.status >= 200 && response.status < 300 if ( - contentDisposition.includes("filename") || - contentDisposition.includes("attachment") || - contentDisposition.includes("form-data") + (contentDisposition.includes("filename") || + contentDisposition.includes("attachment") || + contentDisposition.includes("form-data")) && + isSuccess ) { filename = path.basename(parse(contentDisposition).parameters?.filename) || "" @@ -168,6 +167,9 @@ class RestIntegration implements IntegrationBase { return handleFileResponse(response, filename, this.startTimeMs) } else { responseTxt = response.text ? await response.text() : "" + if (!contentLength && responseTxt) { + contentLength = Buffer.byteLength(responseTxt, "utf8").toString() + } const hasContent = (contentLength && parseInt(contentLength) > 0) || responseTxt.length > 0 diff --git a/packages/server/src/integrations/tests/rest.spec.ts b/packages/server/src/integrations/tests/rest.spec.ts index f20f369c25..dee17a5497 100644 --- a/packages/server/src/integrations/tests/rest.spec.ts +++ b/packages/server/src/integrations/tests/rest.spec.ts @@ -657,6 +657,7 @@ describe("REST Integration", () => { mockReadable.push(null) ;(fetch as unknown as jest.Mock).mockImplementationOnce(() => Promise.resolve({ + status: 200, headers: { raw: () => ({ "content-type": [contentType], @@ -700,6 +701,7 @@ describe("REST Integration", () => { mockReadable.push(null) ;(fetch as unknown as jest.Mock).mockImplementationOnce(() => Promise.resolve({ + status: 200, headers: { raw: () => ({ "content-type": [contentType], diff --git a/packages/server/src/integrations/tests/sql.spec.ts b/packages/server/src/integrations/tests/sql.spec.ts index cad1b346c0..b595508093 100644 --- a/packages/server/src/integrations/tests/sql.spec.ts +++ b/packages/server/src/integrations/tests/sql.spec.ts @@ -142,7 +142,7 @@ describe("SQL query builder", () => { const query = sql._query(generateRelationshipJson({ schema: "production" })) expect(query).toEqual({ bindings: [500, 5000], - sql: `select "brands"."brand_id" as "brands.brand_id", "brands"."brand_name" as "brands.brand_name", "products"."product_id" as "products.product_id", "products"."product_name" as "products.product_name", "products"."brand_id" as "products.brand_id" from (select * from "production"."brands" limit $1) as "brands" left join "production"."products" as "products" on "brands"."brand_id" = "products"."brand_id" limit $2`, + sql: `select "brands"."brand_id" as "brands.brand_id", "brands"."brand_name" as "brands.brand_name", "products"."product_id" as "products.product_id", "products"."product_name" as "products.product_name", "products"."brand_id" as 
"products.brand_id" from (select * from "production"."brands" order by "test"."id" asc limit $1) as "brands" left join "production"."products" as "products" on "brands"."brand_id" = "products"."brand_id" order by "test"."id" asc limit $2`, }) }) @@ -150,7 +150,7 @@ describe("SQL query builder", () => { const query = sql._query(generateRelationshipJson()) expect(query).toEqual({ bindings: [500, 5000], - sql: `select "brands"."brand_id" as "brands.brand_id", "brands"."brand_name" as "brands.brand_name", "products"."product_id" as "products.product_id", "products"."product_name" as "products.product_name", "products"."brand_id" as "products.brand_id" from (select * from "brands" limit $1) as "brands" left join "products" as "products" on "brands"."brand_id" = "products"."brand_id" limit $2`, + sql: `select "brands"."brand_id" as "brands.brand_id", "brands"."brand_name" as "brands.brand_name", "products"."product_id" as "products.product_id", "products"."product_name" as "products.product_name", "products"."brand_id" as "products.brand_id" from (select * from "brands" order by "test"."id" asc limit $1) as "brands" left join "products" as "products" on "brands"."brand_id" = "products"."brand_id" order by "test"."id" asc limit $2`, }) }) @@ -160,7 +160,7 @@ describe("SQL query builder", () => { ) expect(query).toEqual({ bindings: [500, 5000], - sql: `select "stores"."store_id" as "stores.store_id", "stores"."store_name" as "stores.store_name", "products"."product_id" as "products.product_id", "products"."product_name" as "products.product_name" from (select * from "production"."stores" limit $1) as "stores" left join "production"."stocks" as "stocks" on "stores"."store_id" = "stocks"."store_id" left join "production"."products" as "products" on "products"."product_id" = "stocks"."product_id" limit $2`, + sql: `select "stores"."store_id" as "stores.store_id", "stores"."store_name" as "stores.store_name", "products"."product_id" as "products.product_id", "products"."product_name" as "products.product_name" from (select * from "production"."stores" order by "test"."id" asc limit $1) as "stores" left join "production"."stocks" as "stocks" on "stores"."store_id" = "stocks"."store_id" left join "production"."products" as "products" on "products"."product_id" = "stocks"."product_id" order by "test"."id" asc limit $2`, }) }) @@ -175,8 +175,8 @@ describe("SQL query builder", () => { }) ) expect(query).toEqual({ - bindings: ["john%", limit], - sql: `select * from (select * from (select * from "test" where LOWER("test"."name") LIKE :1) where rownum <= :2) "test"`, + bindings: ["john%", limit, 5000], + sql: `select * from (select * from (select * from (select * from "test" where LOWER("test"."name") LIKE :1 order by "test"."id" asc) where rownum <= :2) "test" order by "test"."id" asc) where rownum <= :3`, }) query = new Sql(SqlClient.ORACLE, limit)._query( @@ -190,8 +190,8 @@ describe("SQL query builder", () => { }) ) expect(query).toEqual({ - bindings: ["%20%", "%25%", `%"john"%`, `%"mary"%`, limit], - sql: `select * from (select * from (select * from "test" where (COALESCE(LOWER("test"."age"), '') LIKE :1 AND COALESCE(LOWER("test"."age"), '') LIKE :2) and (COALESCE(LOWER("test"."name"), '') LIKE :3 AND COALESCE(LOWER("test"."name"), '') LIKE :4)) where rownum <= :5) "test"`, + bindings: ["%20%", "%25%", `%"john"%`, `%"mary"%`, limit, 5000], + sql: `select * from (select * from (select * from (select * from "test" where (COALESCE(LOWER("test"."age"), '') LIKE :1 AND COALESCE(LOWER("test"."age"), '') LIKE 
:2) and (COALESCE(LOWER("test"."name"), '') LIKE :3 AND COALESCE(LOWER("test"."name"), '') LIKE :4) order by "test"."id" asc) where rownum <= :5) "test" order by "test"."id" asc) where rownum <= :6`, }) query = new Sql(SqlClient.ORACLE, limit)._query( @@ -204,8 +204,8 @@ describe("SQL query builder", () => { }) ) expect(query).toEqual({ - bindings: [`%jo%`, limit], - sql: `select * from (select * from (select * from "test" where LOWER("test"."name") LIKE :1) where rownum <= :2) "test"`, + bindings: [`%jo%`, limit, 5000], + sql: `select * from (select * from (select * from (select * from "test" where LOWER("test"."name") LIKE :1 order by "test"."id" asc) where rownum <= :2) "test" order by "test"."id" asc) where rownum <= :3`, }) }) }) diff --git a/packages/server/src/integrations/tests/sqlAlias.spec.ts b/packages/server/src/integrations/tests/sqlAlias.spec.ts index 67f3d1d05d..0b433896bf 100644 --- a/packages/server/src/integrations/tests/sqlAlias.spec.ts +++ b/packages/server/src/integrations/tests/sqlAlias.spec.ts @@ -57,15 +57,14 @@ describe("Captures of real examples", () => { let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson) expect(query).toEqual({ bindings: [relationshipLimit, limit], - sql: multiline(`select "a"."year" as "a.year", "a"."firstname" as "a.firstname", "a"."personid" as "a.personid", + sql: expect.stringContaining( + multiline(`select "a"."year" as "a.year", "a"."firstname" as "a.firstname", "a"."personid" as "a.personid", "a"."address" as "a.address", "a"."age" as "a.age", "a"."type" as "a.type", "a"."city" as "a.city", "a"."lastname" as "a.lastname", "b"."executorid" as "b.executorid", "b"."taskname" as "b.taskname", "b"."taskid" as "b.taskid", "b"."completed" as "b.completed", "b"."qaid" as "b.qaid", "b"."executorid" as "b.executorid", "b"."taskname" as "b.taskname", "b"."taskid" as "b.taskid", - "b"."completed" as "b.completed", "b"."qaid" as "b.qaid" - from (select * from "persons" as "a" order by "a"."firstname" asc nulls first limit $1) as "a" - left join "tasks" as "b" on "a"."personid" = "b"."qaid" or "a"."personid" = "b"."executorid" - order by "a"."firstname" asc nulls first limit $2`), + "b"."completed" as "b.completed", "b"."qaid" as "b.qaid"`) + ), }) }) @@ -74,13 +73,10 @@ describe("Captures of real examples", () => { let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson) expect(query).toEqual({ bindings: [relationshipLimit, "assembling", limit], - sql: multiline(`select "a"."productname" as "a.productname", "a"."productid" as "a.productid", - "b"."executorid" as "b.executorid", "b"."taskname" as "b.taskname", "b"."taskid" as "b.taskid", - "b"."completed" as "b.completed", "b"."qaid" as "b.qaid" - from (select * from "products" as "a" order by "a"."productname" asc nulls first limit $1) as "a" - left join "products_tasks" as "c" on "a"."productid" = "c"."productid" - left join "tasks" as "b" on "b"."taskid" = "c"."taskid" where COALESCE("b"."taskname" = $2, FALSE) - order by "a"."productname" asc nulls first limit $3`), + sql: expect.stringContaining( + multiline(`where COALESCE("b"."taskname" = $2, FALSE) + order by "a"."productname" asc nulls first, "a"."productid" asc limit $3`) + ), }) }) @@ -89,13 +85,10 @@ describe("Captures of real examples", () => { let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson) expect(query).toEqual({ bindings: [relationshipLimit, limit], - sql: multiline(`select "a"."productname" as "a.productname", "a"."productid" as "a.productid", - "b"."executorid" as "b.executorid", "b"."taskname" 
as "b.taskname", "b"."taskid" as "b.taskid", - "b"."completed" as "b.completed", "b"."qaid" as "b.qaid" - from (select * from "products" as "a" order by "a"."productname" asc nulls first limit $1) as "a" - left join "products_tasks" as "c" on "a"."productid" = "c"."productid" - left join "tasks" as "b" on "b"."taskid" = "c"."taskid" - order by "a"."productname" asc nulls first limit $2`), + sql: expect.stringContaining( + multiline(`left join "products_tasks" as "c" on "a"."productid" = "c"."productid" + left join "tasks" as "b" on "b"."taskid" = "c"."taskid" `) + ), }) }) @@ -106,11 +99,11 @@ describe("Captures of real examples", () => { expect(query).toEqual({ bindings: [...filters, limit, limit], sql: multiline(`select "a"."executorid" as "a.executorid", "a"."taskname" as "a.taskname", - "a"."taskid" as "a.taskid", "a"."completed" as "a.completed", "a"."qaid" as "a.qaid", - "b"."productname" as "b.productname", "b"."productid" as "b.productid" - from (select * from "tasks" as "a" where "a"."taskid" in ($1, $2) limit $3) as "a" - left join "products_tasks" as "c" on "a"."taskid" = "c"."taskid" - left join "products" as "b" on "b"."productid" = "c"."productid" limit $4`), + "a"."taskid" as "a.taskid", "a"."completed" as "a.completed", "a"."qaid" as "a.qaid", + "b"."productname" as "b.productname", "b"."productid" as "b.productid" + from (select * from "tasks" as "a" where "a"."taskid" in ($1, $2) order by "a"."taskid" asc limit $3) as "a" + left join "products_tasks" as "c" on "a"."taskid" = "c"."taskid" + left join "products" as "b" on "b"."productid" = "c"."productid" order by "a"."taskid" asc limit $4`), }) }) @@ -132,19 +125,11 @@ describe("Captures of real examples", () => { equalValue, limit, ], - sql: multiline(`select "a"."executorid" as "a.executorid", "a"."taskname" as "a.taskname", "a"."taskid" as "a.taskid", - "a"."completed" as "a.completed", "a"."qaid" as "a.qaid", "b"."productname" as "b.productname", - "b"."productid" as "b.productid", "c"."year" as "c.year", "c"."firstname" as "c.firstname", - "c"."personid" as "c.personid", "c"."address" as "c.address", "c"."age" as "c.age", "c"."type" as "c.type", - "c"."city" as "c.city", "c"."lastname" as "c.lastname", "c"."year" as "c.year", "c"."firstname" as "c.firstname", - "c"."personid" as "c.personid", "c"."address" as "c.address", "c"."age" as "c.age", "c"."type" as "c.type", - "c"."city" as "c.city", "c"."lastname" as "c.lastname" - from (select * from "tasks" as "a" where COALESCE("a"."completed" != $1, TRUE) - order by "a"."taskname" asc nulls first limit $2) as "a" - left join "products_tasks" as "d" on "a"."taskid" = "d"."taskid" - left join "products" as "b" on "b"."productid" = "d"."productid" - left join "persons" as "c" on "a"."executorid" = "c"."personid" or "a"."qaid" = "c"."personid" - where "c"."year" between $3 and $4 and COALESCE("b"."productname" = $5, FALSE) order by "a"."taskname" asc nulls first limit $6`), + sql: expect.stringContaining( + multiline( + `where "c"."year" between $3 and $4 and COALESCE("b"."productname" = $5, FALSE)` + ) + ), }) }) }) @@ -200,8 +185,9 @@ describe("Captures of real examples", () => { returningQuery = input }, queryJson) expect(returningQuery).toEqual({ - sql: "select * from (select top (@p0) * from [people] where CASE WHEN [people].[name] = @p1 THEN 1 ELSE 0 END = 1 and CASE WHEN [people].[age] = @p2 THEN 1 ELSE 0 END = 1 order by [people].[name] asc) as [people]", - bindings: [1, "Test", 22], + sql: multiline(`select top (@p0) * from (select top (@p1) * from [people] where 
CASE WHEN [people].[name] = @p2 + THEN 1 ELSE 0 END = 1 and CASE WHEN [people].[age] = @p3 THEN 1 ELSE 0 END = 1 order by [people].[name] asc) as [people]`), + bindings: [5000, 1, "Test", 22], }) }) }) diff --git a/packages/server/src/integrations/tests/sqlQueryJson/basicFetchWithRelationships.json b/packages/server/src/integrations/tests/sqlQueryJson/basicFetchWithRelationships.json index ba7fa4ef9b..aa2cab3afd 100644 --- a/packages/server/src/integrations/tests/sqlQueryJson/basicFetchWithRelationships.json +++ b/packages/server/src/integrations/tests/sqlQueryJson/basicFetchWithRelationships.json @@ -29,7 +29,7 @@ "filters": {}, "sort": { "firstname": { - "direction": "ASCENDING" + "direction": "ascending" } }, "paginate": { @@ -65,9 +65,7 @@ "table": { "type": "table", "_id": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__persons", - "primary": [ - "personid" - ], + "primary": ["personid"], "name": "persons", "schema": { "year": { @@ -122,12 +120,7 @@ "name": "type", "constraints": { "presence": false, - "inclusion": [ - "support", - "designer", - "programmer", - "qa" - ] + "inclusion": ["support", "designer", "programmer", "qa"] } }, "city": { @@ -180,4 +173,4 @@ "persons": "a", "tasks": "b" } -} \ No newline at end of file +} diff --git a/packages/server/src/integrations/tests/sqlQueryJson/fetchManyToMany.json b/packages/server/src/integrations/tests/sqlQueryJson/fetchManyToMany.json index cec2fdb025..d81c8cf6d0 100644 --- a/packages/server/src/integrations/tests/sqlQueryJson/fetchManyToMany.json +++ b/packages/server/src/integrations/tests/sqlQueryJson/fetchManyToMany.json @@ -30,7 +30,7 @@ }, "sort": { "productname": { - "direction": "ASCENDING" + "direction": "ascending" } }, "paginate": { @@ -60,9 +60,7 @@ "table": { "type": "table", "_id": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81__products", - "primary": [ - "productid" - ], + "primary": ["productid"], "name": "products", "schema": { "productname": { @@ -106,4 +104,4 @@ "tasks": "b", "products_tasks": "c" } -} \ No newline at end of file +} diff --git a/packages/server/src/integrations/tests/sqlQueryJson/filterByRelationship.json b/packages/server/src/integrations/tests/sqlQueryJson/filterByRelationship.json index 399cb0f4d2..36010bf61d 100644 --- a/packages/server/src/integrations/tests/sqlQueryJson/filterByRelationship.json +++ b/packages/server/src/integrations/tests/sqlQueryJson/filterByRelationship.json @@ -23,7 +23,7 @@ }, "sort": { "productname": { - "direction": "ASCENDING" + "direction": "ascending" } }, "paginate": { @@ -50,9 +50,7 @@ "table": { "type": "table", "_id": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__products", - "primary": [ - "productid" - ], + "primary": ["productid"], "name": "products", "schema": { "productname": { @@ -91,4 +89,4 @@ "primaryDisplay": "productname" } } -} \ No newline at end of file +} diff --git a/packages/server/src/integrations/tests/sqlQueryJson/manyRelationshipFilters.json b/packages/server/src/integrations/tests/sqlQueryJson/manyRelationshipFilters.json index 2b5d156546..01b85f433f 100644 --- a/packages/server/src/integrations/tests/sqlQueryJson/manyRelationshipFilters.json +++ b/packages/server/src/integrations/tests/sqlQueryJson/manyRelationshipFilters.json @@ -56,7 +56,7 @@ }, "sort": { "taskname": { - "direction": "ASCENDING" + "direction": "ascending" } }, "paginate": { @@ -106,9 +106,7 @@ "table": { "type": "table", "_id": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81__tasks", - "primary": [ - "taskid" - ], + "primary": ["taskid"], "name": "tasks", 
"schema": { "executorid": { @@ -199,4 +197,4 @@ "persons": "c", "products_tasks": "d" } -} \ No newline at end of file +} diff --git a/packages/server/src/integrations/tests/utils/index.ts b/packages/server/src/integrations/tests/utils/index.ts index a54d0ac1a7..b888f1adc1 100644 --- a/packages/server/src/integrations/tests/utils/index.ts +++ b/packages/server/src/integrations/tests/utils/index.ts @@ -4,8 +4,9 @@ import * as mongodb from "./mongodb" import * as mysql from "./mysql" import * as mssql from "./mssql" import * as mariadb from "./mariadb" -import { GenericContainer } from "testcontainers" +import { GenericContainer, StartedTestContainer } from "testcontainers" import { testContainerUtils } from "@budibase/backend-core/tests" +import cloneDeep from "lodash/cloneDeep" export type DatasourceProvider = () => Promise @@ -47,16 +48,16 @@ export async function getDatasources( return Promise.all(sourceNames.map(sourceName => providers[sourceName]())) } -export async function rawQuery(ds: Datasource, sql: string): Promise { +export async function knexClient(ds: Datasource) { switch (ds.source) { case SourceName.POSTGRES: { - return postgres.rawQuery(ds, sql) + return postgres.knexClient(ds) } case SourceName.MYSQL: { - return mysql.rawQuery(ds, sql) + return mysql.knexClient(ds) } case SourceName.SQL_SERVER: { - return mssql.rawQuery(ds, sql) + return mssql.knexClient(ds) } default: { throw new Error(`Unsupported source: ${ds.source}`) @@ -65,9 +66,39 @@ export async function rawQuery(ds: Datasource, sql: string): Promise { } export async function startContainer(container: GenericContainer) { - container = container.withReuse().withLabels({ "com.budibase": "true" }) + const imageName = (container as any).imageName.string as string + const key = imageName.replaceAll("/", "-").replaceAll(":", "-") - const startedContainer = await container.start() + container = container + .withReuse() + .withLabels({ "com.budibase": "true" }) + .withName(key) + + let startedContainer: StartedTestContainer | undefined = undefined + let lastError = undefined + for (let i = 0; i < 10; i++) { + try { + // container.start() is not an idempotent operation, calling `start` + // modifies the internal state of a GenericContainer instance such that + // the hash it uses to determine reuse changes. We need to clone the + // container before calling start to ensure that we're using the same + // reuse hash every time. + const containerCopy = cloneDeep(container) + startedContainer = await containerCopy.start() + lastError = undefined + break + } catch (e: any) { + lastError = e + await new Promise(resolve => setTimeout(resolve, 1000)) + } + } + + if (!startedContainer) { + if (lastError) { + throw lastError + } + throw new Error(`failed to start container: ${imageName}`) + } const info = testContainerUtils.getContainerById(startedContainer.getId()) if (!info) { diff --git a/packages/server/src/integrations/tests/utils/mariadb.ts b/packages/server/src/integrations/tests/utils/mariadb.ts index fcd79b8e56..3a90b554ee 100644 --- a/packages/server/src/integrations/tests/utils/mariadb.ts +++ b/packages/server/src/integrations/tests/utils/mariadb.ts @@ -1,9 +1,9 @@ import { Datasource, SourceName } from "@budibase/types" import { GenericContainer, Wait } from "testcontainers" import { AbstractWaitStrategy } from "testcontainers/build/wait-strategies/wait-strategy" -import { rawQuery } from "./mysql" import { generator, testContainerUtils } from "@budibase/backend-core/tests" import { startContainer } from "." 
+import { knexClient } from "./mysql" let ports: Promise @@ -18,7 +18,7 @@ class MariaDBWaitStrategy extends AbstractWaitStrategy { await logs.waitUntilReady(container, boundPorts, startTime) const command = Wait.forSuccessfulCommand( - `mysqladmin ping -h localhost -P 3306 -u root -ppassword` + `/usr/local/bin/healthcheck.sh --innodb_initialized` ) await command.waitUntilReady(container) } @@ -55,7 +55,8 @@ export async function getDatasource(): Promise { } const database = generator.guid().replaceAll("-", "") - await rawQuery(datasource, `CREATE DATABASE \`${database}\``) + const client = await knexClient(datasource) + await client.raw(`CREATE DATABASE \`${database}\``) datasource.config.database = database return datasource } diff --git a/packages/server/src/integrations/tests/utils/mssql.ts b/packages/server/src/integrations/tests/utils/mssql.ts index 647f461272..a4bd5448f4 100644 --- a/packages/server/src/integrations/tests/utils/mssql.ts +++ b/packages/server/src/integrations/tests/utils/mssql.ts @@ -1,8 +1,8 @@ import { Datasource, SourceName } from "@budibase/types" import { GenericContainer, Wait } from "testcontainers" -import mssql from "mssql" import { generator, testContainerUtils } from "@budibase/backend-core/tests" import { startContainer } from "." +import knex from "knex" let ports: Promise @@ -29,6 +29,9 @@ export async function getDatasource(): Promise { } const port = (await ports).find(x => x.container === 1433)?.host + if (!port) { + throw new Error("SQL Server port not found") + } const datasource: Datasource = { type: "datasource_plus", @@ -46,26 +49,23 @@ export async function getDatasource(): Promise { } const database = generator.guid().replaceAll("-", "") - await rawQuery(datasource, `CREATE DATABASE "${database}"`) + const client = await knexClient(datasource) + await client.raw(`CREATE DATABASE "${database}"`) datasource.config!.database = database return datasource } -export async function rawQuery(ds: Datasource, sql: string) { +export async function knexClient(ds: Datasource) { if (!ds.config) { throw new Error("Datasource config is missing") } if (ds.source !== SourceName.SQL_SERVER) { - throw new Error("Datasource source is not SQL Server") + throw new Error("Datasource source is not MSSQL") } - const pool = new mssql.ConnectionPool(ds.config! as mssql.config) - const client = await pool.connect() - try { - const { recordset } = await client.query(sql) - return recordset - } finally { - await pool.close() - } + return knex({ + client: "mssql", + connection: ds.config, + }) } diff --git a/packages/server/src/integrations/tests/utils/mysql.ts b/packages/server/src/integrations/tests/utils/mysql.ts index a78833e1de..c35be0689e 100644 --- a/packages/server/src/integrations/tests/utils/mysql.ts +++ b/packages/server/src/integrations/tests/utils/mysql.ts @@ -1,9 +1,9 @@ import { Datasource, SourceName } from "@budibase/types" import { GenericContainer, Wait } from "testcontainers" import { AbstractWaitStrategy } from "testcontainers/build/wait-strategies/wait-strategy" -import mysql from "mysql2/promise" import { generator, testContainerUtils } from "@budibase/backend-core/tests" import { startContainer } from "." 
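Each database helper now exposes a `knexClient` factory instead of a one-shot `rawQuery`, so callers hold a reusable handle. A sketch of the calling pattern, with `datasource` and `database` assumed to come from the surrounding helpers; note that identifier quoting follows the dialect, as in the hunks above:

    const client = await knexClient(datasource)
    // MySQL/MariaDB quote identifiers with backticks...
    await client.raw(`CREATE DATABASE \`${database}\``)
    // ...while Postgres and SQL Server use double quotes:
    // await client.raw(`CREATE DATABASE "${database}"`)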
+import knex from "knex" let ports: Promise @@ -38,6 +38,9 @@ export async function getDatasource(): Promise { } const port = (await ports).find(x => x.container === 3306)?.host + if (!port) { + throw new Error("MySQL port not found") + } const datasource: Datasource = { type: "datasource_plus", @@ -53,12 +56,13 @@ export async function getDatasource(): Promise { } const database = generator.guid().replaceAll("-", "") - await rawQuery(datasource, `CREATE DATABASE \`${database}\``) + const client = await knexClient(datasource) + await client.raw(`CREATE DATABASE \`${database}\``) datasource.config!.database = database return datasource } -export async function rawQuery(ds: Datasource, sql: string) { +export async function knexClient(ds: Datasource) { if (!ds.config) { throw new Error("Datasource config is missing") } @@ -66,11 +70,8 @@ export async function rawQuery(ds: Datasource, sql: string) { throw new Error("Datasource source is not MySQL") } - const connection = await mysql.createConnection(ds.config) - try { - const [rows] = await connection.query(sql) - return rows - } finally { - connection.end() - } + return knex({ + client: "mysql2", + connection: ds.config, + }) } diff --git a/packages/server/src/integrations/tests/utils/postgres.ts b/packages/server/src/integrations/tests/utils/postgres.ts index 4191b107e9..74f5722737 100644 --- a/packages/server/src/integrations/tests/utils/postgres.ts +++ b/packages/server/src/integrations/tests/utils/postgres.ts @@ -1,8 +1,8 @@ import { Datasource, SourceName } from "@budibase/types" import { GenericContainer, Wait } from "testcontainers" -import pg from "pg" import { generator, testContainerUtils } from "@budibase/backend-core/tests" import { startContainer } from "." +import knex from "knex" let ports: Promise @@ -21,6 +21,9 @@ export async function getDatasource(): Promise { } const port = (await ports).find(x => x.container === 5432)?.host + if (!port) { + throw new Error("Postgres port not found") + } const datasource: Datasource = { type: "datasource_plus", @@ -40,13 +43,14 @@ export async function getDatasource(): Promise { } const database = generator.guid().replaceAll("-", "") - await rawQuery(datasource, `CREATE DATABASE "${database}"`) + const client = await knexClient(datasource) + await client.raw(`CREATE DATABASE "${database}"`) datasource.config!.database = database return datasource } -export async function rawQuery(ds: Datasource, sql: string) { +export async function knexClient(ds: Datasource) { if (!ds.config) { throw new Error("Datasource config is missing") } @@ -54,12 +58,8 @@ export async function rawQuery(ds: Datasource, sql: string) { throw new Error("Datasource source is not Postgres") } - const client = new pg.Client(ds.config) - await client.connect() - try { - const { rows } = await client.query(sql) - return rows - } finally { - await client.end() - } + return knex({ + client: "pg", + connection: ds.config, + }) } diff --git a/packages/server/src/integrations/utils/utils.ts b/packages/server/src/integrations/utils/utils.ts index 9f04457d7a..b97782ce7e 100644 --- a/packages/server/src/integrations/utils/utils.ts +++ b/packages/server/src/integrations/utils/utils.ts @@ -9,10 +9,12 @@ import { context, objectStore, sql } from "@budibase/backend-core" import { v4 } from "uuid" import { parseStringPromise as xmlParser } from "xml2js" import { formatBytes } from "../../utilities" -import bl from "bl" import env from "../../environment" import { InvalidColumns } from "../../constants" import { helpers, utils } from 
"@budibase/shared-core" +import { pipeline } from "stream/promises" +import tmp from "tmp" +import fs from "fs" type PrimitiveTypes = | FieldType.STRING @@ -278,12 +280,35 @@ function copyExistingPropsOver( utils.unreachable(existingColumnType) } + // copy the BB schema in case of special props if (shouldKeepSchema) { + const fetchedColumnDefinition: FieldSchema | undefined = + table.schema[key] table.schema[key] = { ...existingTableSchema[key], externalType: existingTableSchema[key].externalType || table.schema[key]?.externalType, + autocolumn: fetchedColumnDefinition?.autocolumn, + } as FieldSchema + // check constraints which can be fetched from the DB (they could be updated) + if (fetchedColumnDefinition?.constraints) { + // inclusions are the enum values (select/options) + const fetchedConstraints = fetchedColumnDefinition.constraints + const oldConstraints = table.schema[key].constraints + table.schema[key].constraints = { + ...table.schema[key].constraints, + inclusion: fetchedConstraints.inclusion?.length + ? fetchedConstraints.inclusion + : oldConstraints?.inclusion, + } + // true or undefined - consistent with old API + if (fetchedConstraints.presence) { + table.schema[key].constraints!.presence = + fetchedConstraints.presence + } else if (oldConstraints?.presence === true) { + delete table.schema[key].constraints?.presence + } } } } @@ -360,35 +385,44 @@ export async function handleFileResponse( const key = `${context.getProdAppId()}/${processedFileName}` const bucket = objectStore.ObjectStoreBuckets.TEMP - const stream = response.body.pipe(bl((error, data) => data)) + // put the response stream to disk temporarily as a buffer + const tmpObj = tmp.fileSync() + try { + await pipeline(response.body, fs.createWriteStream(tmpObj.name)) + if (response.body) { + const contentLength = response.headers.get("content-length") + if (contentLength) { + size = parseInt(contentLength, 10) + } - if (response.body) { - const contentLength = response.headers.get("content-length") - if (contentLength) { - size = parseInt(contentLength, 10) + const details = await objectStore.streamUpload({ + bucket, + filename: key, + stream: fs.createReadStream(tmpObj.name), + ttl: 1, + type: response.headers["content-type"], + }) + if (!size && details.ContentLength) { + size = details.ContentLength + } } - - await objectStore.streamUpload({ - bucket, - filename: key, - stream, - ttl: 1, - type: response.headers["content-type"], - }) - } - presignedUrl = objectStore.getPresignedUrl(bucket, key) - return { - data: { - size, - name: processedFileName, - url: presignedUrl, - extension: fileExtension, - key: key, - }, - info: { - code: response.status, - size: formatBytes(size.toString()), - time: `${Math.round(performance.now() - startTime)}ms`, - }, + presignedUrl = objectStore.getPresignedUrl(bucket, key) + return { + data: { + size, + name: processedFileName, + url: presignedUrl, + extension: fileExtension, + key: key, + }, + info: { + code: response.status, + size: formatBytes(size.toString()), + time: `${Math.round(performance.now() - startTime)}ms`, + }, + } + } finally { + // cleanup tmp + tmpObj.removeCallback() } } diff --git a/packages/server/src/jsRunner/tests/jsRunner.spec.ts b/packages/server/src/jsRunner/tests/jsRunner.spec.ts index 7448555aac..9d2bd7104d 100644 --- a/packages/server/src/jsRunner/tests/jsRunner.spec.ts +++ b/packages/server/src/jsRunner/tests/jsRunner.spec.ts @@ -91,8 +91,13 @@ describe("jsRunner (using isolated-vm)", () => { }) it("handle test case 2", async () => { + const 
todayDate = new Date() + // add a year and a month + todayDate.setMonth(new Date().getMonth() + 1) + todayDate.setFullYear(todayDate.getFullYear() + 1) const context = { "Purchase Date": DATE, + today: todayDate.toISOString(), } const result = await processJS( ` @@ -100,7 +105,7 @@ describe("jsRunner (using isolated-vm)", () => { let purchaseyear = purchase.getFullYear(); let purchasemonth = purchase.getMonth(); - var today = new Date (); + var today = new Date($("today")); let todayyear = today.getFullYear(); let todaymonth = today.getMonth(); @@ -113,7 +118,7 @@ describe("jsRunner (using isolated-vm)", () => { context ) expect(result).toBeDefined() - expect(result).toBe(3) + expect(result).toBe(1) }) it("should handle test case 3", async () => { diff --git a/packages/server/src/middleware/appMigrations.ts b/packages/server/src/middleware/appMigrations.ts index 36e021c7ed..6ad356427b 100644 --- a/packages/server/src/middleware/appMigrations.ts +++ b/packages/server/src/middleware/appMigrations.ts @@ -1,9 +1,16 @@ import { UserCtx } from "@budibase/types" import { checkMissingMigrations } from "../appMigrations" +import env from "../environment" export default async (ctx: UserCtx, next: any) => { const { appId } = ctx + // migrations can be disabled via environment variable if you + // need to completely disable migrations, e.g. for testing + if (env.DISABLE_APP_MIGRATIONS) { + return next() + } + if (!appId) { return next() } diff --git a/packages/server/src/sdk/app/rows/index.ts b/packages/server/src/sdk/app/rows/index.ts index c117941419..fb077509a9 100644 --- a/packages/server/src/sdk/app/rows/index.ts +++ b/packages/server/src/sdk/app/rows/index.ts @@ -3,12 +3,14 @@ import * as rows from "./rows" import * as search from "./search" import * as utils from "./utils" import * as external from "./external" +import * as filters from "./search/filters" import AliasTables from "./sqlAlias" export default { ...attachments, ...rows, ...search, + filters, utils, external, AliasTables, diff --git a/packages/server/src/sdk/app/rows/search.ts b/packages/server/src/sdk/app/rows/search.ts index e347a8657d..286a88054c 100644 --- a/packages/server/src/sdk/app/rows/search.ts +++ b/packages/server/src/sdk/app/rows/search.ts @@ -2,8 +2,10 @@ import { EmptyFilterOption, Row, RowSearchParams, + SearchFilterOperator, SearchFilters, SearchResponse, + SortOrder, } from "@budibase/types" import { isExternalTableID } from "../../../integrations/utils" import * as internal from "./search/internal" @@ -64,11 +66,37 @@ export function removeEmptyFilters(filters: SearchFilters) { return filters } +// The frontend can send single values for array fields sometimes, so to handle +// this we convert them to arrays at the controller level so that nothing below +// this has to worry about the non-array values. 
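As a concrete illustration of the comment above (field name and value are made up), the helper that follows turns a single value into a one-element array before anything downstream runs:

    const filters: SearchFilters = { oneOf: { tags: "urgent" as any } }
    fixupFilterArrays(filters) // filters.oneOf.tags is now ["urgent"]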
+function fixupFilterArrays(filters: SearchFilters) { + const arrayFields = [ + SearchFilterOperator.ONE_OF, + SearchFilterOperator.CONTAINS, + SearchFilterOperator.NOT_CONTAINS, + SearchFilterOperator.CONTAINS_ANY, + ] + for (const searchField of arrayFields) { + const field = filters[searchField] + if (field == null) { + continue + } + + for (const key of Object.keys(field)) { + if (!Array.isArray(field[key])) { + field[key] = [field[key]] + } + } + } + return filters +} + export async function search( options: RowSearchParams ): Promise> { const isExternalTable = isExternalTableID(options.tableId) options.query = removeEmptyFilters(options.query || {}) + options.query = fixupFilterArrays(options.query) if ( !dataFilters.hasFilters(options.query) && options.query.onEmptyFilter === EmptyFilterOption.RETURN_NONE @@ -78,6 +106,10 @@ export async function search( } } + if (options.sortOrder) { + options.sortOrder = options.sortOrder.toLowerCase() as SortOrder + } + const table = await sdk.tables.getTable(options.tableId) options = searchInputMapping(table, options) diff --git a/packages/server/src/sdk/app/rows/search/external.ts b/packages/server/src/sdk/app/rows/search/external.ts index 077f971903..93c46d8cc3 100644 --- a/packages/server/src/sdk/app/rows/search/external.ts +++ b/packages/server/src/sdk/app/rows/search/external.ts @@ -1,13 +1,13 @@ import { - SortJson, - SortDirection, + IncludeRelationship, Operation, PaginationJson, - IncludeRelationship, Row, - SearchFilters, RowSearchParams, + SearchFilters, SearchResponse, + SortJson, + SortOrder, Table, } from "@budibase/types" import * as exporters from "../../../../api/controllers/view/exporters" @@ -18,7 +18,7 @@ import { } from "../../../../integrations/utils" import { utils } from "@budibase/shared-core" import { ExportRowsParams, ExportRowsResult } from "./types" -import { HTTPError, db } from "@budibase/backend-core" +import { db, HTTPError } from "@budibase/backend-core" import pick from "lodash/pick" import { outputProcessing } from "../../../../utilities/rowProcessor" import sdk from "../../../" @@ -28,20 +28,26 @@ export async function search( table: Table ): Promise> { const { tableId } = options - const { paginate, query, ...params } = options + const { countRows, paginate, query, ...params } = options const { limit } = params let bookmark = (params.bookmark && parseInt(params.bookmark as string)) || undefined if (paginate && !bookmark) { - bookmark = 1 + bookmark = 0 } - let paginateObj = {} + let paginateObj: PaginationJson | undefined - if (paginate) { + if (paginate && !limit) { + throw new Error("Cannot paginate query without a limit") + } + + if (paginate && limit) { paginateObj = { // add one so we can track if there is another page - limit: limit, - page: bookmark, + limit: limit + 1, + } + if (bookmark) { + paginateObj.offset = limit * bookmark } } else if (params && limit) { paginateObj = { @@ -52,8 +58,8 @@ export async function search( if (params.sort) { const direction = params.sortOrder === "descending" - ? SortDirection.DESCENDING - : SortDirection.ASCENDING + ? 
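The normalisation in `fixupFilterArrays` is easiest to see with a concrete filter. A sketch (the field name is illustrative):

```ts
import { SearchFilterOperator, SearchFilters } from "@budibase/types"

// A single value arriving for an array operator...
const query: SearchFilters = {
  [SearchFilterOperator.ONE_OF]: { status: "open" as any },
}
// ...is wrapped after fixupFilterArrays(query), so everything downstream
// can assume arrays:
// { oneOf: { status: ["open"] } }
```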
diff --git a/packages/server/src/sdk/app/rows/search/external.ts b/packages/server/src/sdk/app/rows/search/external.ts
index 077f971903..93c46d8cc3 100644
--- a/packages/server/src/sdk/app/rows/search/external.ts
+++ b/packages/server/src/sdk/app/rows/search/external.ts
@@ -1,13 +1,13 @@
 import {
-  SortJson,
-  SortDirection,
+  IncludeRelationship,
   Operation,
   PaginationJson,
-  IncludeRelationship,
   Row,
-  SearchFilters,
   RowSearchParams,
+  SearchFilters,
   SearchResponse,
+  SortJson,
+  SortOrder,
   Table,
 } from "@budibase/types"
 import * as exporters from "../../../../api/controllers/view/exporters"
@@ -18,7 +18,7 @@ import {
 } from "../../../../integrations/utils"
 import { utils } from "@budibase/shared-core"
 import { ExportRowsParams, ExportRowsResult } from "./types"
-import { HTTPError, db } from "@budibase/backend-core"
+import { db, HTTPError } from "@budibase/backend-core"
 import pick from "lodash/pick"
 import { outputProcessing } from "../../../../utilities/rowProcessor"
 import sdk from "../../../"
@@ -28,20 +28,26 @@ export async function search(
   table: Table
 ): Promise<SearchResponse<Row>> {
   const { tableId } = options
-  const { paginate, query, ...params } = options
+  const { countRows, paginate, query, ...params } = options
   const { limit } = params
   let bookmark =
     (params.bookmark && parseInt(params.bookmark as string)) || undefined
   if (paginate && !bookmark) {
-    bookmark = 1
+    bookmark = 0
   }
-  let paginateObj = {}
+  let paginateObj: PaginationJson | undefined
 
-  if (paginate) {
+  if (paginate && !limit) {
+    throw new Error("Cannot paginate query without a limit")
+  }
+
+  if (paginate && limit) {
     paginateObj = {
       // add one so we can track if there is another page
-      limit: limit,
-      page: bookmark,
+      limit: limit + 1,
+    }
+    if (bookmark) {
+      paginateObj.offset = limit * bookmark
     }
   } else if (params && limit) {
     paginateObj = {
@@ -52,8 +58,8 @@ export async function search(
   if (params.sort) {
     const direction =
       params.sortOrder === "descending"
-        ? SortDirection.DESCENDING
-        : SortDirection.ASCENDING
+        ? SortOrder.DESCENDING
+        : SortOrder.ASCENDING
     sort = {
       [params.sort]: { direction },
     }
@@ -69,24 +75,27 @@ export async function search(
   }
 
   try {
-    let rows = await handleRequest(Operation.READ, tableId, {
+    const parameters = {
       filters: query,
       sort,
       paginate: paginateObj as PaginationJson,
       includeSqlRelationships: IncludeRelationship.INCLUDE,
-    })
+    }
+    const queries: Promise<Row[] | number>[] = []
+    queries.push(handleRequest(Operation.READ, tableId, parameters))
+    if (countRows) {
+      queries.push(handleRequest(Operation.COUNT, tableId, parameters))
+    }
+    const responses = await Promise.all(queries)
+    let rows = responses[0] as Row[]
+    const totalRows =
+      responses.length > 1 ? (responses[1] as number) : undefined
+
     let hasNextPage = false
-    if (paginate && rows.length === limit) {
-      const nextRows = await handleRequest(Operation.READ, tableId, {
-        filters: query,
-        sort,
-        paginate: {
-          limit: 1,
-          page: bookmark! * limit + 1,
-        },
-        includeSqlRelationships: IncludeRelationship.INCLUDE,
-      })
-      hasNextPage = nextRows.length > 0
+    // remove the extra row if it's there
+    if (paginate && limit && rows.length > limit) {
+      rows.pop()
+      hasNextPage = true
     }
 
     if (options.fields) {
@@ -100,7 +109,17 @@ export async function search(
     })
 
     // need wrapper object for bookmarks etc when paginating
-    return { rows, hasNextPage, bookmark: bookmark && bookmark + 1 }
+    const response: SearchResponse<Row> = { rows, hasNextPage }
+    if (hasNextPage && bookmark != null) {
+      response.bookmark = bookmark + 1
+    }
+    if (totalRows != null) {
+      response.totalRows = totalRows
+    }
+    if (paginate && !hasNextPage) {
+      response.hasNextPage = false
+    }
+    return response
   } catch (err: any) {
     if (err.message && err.message.includes("does not exist")) {
       throw new Error(
@@ -126,6 +145,10 @@ export async function exportRows(
     delimiter,
     customHeaders,
   } = options
+
+  if (!tableId) {
+    throw new HTTPError("No table ID for search provided.", 400)
+  }
   const { datasourceId, tableName } = breakExternalTableId(tableId)
 
   let requestQuery: SearchFilters = {}
@@ -148,7 +171,7 @@ export async function exportRows(
     requestQuery = query || {}
   }
 
-  const datasource = await sdk.datasources.get(datasourceId!)
+  const datasource = await sdk.datasources.get(datasourceId)
   const table = await sdk.tables.getTable(tableId)
   if (!datasource || !datasource.entities) {
     throw new HTTPError("Datasource has not been configured for plus API.", 400)
   }
@@ -161,10 +184,6 @@ export async function exportRows(
   let rows: Row[] = []
   let headers
 
-  if (!tableName) {
-    throw new HTTPError("Could not find table name.", 400)
-  }
-
   // Filter data to only specified columns if required
   if (columns && columns.length) {
     for (let i = 0; i < result.rows.length; i++) {
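The pagination rework above swaps page-number bookmarks for zero-based offsets, fetching one extra row to detect a following page. The arithmetic, as a small sketch:

```ts
// Zero-based bookmark: page N of size `limit` starts at offset N * limit.
// Fetching limit + 1 rows means "got more than limit back" == hasNextPage.
function pageWindow(bookmark: number, limit: number) {
  return { offset: bookmark * limit, limit: limit + 1 }
}

pageWindow(0, 10) // => { offset: 0,  limit: 11 }
pageWindow(2, 10) // => { offset: 20, limit: 11 }
```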
diff --git a/packages/server/src/sdk/app/rows/search/filters.ts b/packages/server/src/sdk/app/rows/search/filters.ts
new file mode 100644
index 0000000000..ccce0ab86a
--- /dev/null
+++ b/packages/server/src/sdk/app/rows/search/filters.ts
@@ -0,0 +1,62 @@
+import {
+  FieldType,
+  RelationshipFieldMetadata,
+  SearchFilters,
+  Table,
+} from "@budibase/types"
+import { isPlainObject } from "lodash"
+
+export function getRelationshipColumns(table: Table): {
+  name: string
+  definition: RelationshipFieldMetadata
+}[] {
+  // performing this with a for loop rather than an array filter improves
+  // type guarding, as no casts are required
+  const linkEntries: [string, RelationshipFieldMetadata][] = []
+  for (let entry of Object.entries(table.schema)) {
+    if (entry[1].type === FieldType.LINK) {
+      const linkColumn: RelationshipFieldMetadata = entry[1]
+      linkEntries.push([entry[0], linkColumn])
+    }
+  }
+  return linkEntries.map(entry => ({
+    name: entry[0],
+    definition: entry[1],
+  }))
+}
+
+export function getTableIDList(
+  tables: Table[]
+): { name: string; id: string }[] {
+  return tables
+    .filter(table => table.originalName && table._id)
+    .map(table => ({ id: table._id!, name: table.originalName! }))
+}
+
+export function updateFilterKeys(
+  filters: SearchFilters,
+  updates: { original: string; updated: string }[]
+): SearchFilters {
+  const makeFilterKeyRegex = (str: string) =>
+    new RegExp(`^${str}\\.|:${str}\\.`)
+  for (let filter of Object.values(filters)) {
+    if (!isPlainObject(filter)) {
+      continue
+    }
+    for (let [key, keyFilter] of Object.entries(filter)) {
+      if (keyFilter === "") {
+        delete filter[key]
+      }
+      const possibleKey = updates.find(({ original }) =>
+        key.match(makeFilterKeyRegex(original))
+      )
+      if (possibleKey && possibleKey.original !== possibleKey.updated) {
+        // only replace the first, not replaceAll
+        filter[key.replace(possibleKey.original, possibleKey.updated)] =
+          filter[key]
+        delete filter[key]
+      }
+    }
+  }
+  return filters
+}
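How `updateFilterKeys` is used in practice: relationship filters arrive keyed by table name and are rewritten to table IDs before SQL generation. A sketch with a made-up table ID:

```ts
import { updateFilterKeys } from "./filters"

const filters = { equal: { "employees.name": "Alice" } }
updateFilterKeys(filters, [{ original: "employees", updated: "ta_abc123" }])
// => { equal: { "ta_abc123.name": "Alice" } }
// Only the first occurrence is replaced, matching the comment in the code.
```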
diff --git a/packages/server/src/sdk/app/rows/search/sqs.ts b/packages/server/src/sdk/app/rows/search/sqs.ts
index 87b905a29f..0720600a15 100644
--- a/packages/server/src/sdk/app/rows/search/sqs.ts
+++ b/packages/server/src/sdk/app/rows/search/sqs.ts
@@ -1,4 +1,5 @@
 import {
+  Datasource,
   DocumentType,
   FieldType,
   Operation,
@@ -8,7 +9,6 @@ import {
   RowSearchParams,
   SearchFilters,
   SearchResponse,
-  SortDirection,
   SortOrder,
   SortType,
   SqlClient,
@@ -18,6 +18,7 @@ import {
   buildInternalRelationships,
   sqlOutputProcessing,
 } from "../../../../api/controllers/row/utils"
+import { mapToUserColumn, USER_COLUMN_PREFIX } from "../../tables/internal/sqs"
 import sdk from "../../../index"
 import {
   context,
@@ -29,8 +30,16 @@ import { CONSTANT_INTERNAL_ROW_COLS } from "../../../../db/utils"
 import AliasTables from "../sqlAlias"
 import { outputProcessing } from "../../../../utilities/rowProcessor"
 import pick from "lodash/pick"
+import { processRowCountResponse } from "../utils"
+import {
+  updateFilterKeys,
+  getRelationshipColumns,
+  getTableIDList,
+} from "./filters"
+import { dataFilters } from "@budibase/shared-core"
 
 const builder = new sql.Sql(SqlClient.SQL_LITE)
+const NO_SUCH_COLUMN_REGEX = new RegExp(`no such column.+${USER_COLUMN_PREFIX}`)
 
 function buildInternalFieldList(
   table: Table,
@@ -53,40 +62,65 @@ function buildInternalFieldList(
       buildInternalFieldList(relatedTable, tables, { relationships: false })
     )
   } else {
-    fieldList.push(`${table._id}.${col.name}`)
+    fieldList.push(`${table._id}.${mapToUserColumn(col.name)}`)
   }
 }
 return fieldList
}

-function tableNameInFieldRegex(tableName: string) {
-  return new RegExp(`^${tableName}.|:${tableName}.`, "g")
-}
-
-function cleanupFilters(filters: SearchFilters, tables: Table[]) {
-  for (let filter of Object.values(filters)) {
-    if (typeof filter !== "object") {
-      continue
-    }
-    for (let [key, keyFilter] of Object.entries(filter)) {
-      if (keyFilter === "") {
-        delete filter[key]
-      }
-
-      // relationship, switch to table ID
-      const tableRelated = tables.find(
-        table =>
-          table.originalName &&
-          key.match(tableNameInFieldRegex(table.originalName))
+function cleanupFilters(
+  filters: SearchFilters,
+  table: Table,
+  allTables: Table[]
+) {
+  // get a list of all relationship columns in the table for updating
+  const relationshipColumns = getRelationshipColumns(table)
+  // get table names to ID map for relationships
+  const tableNameToID = getTableIDList(allTables)
+  // all should be applied at once
+  filters = updateFilterKeys(
+    filters,
+    relationshipColumns
+      .map(({ name, definition }) => ({
+        original: name,
+        updated: definition.tableId,
+      }))
+      .concat(
+        tableNameToID.map(({ name, id }) => ({
+          original: name,
+          updated: id,
+        }))
       )
-      if (tableRelated && tableRelated.originalName) {
-        // only replace the first, not replaceAll
-        filter[key.replace(tableRelated.originalName, tableRelated._id!)] =
-          filter[key]
+  )
+
+  // generate a map of all possible column names (these can be duplicated across
+  // tables, but the mapping for them will always be the same)
+  const userColumnMap: Record<string, string> = {}
+  allTables.forEach(table =>
+    Object.keys(table.schema).forEach(
+      key => (userColumnMap[key] = mapToUserColumn(key))
+    )
+  )
+
+  // update the keys of filters to manage user columns
+  const keyInAnyTable = (key: string): boolean =>
+    allTables.some(table => table.schema[key])
+
+  const splitter = new dataFilters.ColumnSplitter(allTables)
+  for (const filter of Object.values(filters)) {
+    for (const key of Object.keys(filter)) {
+      const { numberPrefix, relationshipPrefix, column } = splitter.run(key)
+      if (keyInAnyTable(column)) {
+        filter[
+          `${numberPrefix || ""}${relationshipPrefix || ""}${mapToUserColumn(
+            column
+          )}`
+        ] = filter[key]
         delete filter[key]
       }
     }
   }
+
   return filters
 }
@@ -96,14 +130,48 @@ function buildTableMap(tables: Table[]) {
     // update the table name, should never query by name for SQLite
     table.originalName = table.name
     table.name = table._id!
+    // need a primary for sorting, lookups etc
+    table.primary = ["_id"]
     tableMap[table._id!] = table
   }
   return tableMap
 }
 
-async function runSqlQuery(json: QueryJson, tables: Table[]) {
+function reverseUserColumnMapping(rows: Row[]) {
+  const prefixLength = USER_COLUMN_PREFIX.length
+  return rows.map(row => {
+    const finalRow: Row = {}
+    for (let key of Object.keys(row)) {
+      // it should be the first prefix
+      const index = key.indexOf(USER_COLUMN_PREFIX)
+      if (index !== -1) {
+        // cut out the prefix
+        const newKey = key.slice(0, index) + key.slice(index + prefixLength)
+        finalRow[newKey] = row[key]
+      } else {
+        finalRow[key] = row[key]
+      }
+    }
+    return finalRow
+  })
+}
+
+function runSqlQuery(json: QueryJson, tables: Table[]): Promise<Row[]>
+function runSqlQuery(
+  json: QueryJson,
+  tables: Table[],
+  opts: { countTotalRows: true }
+): Promise<number>
+async function runSqlQuery(
+  json: QueryJson,
+  tables: Table[],
+  opts?: { countTotalRows?: boolean }
+) {
   const alias = new AliasTables(tables.map(table => table.name))
-  return await alias.queryWithAliasing(json, async json => {
+  if (opts?.countTotalRows) {
+    json.endpoint.operation = Operation.COUNT
+  }
+  const processSQLQuery = async (_: Datasource, json: QueryJson) => {
     const query = builder._query(json, {
       disableReturning: true,
     })
@@ -125,17 +193,28 @@ async function runSqlQuery(json: QueryJson, tables: Table[]) {
 
     const db = context.getAppDB()
     return await db.sql(sql, bindings)
-  })
+  }
+  const response = await alias.queryWithAliasing(json, processSQLQuery)
+  if (opts?.countTotalRows) {
+    return processRowCountResponse(response)
+  } else if (Array.isArray(response)) {
+    return reverseUserColumnMapping(response)
+  }
+  return response
 }
 
 export async function search(
   options: RowSearchParams,
   table: Table
 ): Promise<SearchResponse<Row>> {
-  const { paginate, query, ...params } = options
+  let { paginate, query, ...params } = options
   const allTables = await sdk.tables.getAllInternalTables()
   const allTablesMap = buildTableMap(allTables)
+  // make sure we have the mapped/latest table
+  if (table?._id) {
+    table = allTablesMap[table?._id]
+  }
   if (!table) {
     throw new Error("Unable to find table")
   }
@@ -150,13 +229,14 @@ export async function search(
       operation: Operation.READ,
     },
     filters: {
-      ...cleanupFilters(query, allTables),
+      ...cleanupFilters(query, table, allTables),
      documentType: DocumentType.ROW,
     },
     table,
     meta: {
       table,
       tables: allTablesMap,
+      columnPrefix: USER_COLUMN_PREFIX,
     },
     resource: {
       fields: buildInternalFieldList(table, allTables),
@@ -168,13 +248,9 @@ export async function search(
     const sortField = table.schema[params.sort]
     const sortType =
       sortField.type === FieldType.NUMBER ? SortType.NUMBER : SortType.STRING
-    const sortDirection =
-      params.sortOrder === SortOrder.ASCENDING
-        ? SortDirection.ASCENDING
-        : SortDirection.DESCENDING
     request.sort = {
-      [sortField.name]: {
-        direction: sortDirection,
+      [mapToUserColumn(sortField.name)]: {
+        direction: params.sortOrder || SortOrder.ASCENDING,
         type: sortType as SortType,
       },
     }
@@ -183,16 +259,31 @@ export async function search(
   if (params.bookmark && typeof params.bookmark !== "number") {
     throw new Error("Unable to paginate with string based bookmarks")
   }
-  const bookmark: number = (params.bookmark as number) || 1
-  const limit = params.limit
-  if (paginate && params.limit) {
+
+  const bookmark: number = (params.bookmark as number) || 0
+  if (params.limit) {
+    paginate = true
     request.paginate = {
       limit: params.limit + 1,
-      page: bookmark,
+      offset: bookmark * params.limit,
     }
   }
+
   try {
-    const rows = await runSqlQuery(request, allTables)
+    const queries: Promise<Row[] | number>[] = []
+    queries.push(runSqlQuery(request, allTables))
+    if (options.countRows) {
+      // get the total count of rows
+      queries.push(
+        runSqlQuery(request, allTables, {
+          countTotalRows: true,
+        })
+      )
+    }
+    const responses = await Promise.all(queries)
+    let rows = responses[0] as Row[]
+    const totalRows =
+      responses.length > 1 ? (responses[1] as number) : undefined
 
     // process from the format of tableId.column to expected format also
     // make sure JSON columns corrected
@@ -205,7 +296,8 @@ export async function search(
     // check for pagination final row
     let nextRow: Row | undefined
-    if (paginate && params.limit && processed.length > params.limit) {
+    if (paginate && params.limit && rows.length > params.limit) {
+      // remove the extra row that confirmed there is another page
       nextRow = processed.pop()
     }
 
@@ -221,30 +313,27 @@ export async function search(
       finalRows = finalRows.map((r: any) => pick(r, fields))
     }
 
-    // check for pagination
-    if (paginate && limit) {
-      const response: SearchResponse<Row> = {
-        rows: finalRows,
-      }
-      const prevLimit = request.paginate!.limit
-      request.paginate = {
-        limit: 1,
-        page: bookmark * prevLimit + 1,
-      }
-      const hasNextPage = !!nextRow
-      response.hasNextPage = hasNextPage
-      if (hasNextPage) {
-        response.bookmark = bookmark + 1
-      }
-      return response
-    } else {
-      return {
-        rows: finalRows,
-      }
+    const response: SearchResponse<Row> = {
+      rows: finalRows,
     }
+    if (totalRows != null) {
+      response.totalRows = totalRows
+    }
+    // check for pagination
+    if (paginate && nextRow) {
+      response.hasNextPage = true
+      response.bookmark = bookmark + 1
+    }
+    if (paginate && !nextRow) {
+      response.hasNextPage = false
+    }
+    return response
   } catch (err: any) {
     const msg = typeof err === "string" ? err : err.message
-    if (err.status === 404 && msg?.includes(SQLITE_DESIGN_DOC_ID)) {
+    const syncAndRepeat =
+      (err.status === 400 && msg?.match(NO_SUCH_COLUMN_REGEX)) ||
+      (err.status === 404 && msg?.includes(SQLITE_DESIGN_DOC_ID))
+    if (syncAndRepeat) {
       await sdk.tables.sqs.syncDefinition()
       return search(options, table)
     }
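The `data_` prefix introduced here keeps user columns from colliding with Budibase's own row metadata inside SQLite (the prefix value is `data_`, per `USER_COLUMN_PREFIX` in `tables/internal/sqs.ts`). A sketch of the round trip:

```ts
// On the way in: user columns are namespaced before hitting SQLite, so a
// user column called "type" can't clash with the internal "type" column.
mapToUserColumn("type") // => "data_type"

// On the way out, reverseUserColumnMapping strips the prefix again, e.g.
// [{ _id: "ro_1", data_type: "admin" }] => [{ _id: "ro_1", type: "admin" }]
```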
diff --git a/packages/server/src/sdk/app/rows/sqlAlias.ts b/packages/server/src/sdk/app/rows/sqlAlias.ts
index ab4f5d2844..bc8fc56d5e 100644
--- a/packages/server/src/sdk/app/rows/sqlAlias.ts
+++ b/packages/server/src/sdk/app/rows/sqlAlias.ts
@@ -11,7 +11,12 @@ import { SQS_DATASOURCE_INTERNAL } from "@budibase/backend-core"
 import { getSQLClient } from "./utils"
 import { cloneDeep } from "lodash"
 import datasources from "../datasources"
-import { makeExternalQuery } from "../../../integrations/base/query"
+import { BudibaseInternalDB } from "../../../db/utils"
+
+type PerformQueryFunction = (
+  datasource: Datasource,
+  json: QueryJson
+) => Promise<DatasourcePlusQueryResponse>
 
 const WRITE_OPERATIONS: Operation[] = [
   Operation.CREATE,
@@ -65,7 +70,7 @@ export default class AliasTables {
     this.charSeq = new CharSequence()
   }
 
-  isAliasingEnabled(json: QueryJson, datasource: Datasource) {
+  isAliasingEnabled(json: QueryJson, datasource?: Datasource) {
     const operation = json.endpoint.operation
     const fieldLength = json.resource?.fields?.length
     if (
@@ -75,6 +80,10 @@ export default class AliasTables {
     ) {
       return false
     }
+    // SQS - doesn't have a datasource
+    if (!datasource) {
+      return true
+    }
     try {
       const sqlClient = getSQLClient(datasource)
       const isWrite = WRITE_OPERATIONS.includes(operation)
@@ -167,13 +176,14 @@ export default class AliasTables {
 
   async queryWithAliasing(
     json: QueryJson,
-    queryFn?: (json: QueryJson) => Promise<DatasourcePlusQueryResponse>
+    queryFn: PerformQueryFunction
   ): Promise<DatasourcePlusQueryResponse> {
     const datasourceId = json.endpoint.datasourceId
     const isSqs = datasourceId === SQS_DATASOURCE_INTERNAL
-    let aliasingEnabled: boolean, datasource: Datasource | undefined
+    let aliasingEnabled: boolean, datasource: Datasource
     if (isSqs) {
-      aliasingEnabled = true
+      aliasingEnabled = this.isAliasingEnabled(json)
+      datasource = BudibaseInternalDB
     } else {
       datasource = await datasources.get(datasourceId)
       aliasingEnabled = this.isAliasingEnabled(json, datasource)
     }
@@ -225,14 +235,7 @@ export default class AliasTables {
       json.tableAliases = invertedTableAliases
     }
 
-    let response: DatasourcePlusQueryResponse
-    if (datasource && !isSqs) {
-      response = await makeExternalQuery(datasource, json)
-    } else if (queryFn) {
-      response = await queryFn(json)
-    } else {
-      throw new Error("No supplied method to perform aliased query")
-    }
+    let response: DatasourcePlusQueryResponse = await queryFn(datasource, json)
     if (Array.isArray(response) && aliasingEnabled) {
       return this.reverse(response)
     } else {
diff --git a/packages/server/src/sdk/app/rows/utils.ts b/packages/server/src/sdk/app/rows/utils.ts
index bb37fd99f3..cd1b663f6a 100644
--- a/packages/server/src/sdk/app/rows/utils.ts
+++ b/packages/server/src/sdk/app/rows/utils.ts
@@ -50,6 +50,17 @@ export function getSQLClient(datasource: Datasource): SqlClient {
   throw new Error("Unable to determine client for SQL datasource")
 }
 
+export function processRowCountResponse(
+  response: DatasourcePlusQueryResponse
+): number {
+  if (response && response.length === 1 && "total" in response[0]) {
+    const total = response[0].total
+    return typeof total === "number" ? total : parseInt(total)
+  } else {
+    throw new Error("Unable to count rows in query - no count response")
+  }
+}
+
 export async function getDatasourceAndQuery(
   json: QueryJson
 ): Promise<DatasourcePlusQueryResponse> {
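`processRowCountResponse` expects the COUNT query to come back as a single aggregate row. A sketch of the shape it unwraps, assuming the generated SQL aliases the aggregate as `total` (which is what the `"total" in response[0]` check implies):

```ts
// e.g. SELECT COUNT(*) AS total FROM ...
const response = [{ total: 42 }]
// processRowCountResponse(response) => 42
// Anything else - no rows, multiple rows, or a missing "total" key - throws.
```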
diff --git a/packages/server/src/sdk/app/tables/getters.ts b/packages/server/src/sdk/app/tables/getters.ts
index 355493579d..738e57eff8 100644
--- a/packages/server/src/sdk/app/tables/getters.ts
+++ b/packages/server/src/sdk/app/tables/getters.ts
@@ -90,10 +90,10 @@ export async function getExternalTable(
 
 export async function getTable(tableId: string): Promise<Table> {
   const db = context.getAppDB()
   let output: Table
-  if (isExternalTableID(tableId)) {
+  if (tableId && isExternalTableID(tableId)) {
     let { datasourceId, tableName } = breakExternalTableId(tableId)
-    const datasource = await datasources.get(datasourceId!)
-    const table = await getExternalTable(datasourceId!, tableName!)
+    const datasource = await datasources.get(datasourceId)
+    const table = await getExternalTable(datasourceId, tableName)
     output = { ...table, sql: isSQL(datasource) }
   } else {
     output = await db.get<Table>(tableId)
diff --git a/packages/server/src/sdk/app/tables/internal/index.ts b/packages/server/src/sdk/app/tables/internal/index.ts
index ea40d2bfe9..fc32708708 100644
--- a/packages/server/src/sdk/app/tables/internal/index.ts
+++ b/packages/server/src/sdk/app/tables/internal/index.ts
@@ -17,6 +17,7 @@ import { cloneDeep } from "lodash/fp"
 import isEqual from "lodash/isEqual"
 import { runStaticFormulaChecks } from "../../../../api/controllers/table/bulkFormula"
 import { context } from "@budibase/backend-core"
+import { findDuplicateInternalColumns } from "@budibase/shared-core"
 import { getTable } from "../getters"
 import { checkAutoColumns } from "./utils"
 import * as viewsSdk from "../../views"
@@ -44,6 +45,17 @@ export async function save(
   if (hasTypeChanged(table, oldTable)) {
     throw new Error("A column type has changed.")
   }
+
+  // check for case sensitivity - we don't want to allow duplicated columns
+  const duplicateColumn = findDuplicateInternalColumns(table)
+  if (duplicateColumn.length) {
+    throw new Error(
+      `Column(s) "${duplicateColumn.join(
+        ", "
+      )}" are duplicated - check for other columns with these names (case insensitive)`
+    )
+  }
+
   // check that subtypes have been maintained
   table = checkAutoColumns(table, oldTable)
diff --git a/packages/server/src/sdk/app/tables/internal/sqs.ts b/packages/server/src/sdk/app/tables/internal/sqs.ts
index eb57d1f3b8..f892a9c6c8 100644
--- a/packages/server/src/sdk/app/tables/internal/sqs.ts
+++ b/packages/server/src/sdk/app/tables/internal/sqs.ts
@@ -14,6 +14,7 @@ import {
   CONSTANT_INTERNAL_ROW_COLS,
   generateJunctionTableID,
 } from "../../../../db/utils"
+import { isEqual } from "lodash"
 
 const FieldTypeMap: Record<FieldType, SQLiteType> = {
   [FieldType.BOOLEAN]: SQLiteType.NUMERIC,
@@ -61,10 +62,18 @@ function buildRelationshipDefinitions(
   }
 }
 
+export const USER_COLUMN_PREFIX = "data_"
+
+// utility function to denote that columns in SQLite are mapped to avoid overlap issues
+// the overlaps can occur due to case insensitivity and some of the columns which Budibase requires
+export function mapToUserColumn(key: string) {
+  return `${USER_COLUMN_PREFIX}${key}`
+}
+
 // this can generate relationship tables as part of the mapping
 function mapTable(table: Table): SQLiteTables {
   const tables: SQLiteTables = {}
-  const fields: Record<string, SQLiteType> = {}
+  const fields: Record<string, { field: string; type: SQLiteType }> = {}
   for (let [key, column] of Object.entries(table.schema)) {
     // relationships should be handled differently
     if (column.type === FieldType.LINK) {
@@ -77,7 +86,10 @@ function mapTable(table: Table): SQLiteTables {
     if (!FieldTypeMap[column.type]) {
       throw new Error(`Unable to map type "${column.type}" to SQLite type`)
     }
-    fields[key] = FieldTypeMap[column.type]
+    fields[mapToUserColumn(key)] = {
+      field: key,
+      type: FieldTypeMap[column.type],
+    }
   }
   // there are some extra columns to map - add these in
   const constantMap: Record<string, SQLiteType> = {}
@@ -107,8 +119,22 @@ async function buildBaseDefinition() {
 
 export async function syncDefinition(): Promise<void> {
   const db = context.getAppDB()
+  let existing: SQLiteDefinition | undefined
+  try {
+    existing = await db.get(SQLITE_DESIGN_DOC_ID)
+  } catch (err: any) {
+    if (err.status !== 404) {
+      throw err
+    }
+  }
   const definition = await buildBaseDefinition()
-  await db.put(definition)
+  if (existing) {
+    definition._rev = existing._rev
+  }
+  // only write if something has changed
+  if (!existing || !isEqual(existing.sql, definition.sql)) {
+    await db.put(definition)
+  }
 }
 
 export async function addTable(table: Table) {
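The new duplicate-column guard exists because the SQLite mapping above is effectively case-insensitive once columns are prefixed. A sketch of what it rejects (schema bodies elided, cast is only to keep the stub short):

```ts
const table = {
  schema: { name: {}, Name: {} }, // differ only by case
} as any

findDuplicateInternalColumns(table) // => ["name"]
// save() then throws:
//   Column(s) "name" are duplicated - check for other columns with these
//   names (case insensitive)
```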
diff --git a/packages/server/src/sdk/app/views/external.ts b/packages/server/src/sdk/app/views/external.ts
index 0f96bcc061..2b3e271597 100644
--- a/packages/server/src/sdk/app/views/external.ts
+++ b/packages/server/src/sdk/app/views/external.ts
@@ -10,9 +10,9 @@ export async function get(viewId: string): Promise<ViewV2> {
   const { tableId } = utils.extractViewInfoFromID(viewId)
 
   const { datasourceId, tableName } = breakExternalTableId(tableId)
-  const ds = await sdk.datasources.get(datasourceId!)
+  const ds = await sdk.datasources.get(datasourceId)
 
-  const table = ds.entities![tableName!]
+  const table = ds.entities![tableName]
   const views = Object.values(table.views!).filter(isV2)
   const found = views.find(v => v.id === viewId)
   if (!found) {
@@ -25,9 +25,9 @@ export async function getEnriched(viewId: string): Promise<ViewV2Enriched> {
   const { tableId } = utils.extractViewInfoFromID(viewId)
 
   const { datasourceId, tableName } = breakExternalTableId(tableId)
-  const ds = await sdk.datasources.get(datasourceId!)
+  const ds = await sdk.datasources.get(datasourceId)
 
-  const table = ds.entities![tableName!]
+  const table = ds.entities![tableName]
   const views = Object.values(table.views!).filter(isV2)
   const found = views.find(v => v.id === viewId)
   if (!found) {
@@ -49,9 +49,9 @@ export async function create(
   const db = context.getAppDB()
 
   const { datasourceId, tableName } = breakExternalTableId(tableId)
-  const ds = await sdk.datasources.get(datasourceId!)
-  ds.entities![tableName!].views ??= {}
-  ds.entities![tableName!].views![view.name] = view
+  const ds = await sdk.datasources.get(datasourceId)
+  ds.entities![tableName].views ??= {}
+  ds.entities![tableName].views![view.name] = view
   await db.put(ds)
   return view
 }
@@ -60,9 +60,9 @@ export async function update(tableId: string, view: ViewV2): Promise<void> {
   const db = context.getAppDB()
 
   const { datasourceId, tableName } = breakExternalTableId(tableId)
-  const ds = await sdk.datasources.get(datasourceId!)
-  ds.entities![tableName!].views ??= {}
-  const views = ds.entities![tableName!].views!
+  const ds = await sdk.datasources.get(datasourceId)
+  ds.entities![tableName].views ??= {}
+  const views = ds.entities![tableName].views!
 
   const existingView = Object.values(views).find(
     v => isV2(v) && v.id === view.id
@@ -87,9 +87,9 @@ export async function remove(viewId: string): Promise<ViewV2> {
   }
 
   const { datasourceId, tableName } = breakExternalTableId(view.tableId)
-  const ds = await sdk.datasources.get(datasourceId!)
+  const ds = await sdk.datasources.get(datasourceId)
 
-  delete ds.entities![tableName!].views![view?.name]
+  delete ds.entities![tableName].views![view?.name]
   await db.put(ds)
   return view
 }
diff --git a/packages/server/src/sdk/app/views/index.ts b/packages/server/src/sdk/app/views/index.ts
index ea05ecf512..b6ac7b6f6b 100644
--- a/packages/server/src/sdk/app/views/index.ts
+++ b/packages/server/src/sdk/app/views/index.ts
@@ -8,7 +8,8 @@ import {
 } from "@budibase/types"
 import { HTTPError, db as dbCore } from "@budibase/backend-core"
 import { features } from "@budibase/pro"
-import { cloneDeep } from "lodash"
+import { helpers } from "@budibase/shared-core"
+import { cloneDeep } from "lodash/fp"
 import * as utils from "../../../db/utils"
 import { isExternalTableID } from "../../../integrations/utils"
 
@@ -16,7 +17,6 @@ import { isExternalTableID } from "../../../integrations/utils"
 import * as internal from "./internal"
 import * as external from "./external"
 import sdk from "../../../sdk"
-import { isRequired } from "../../../utilities/schema"
 
 function pickApi(tableId: any) {
   if (isExternalTableID(tableId)) {
@@ -37,11 +37,9 @@ export async function getEnriched(viewId: string): Promise<ViewV2Enriched> {
 
 async function guardViewSchema(
   tableId: string,
-  viewSchema?: Record<string, ViewUIFieldMetadata>
+  view: Omit<ViewV2, "id" | "version">
 ) {
-  if (!viewSchema || !Object.keys(viewSchema).length) {
-    return
-  }
+  const viewSchema = view.schema || {}
   const table = await sdk.tables.getTable(tableId)
 
   for (const field of Object.keys(viewSchema)) {
@@ -54,18 +52,11 @@ async function guardViewSchema(
     }
 
     if (viewSchema[field].readonly) {
-      if (!(await features.isViewReadonlyColumnsEnabled())) {
-        throw new HTTPError(
-          `Readonly fields are not enabled for your tenant`,
-          400
-        )
-      }
-
-      if (isRequired(tableSchemaField.constraints)) {
-        throw new HTTPError(
-          `Field "${field}" cannot be readonly as it is a required field`,
-          400
-        )
+      if (
+        !(await features.isViewReadonlyColumnsEnabled()) &&
+        !(tableSchemaField as ViewUIFieldMetadata).readonly
+      ) {
+        throw new HTTPError(`Readonly fields are not enabled`, 400)
       }
 
       if (!viewSchema[field].visible) {
@@ -76,19 +67,61 @@ async function guardViewSchema(
       }
     }
   }
+
+  const existingView =
+    table?.views && (table.views[view.name] as ViewV2 | undefined)
+
+  for (const field of Object.values(table.schema)) {
+    if (!helpers.schema.isRequired(field.constraints)) {
+      continue
+    }
+
+    const viewSchemaField = viewSchema[field.name]
+    const existingViewSchema =
+      existingView?.schema && existingView.schema[field.name]
+    if (!viewSchemaField && !existingViewSchema?.visible) {
+      // Supporting existing configs with required columns but hidden in views
+      continue
+    }
+
+    if (!viewSchemaField?.visible) {
+      throw new HTTPError(
+        `You can't hide "${field.name}" because it is a required field.`,
+        400
+      )
+    }
+
+    if (viewSchemaField.readonly) {
+      throw new HTTPError(
+        `You can't make "${field.name}" readonly because it is a required field.`,
+        400
+      )
+    }
+  }
+
+  if (view.primaryDisplay) {
+    const viewSchemaField = viewSchema[view.primaryDisplay]
+
+    if (!viewSchemaField?.visible) {
+      throw new HTTPError(
+        `You can't hide "${view.primaryDisplay}" because it is the display column.`,
+        400
+      )
+    }
+  }
 }
 
 export async function create(
   tableId: string,
   viewRequest: Omit<ViewV2, "id" | "version">
 ): Promise<ViewV2> {
-  await guardViewSchema(tableId, viewRequest.schema)
+  await guardViewSchema(tableId, viewRequest)
   return pickApi(tableId).create(tableId, viewRequest)
 }
 
 export async function update(tableId: string, view: ViewV2): Promise<ViewV2> {
-  await guardViewSchema(tableId, view.schema)
+  await guardViewSchema(tableId, view)
   return pickApi(tableId).update(tableId, view)
 }
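The reworked `guardViewSchema` now validates against the whole view rather than just its schema, so required columns can no longer be hidden or made readonly. A sketch of a request it rejects - `email` stands in for any required column:

```ts
await sdk.views.update(tableId, {
  ...view,
  schema: { ...view.schema, email: { visible: false } },
})
// -> HTTPError 400: You can't hide "email" because it is a required field.
```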
diff --git a/packages/server/src/startup/index.ts b/packages/server/src/startup/index.ts
index 48d500a0cf..c86b0eb264 100644
--- a/packages/server/src/startup/index.ts
+++ b/packages/server/src/startup/index.ts
@@ -15,6 +15,7 @@ import * as fileSystem from "../utilities/fileSystem"
 import { default as eventEmitter, init as eventInit } from "../events"
 import * as migrations from "../migrations"
 import * as bullboard from "../automations/bullboard"
+import * as appMigrations from "../appMigrations/queue"
 import * as pro from "@budibase/pro"
 import * as api from "../api"
 import sdk from "../sdk"
@@ -69,6 +70,9 @@ export async function startup(
     return
   }
   printFeatures()
+  if (env.BUDIBASE_ENVIRONMENT) {
+    console.log(`service running environment: "${env.BUDIBASE_ENVIRONMENT}"`)
+  }
   STARTUP_RAN = true
   if (app && server && !env.CLUSTER_MODE) {
     console.log(`Budibase running on ${JSON.stringify(server.address())}`)
@@ -114,8 +118,10 @@ export async function startup(
   // configure events to use the pro audit log write
   // can't integrate directly into backend-core due to cyclic issues
   queuePromises.push(events.processors.init(pro.sdk.auditLogs.write))
+  // app migrations and automations on other service
   if (automationsEnabled()) {
     queuePromises.push(automations.init())
+    queuePromises.push(appMigrations.init())
   }
   queuePromises.push(initPro())
   if (app) {
diff --git a/packages/server/src/tests/utilities/api/datasource.ts b/packages/server/src/tests/utilities/api/datasource.ts
index bb4c74093c..67484a688a 100644
--- a/packages/server/src/tests/utilities/api/datasource.ts
+++ b/packages/server/src/tests/utilities/api/datasource.ts
@@ -6,6 +6,7 @@ import {
   UpdateDatasourceRequest,
   QueryJson,
   BuildSchemaFromSourceResponse,
+  FetchDatasourceInfoResponse,
 } from "@budibase/types"
 import { Expectations, TestAPI } from "./base"
 
@@ -61,6 +62,10 @@ export class DatasourceAPI extends TestAPI {
     })
   }
 
+  fetch = async (expectations?: Expectations) => {
+    return await this._get<Datasource[]>(`/api/datasources`, { expectations })
+  }
+
   query = async (
     query: Omit<QueryJson, "meta"> & Partial<Pick<QueryJson, "meta">>,
     expectations?: Expectations
@@ -71,10 +76,29 @@ export class DatasourceAPI extends TestAPI {
     })
   }
 
-  fetchSchema = async (id: string, expectations?: Expectations) => {
+  fetchSchema = async (
+    {
+      datasourceId,
+      tablesFilter,
+    }: { datasourceId: string; tablesFilter?: string[] },
+    expectations?: Expectations
+  ) => {
     return await this._post<BuildSchemaFromSourceResponse>(
-      `/api/datasources/${id}/schema`,
+      `/api/datasources/${datasourceId}/schema`,
       {
+        expectations: expectations,
+        body: {
+          tablesFilter: tablesFilter,
+        },
+      }
+    )
+  }
+
+  info = async (datasource: Datasource, expectations?: Expectations) => {
+    return await this._post<FetchDatasourceInfoResponse>(
+      `/api/datasources/info`,
+      {
+        body: { datasource },
         expectations,
       }
    )
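For reviewers updating call sites of the reshaped test helper: the old positional `fetchSchema(id)` becomes an options object. A usage sketch, assuming the usual `config.api` test harness and illustrative table names:

```ts
const { datasource } = await config.api.datasource.fetchSchema({
  datasourceId: ds._id!,
  tablesFilter: ["employees", "orders"], // only introspect these tables
})
```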
diff --git a/packages/server/src/tests/utilities/structures.ts b/packages/server/src/tests/utilities/structures.ts
index 7213cc66f1..a59719ab2c 100644
--- a/packages/server/src/tests/utilities/structures.ts
+++ b/packages/server/src/tests/utilities/structures.ts
@@ -359,6 +359,36 @@ export function collectAutomation(tableId?: string): Automation {
   return automation as Automation
 }
 
+export function filterAutomation(tableId?: string): Automation {
+  const automation: any = {
+    name: "looping",
+    type: "automation",
+    definition: {
+      steps: [
+        {
+          id: "b",
+          type: "ACTION",
+          internal: true,
+          stepId: AutomationActionStepId.FILTER,
+          inputs: {},
+          schema: BUILTIN_ACTION_DEFINITIONS.EXECUTE_SCRIPT.schema,
+        },
+      ],
+      trigger: {
+        id: "a",
+        type: "TRIGGER",
+        event: "row:save",
+        stepId: AutomationTriggerStepId.ROW_SAVED,
+        inputs: {
+          tableId,
+        },
+        schema: TRIGGER_DEFINITIONS.ROW_SAVED.schema,
+      },
+    },
+  }
+  return automation as Automation
+}
+
 export function basicAutomationResults(
   automationId: string
 ): AutomationResults {
diff --git a/packages/server/src/threads/automation.ts b/packages/server/src/threads/automation.ts
index 469d0845c9..a7cf71de4b 100644
--- a/packages/server/src/threads/automation.ts
+++ b/packages/server/src/threads/automation.ts
@@ -7,6 +7,8 @@ import {
 } from "../automations/utils"
 import * as actions from "../automations/actions"
 import * as automationUtils from "../automations/automationUtils"
+import { replaceFakeBindings } from "../automations/loopUtils"
+
 import { default as AutomationEmitter } from "../events/AutomationEmitter"
 import { generateAutomationMetadataID, isProdAppID } from "../db/utils"
 import { definitions as triggerDefs } from "../automations/triggerInfo"
@@ -214,15 +216,15 @@ class Orchestrator {
   }
 
   updateContextAndOutput(
-    loopStepNumber: number | undefined,
+    currentLoopStepIndex: number | undefined,
     step: AutomationStep,
     output: any,
     result: { success: boolean; status: string }
   ) {
-    if (!loopStepNumber) {
+    if (!currentLoopStepIndex) {
       throw new Error("No loop step number provided.")
     }
-    this.executionOutput.steps.splice(loopStepNumber, 0, {
+    this.executionOutput.steps.splice(currentLoopStepIndex, 0, {
       id: step.id,
       stepId: step.stepId,
       outputs: {
@@ -232,7 +234,7 @@ class Orchestrator {
       },
       inputs: step.inputs,
     })
-    this._context.steps.splice(loopStepNumber, 0, {
+    this._context.steps.splice(currentLoopStepIndex, 0, {
       ...output,
       success: result.success,
       status: result.status,
@@ -256,7 +258,7 @@ class Orchestrator {
     let loopStep: LoopStep | undefined = undefined
 
     let stepCount = 0
-    let loopStepNumber: any = undefined
+    let currentLoopStepIndex: number = 0
     let loopSteps: LoopStep[] | undefined = []
     let metadata
     let timeoutFlag = false
@@ -290,7 +292,7 @@ class Orchestrator {
           },
         })
 
-        let input: any,
+        let input: LoopInput | undefined,
           iterations = 1,
           iterationCount = 0
 
@@ -309,19 +311,19 @@ class Orchestrator {
           stepCount++
           if (step.stepId === LOOP_STEP_ID) {
             loopStep = step as LoopStep
-            loopStepNumber = stepCount
+            currentLoopStepIndex = stepCount
             continue
           }
 
           if (loopStep) {
             input = await processObject(loopStep.inputs, this._context)
-            iterations = getLoopIterations(loopStep as LoopStep)
+            iterations = getLoopIterations(loopStep)
             stepSpan?.addTags({ step: { iterations } })
           }
-          for (let index = 0; index < iterations; index++) {
+
+          for (let stepIndex = 0; stepIndex < iterations; stepIndex++) {
            let originalStepInput = cloneDeep(step.inputs)
-            // Handle if the user has set a max iteration count or if it reaches the max limit set by us
-            if (loopStep && input.binding) {
+            if (loopStep && input?.binding) {
              let tempOutput = {
                items: loopSteps,
                iterations: iterationCount,
              }
              try {
@@ -332,7 +334,7 @@ class Orchestrator {
                )
              } catch (err) {
                this.updateContextAndOutput(
-                  loopStepNumber,
+                  currentLoopStepIndex,
                  step,
                  tempOutput,
                  {
@@ -353,55 +355,22 @@ class Orchestrator {
            } else if (Array.isArray(loopStep.inputs.binding)) {
              item = loopStep.inputs.binding
            }
-            this._context.steps[loopStepNumber] = {
-              currentItem: item[index],
+            this._context.steps[currentLoopStepIndex] = {
+              currentItem: item[stepIndex],
            }
 
-            // The "Loop" binding in the front end is "fake", so replace it here so the context can understand it
-            // Pretty hacky because we need to account for the row object
-            for (let [key, value] of Object.entries(originalStepInput)) {
-              if (typeof value === "object") {
-                for (let [innerKey, innerValue] of Object.entries(
-                  originalStepInput[key]
-                )) {
-                  if (typeof innerValue === "string") {
-                    originalStepInput[key][innerKey] =
-                      automationUtils.substituteLoopStep(
-                        innerValue,
-                        `steps.${loopStepNumber}`
-                      )
-                  } else if (typeof value === "object") {
-                    for (let [innerObject, innerValue] of Object.entries(
-                      originalStepInput[key][innerKey]
-                    )) {
-                      if (typeof innerValue === "string") {
-                        originalStepInput[key][innerKey][innerObject] =
-                          automationUtils.substituteLoopStep(
-                            innerValue,
-                            `steps.${loopStepNumber}`
-                          )
-                      }
-                    }
-                  }
-                }
-              } else {
-                if (typeof value === "string") {
-                  originalStepInput[key] =
-                    automationUtils.substituteLoopStep(
-                      value,
-                      `steps.${loopStepNumber}`
-                    )
-                }
-              }
-            }
+            originalStepInput = replaceFakeBindings(
+              originalStepInput,
+              currentLoopStepIndex
+            )
 
             if (
-              index === env.AUTOMATION_MAX_ITERATIONS ||
+              stepIndex === env.AUTOMATION_MAX_ITERATIONS ||
               (loopStep.inputs.iterations &&
-                index === parseInt(loopStep.inputs.iterations))
+                stepIndex === parseInt(loopStep.inputs.iterations))
             ) {
               this.updateContextAndOutput(
-                loopStepNumber,
+                currentLoopStepIndex,
                 step,
                 tempOutput,
                 {
@@ -416,7 +385,7 @@ class Orchestrator {
 
             let isFailure = false
             const currentItem =
-              this._context.steps[loopStepNumber]?.currentItem
+              this._context.steps[currentLoopStepIndex]?.currentItem
             if (currentItem && typeof currentItem === "object") {
               isFailure = Object.keys(currentItem).some(value => {
                 return currentItem[value] === loopStep?.inputs.failure
@@ -428,7 +397,7 @@ class Orchestrator {
 
             if (isFailure) {
               this.updateContextAndOutput(
-                loopStepNumber,
+                currentLoopStepIndex,
                 step,
                 tempOutput,
                 {
@@ -453,7 +422,6 @@ class Orchestrator {
               continue
             }
 
-            // If it's a loop step, we need to manually add the bindings to the context
             let stepFn = await this.getStepFunctionality(step.stepId)
             let inputs = await processObject(originalStepInput, this._context)
             inputs = automationUtils.cleanInputValues(
@@ -502,9 +470,9 @@ class Orchestrator {
 
             if (loopStep) {
               iterationCount++
-              if (index === iterations - 1) {
+              if (stepIndex === iterations - 1) {
                 loopStep = undefined
-                this._context.steps.splice(loopStepNumber, 1)
+                this._context.steps.splice(currentLoopStepIndex, 1)
                 break
               }
             }
@@ -515,7 +483,7 @@ class Orchestrator {
 
           if (loopStep && iterations === 0) {
             loopStep = undefined
-            this.executionOutput.steps.splice(loopStepNumber + 1, 0, {
+            this.executionOutput.steps.splice(currentLoopStepIndex + 1, 0, {
               id: step.id,
               stepId: step.stepId,
               outputs: {
@@ -525,14 +493,14 @@ class Orchestrator {
               inputs: {},
             })
 
-            this._context.steps.splice(loopStepNumber, 1)
+            this._context.steps.splice(currentLoopStepIndex, 1)
             iterations = 1
           }
 
           // Delete the step after the loop step as it's irrelevant, since information is included
           // in the loop step
           if (wasLoopStep && !loopStep) {
-            this._context.steps.splice(loopStepNumber + 1, 1)
+            this._context.steps.splice(currentLoopStepIndex + 1, 1)
             wasLoopStep = false
           }
           if (loopSteps && loopSteps.length) {
@@ -541,13 +509,13 @@ class Orchestrator {
               items: loopSteps,
               iterations: iterationCount,
             }
-            this.executionOutput.steps.splice(loopStepNumber + 1, 0, {
+            this.executionOutput.steps.splice(currentLoopStepIndex + 1, 0, {
               id: step.id,
               stepId: step.stepId,
               outputs: tempOutput,
               inputs: step.inputs,
             })
-            this._context.steps[loopStepNumber] = tempOutput
+            this._context.steps[currentLoopStepIndex] = tempOutput
 
             wasLoopStep = true
             loopSteps = []
diff --git a/packages/server/src/threads/query.ts b/packages/server/src/threads/query.ts
index 54322b1156..ba451a3325 100644
--- a/packages/server/src/threads/query.ts
+++ b/packages/server/src/threads/query.ts
@@ -196,12 +196,22 @@ class QueryRunner {
     return { rows, keys, info, extra, pagination }
   }
 
-  async runAnotherQuery(queryId: string, parameters: any) {
+  async runAnotherQuery(
+    queryId: string,
+    currentParameters: Record<string, any>
+  ) {
     const db = context.getAppDB()
     const query = await db.get(queryId)
     const datasource = await sdk.datasources.get(query.datasourceId, {
       enriched: true,
     })
+    // enrich parameters with dynamic queries defaults
+    const defaultParams = query.parameters || []
+    for (let param of defaultParams) {
+      if (!currentParameters[param.name]) {
+        currentParameters[param.name] = param.default
+      }
+    }
     return new QueryRunner(
       {
         schema: query.schema,
@@ -210,7 +220,7 @@ class QueryRunner {
         transformer: query.transformer,
         nullDefaultSupport: query.nullDefaultSupport,
         ctx: this.ctx,
-        parameters,
+        parameters: currentParameters,
         datasource,
         queryId,
       },
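The parameter enrichment added to `runAnotherQuery` merges in query-level defaults for anything the caller omits. A sketch of the merge (parameter names are illustrative):

```ts
// query.parameters = [{ name: "city", default: "" },
//                     { name: "limit", default: "10" }]
const currentParameters: Record<string, any> = { city: "Dublin" }
// after the loop: { city: "Dublin", limit: "10" }
// Note: the `!currentParameters[param.name]` check also overwrites falsy
// caller values ("" or 0), which may or may not be intentional.
```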
diff --git a/packages/server/src/utilities/schema.ts b/packages/server/src/utilities/schema.ts
index 7fea417d5a..4bd4e8f583 100644
--- a/packages/server/src/utilities/schema.ts
+++ b/packages/server/src/utilities/schema.ts
@@ -4,9 +4,9 @@ import {
   TableSchema,
   FieldSchema,
   Row,
-  FieldConstraints,
+  Table,
 } from "@budibase/types"
-import { ValidColumnNameRegex, utils } from "@budibase/shared-core"
+import { ValidColumnNameRegex, helpers, utils } from "@budibase/shared-core"
 import { db } from "@budibase/backend-core"
 import { parseCsvExport } from "../api/controllers/view/exporters"
 
@@ -41,15 +41,6 @@ export function isRows(rows: any): rows is Rows {
   return Array.isArray(rows) && rows.every(row => typeof row === "object")
 }
 
-export function isRequired(constraints: FieldConstraints | undefined) {
-  const isRequired =
-    !!constraints &&
-    ((typeof constraints.presence !== "boolean" &&
-      constraints.presence?.allowEmpty === false) ||
-      constraints.presence === true)
-  return isRequired
-}
-
 export function validate(rows: Rows, schema: TableSchema): ValidationResults {
   const results: ValidationResults = {
     schemaValidation: {},
@@ -88,7 +79,6 @@ export function validate(rows: Rows, schema: TableSchema): ValidationResults {
     } else if (
       // If there's no data for this field don't bother with further checks
       // If the field is already marked as invalid there's no need for further checks
-      results.schemaValidation[columnName] === false ||
       columnData == null ||
       isAutoColumn
     ) {
@@ -109,7 +99,7 @@ export function validate(rows: Rows, schema: TableSchema): ValidationResults {
         columnData,
         columnType,
         columnSubtype,
-        isRequired(constraints)
+        helpers.schema.isRequired(constraints)
       )
     ) {
       results.schemaValidation[columnName] = false
@@ -128,16 +118,26 @@ export function validate(rows: Rows, schema: TableSchema): ValidationResults {
   return results
 }
 
-export function parse(rows: Rows, schema: TableSchema): Rows {
+export function parse(rows: Rows, table: Table): Rows {
   return rows.map(row => {
     const parsedRow: Row = {}
 
     Object.entries(row).forEach(([columnName, columnData]) => {
-      if (!(columnName in schema) || schema[columnName]?.autocolumn) {
+      const schema = table.schema
+      if (!(columnName in schema)) {
         // Objects can be present in the row data but not in the schema, so make sure we don't proceed in such a case
         return
       }
 
+      if (
+        schema[columnName].autocolumn &&
+        !table.primary?.includes(columnName)
+      ) {
+        // Don't want the user specifying values for autocolumns unless they're updating
+        // a row through its primary key.
+        return
+      }
+
       const columnSchema = schema[columnName]
       const { type: columnType } = columnSchema
       if (columnType === FieldType.NUMBER) {
diff --git a/packages/shared-core/package.json b/packages/shared-core/package.json
index 3049afdb95..da74d090b6 100644
--- a/packages/shared-core/package.json
+++ b/packages/shared-core/package.json
@@ -11,7 +11,7 @@
     "build": "node ../../scripts/build.js && tsc -p tsconfig.build.json --emitDeclarationOnly --paths null",
     "build:dev": "yarn prebuild && tsc --build --watch --preserveWatchOutput",
     "dev": "tsc -p tsconfig.json --watch --preserveWatchOutput",
-    "check:types": "tsc -p tsconfig.json --noEmit --paths null",
+    "check:types": "tsc -p tsconfig.json --noEmit --paths null --target es2020",
     "test": "jest",
     "test:watch": "yarn test --watchAll"
   },
@@ -21,7 +21,7 @@
   },
   "devDependencies": {
     "rimraf": "3.0.2",
-    "typescript": "5.2.2"
+    "typescript": "5.5.2"
   },
   "nx": {
     "targets": {
diff --git a/packages/shared-core/src/constants/index.ts b/packages/shared-core/src/constants/index.ts
index afb7e659e1..0713b5d2f8 100644
--- a/packages/shared-core/src/constants/index.ts
+++ b/packages/shared-core/src/constants/index.ts
@@ -1,5 +1,6 @@
 export * from "./api"
 export * from "./fields"
+export * from "./rows"
 
 export const OperatorOptions = {
   Equals: {
@@ -163,14 +164,17 @@ export const InvalidFileExtensions = [
 
 export enum BpmCorrelationKey {
   ONBOARDING = "budibase:onboarding:correlationkey",
+  VERIFY_SSO_LOGIN = "budibase:verify_sso_login:correlationkey",
 }
 
 export enum BpmInstanceKey {
   ONBOARDING = "budibase:onboarding:instancekey",
+  VERIFY_SSO_LOGIN = "budibase:verify_sso_login:instancekey",
 }
 
 export enum BpmStatusKey {
   ONBOARDING = "budibase:onboarding:status",
+  VERIFY_SSO_LOGIN = "budibase:verify_sso_login:status",
 }
 
 export enum BpmStatusValue {
@@ -179,3 +183,5 @@ export enum BpmStatusValue {
   VERIFYING_EMAIL = "verifying_email",
   COMPLETED = "completed",
 }
+
+export const DEFAULT_BB_DATASOURCE_ID = "datasource_internal_bb_default"
diff --git a/packages/shared-core/src/constants/rows.ts b/packages/shared-core/src/constants/rows.ts
new file mode 100644
index 0000000000..bfa7595d62
--- /dev/null
+++ b/packages/shared-core/src/constants/rows.ts
@@ -0,0 +1,14 @@
+export const CONSTANT_INTERNAL_ROW_COLS = [
+  "_id",
+  "_rev",
+  "type",
+  "createdAt",
+  "updatedAt",
+  "tableId",
+] as const
+
+export const CONSTANT_EXTERNAL_ROW_COLS = ["_id", "_rev", "tableId"] as const
+
+export function isInternalColumnName(name: string): boolean {
+  return (CONSTANT_INTERNAL_ROW_COLS as readonly string[]).includes(name)
+}
diff --git a/packages/shared-core/src/filters.ts b/packages/shared-core/src/filters.ts
index 1401b75e93..3c6901e195 100644
--- a/packages/shared-core/src/filters.ts
+++ b/packages/shared-core/src/filters.ts
@@ -7,13 +7,18 @@ import {
   SearchFilters,
   SearchQueryFields,
   SearchFilterOperator,
-  SortDirection,
   SortType,
   FieldConstraints,
+  SortOrder,
+  RowSearchParams,
+  EmptyFilterOption,
+  SearchResponse,
+  Table,
 } from "@budibase/types"
 import dayjs from "dayjs"
 import { OperatorOptions, SqlNumberTypeRangeMap } from "./constants"
 import { deepGet, schema } from "./helpers"
+import _ from "lodash"
 
 const HBS_REGEX = /{{([^{].*?)}}/g
 
@@ -127,21 +132,80 @@ const cleanupQuery = (query: SearchFilters) => {
 /**
  * Removes a numeric prefix on field names designed to give fields uniqueness
  */
 export const removeKeyNumbering = (key: string): string => {
+  return getKeyNumbering(key).key
+}
+
+/**
+ * Gets the parts of the key, returning the numeric prefix and the field name
+ */
+export const getKeyNumbering = (
+  key: string
+): { prefix?: string; key: string } => {
   if (typeof key === "string" && key.match(/\d[0-9]*:/g) != null) {
     const parts = key.split(":")
     // remove the number
-    parts.shift()
-    return parts.join(":")
+    const number = parts.shift()
+    return { prefix: `${number}:`, key: parts.join(":") }
   } else {
-    return key
+    return { key }
   }
 }
 
 /**
- * Builds a lucene JSON query from the filter structure generated in the builder
+ * Generates a splitter which can be used to split columns from a context into
+ * their components (number prefix, relationship column/table, column name)
+ */
+export class ColumnSplitter {
+  tableNames: string[]
+  tableIds: string[]
+  relationshipColumnNames: string[]
+  relationships: string[]
+
+  constructor(tables: Table[]) {
+    this.tableNames = tables.map(table => table.name)
+    this.tableIds = tables.map(table => table._id!)
+    this.relationshipColumnNames = tables.flatMap(table =>
+      Object.keys(table.schema).filter(
+        columnName => table.schema[columnName].type === FieldType.LINK
+      )
+    )
+    this.relationships = this.tableNames
+      .concat(this.tableIds)
+      .concat(this.relationshipColumnNames)
+      // sort by length - makes sure there's no mis-matches due to similarities (sub column names)
+      .sort((a, b) => b.length - a.length)
+  }
+
+  run(key: string): {
+    numberPrefix?: string
+    relationshipPrefix?: string
+    column: string
+  } {
+    let { prefix, key: splitKey } = getKeyNumbering(key)
+    let relationship: string | undefined
+    for (let possibleRelationship of this.relationships) {
+      const withDot = `${possibleRelationship}.`
+      if (splitKey.startsWith(withDot)) {
+        const finalKeyParts = splitKey.split(withDot)
+        finalKeyParts.shift()
+        relationship = withDot
+        splitKey = finalKeyParts.join(".")
+        break
+      }
+    }
+    return {
+      numberPrefix: prefix,
+      relationshipPrefix: relationship,
+      column: splitKey,
+    }
+  }
+}
+
+/**
+ * Builds a JSON query from the filter structure generated in the builder
  * @param filter the builder filter structure
 */
-export const buildLuceneQuery = (filter: SearchFilter[]) => {
+export const buildQuery = (filter: SearchFilter[]) => {
   let query: SearchFilters = {
     string: {},
     fuzzy: {},
@@ -246,12 +310,16 @@ export const buildQuery = (filter: SearchFilter[]) => {
         query.equal = query.equal || {}
         query.equal[field] = true
       } else {
-        query[queryOperator] = query[queryOperator] || {}
-        query[queryOperator]![field] = value
+        query[queryOperator] = {
+          ...query[queryOperator],
+          [field]: value,
+        }
       }
     } else {
-      query[queryOperator] = query[queryOperator] || {}
-      query[queryOperator]![field] = value
+      query[queryOperator] = {
+        ...query[queryOperator],
+        [field]: value,
+      }
     }
   })
 
@@ -259,12 +327,31 @@ export const buildQuery = (filter: SearchFilter[]) => {
   return query
 }
 
+export const search = (
+  docs: Record<string, any>[],
+  query: RowSearchParams
+): SearchResponse<Record<string, any>> => {
+  let result = runQuery(docs, query.query)
+  if (query.sort) {
+    result = sort(result, query.sort, query.sortOrder || SortOrder.ASCENDING)
+  }
+  let totalRows = result.length
+  if (query.limit) {
+    result = limit(result, query.limit.toString())
+  }
+  const response: SearchResponse<Record<string, any>> = { rows: result }
+  if (query.countRows) {
+    response.totalRows = totalRows
+  }
+  return response
+}
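`ColumnSplitter` peels a filter key into its components. A sketch, assuming `allTables` contains a table with an `employees` relationship column:

```ts
const splitter = new dataFilters.ColumnSplitter(allTables)

splitter.run("1:employees.name")
// => { numberPrefix: "1:", relationshipPrefix: "employees.", column: "name" }

splitter.run("age")
// => { numberPrefix: undefined, relationshipPrefix: undefined, column: "age" }
```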
query the JSON lucene query + * @param query the JSON query */ -export const runLuceneQuery = (docs: any[], query?: SearchFilters) => { +export const runQuery = (docs: Record[], query: SearchFilters) => { if (!docs || !Array.isArray(docs)) { return [] } @@ -272,105 +359,170 @@ export const runLuceneQuery = (docs: any[], query?: SearchFilters) => { return docs } - // Make query consistent first query = cleanupQuery(query) - // Iterates over a set of filters and evaluates a fail function against a doc + if ( + !hasFilters(query) && + query.onEmptyFilter === EmptyFilterOption.RETURN_NONE + ) { + return [] + } + const match = ( type: SearchFilterOperator, - failFn: (docValue: any, testValue: any) => boolean + test: (docValue: any, testValue: any) => boolean ) => - (doc: any) => { - const filters = Object.entries(query![type] || {}) - for (let i = 0; i < filters.length; i++) { - const [key, testValue] = filters[i] - const docValue = deepGet(doc, removeKeyNumbering(key)) - if (failFn(docValue, testValue)) { + (doc: Record) => { + for (const [key, testValue] of Object.entries(query[type] || {})) { + const result = test(deepGet(doc, removeKeyNumbering(key)), testValue) + if (query.allOr && result) { + return true + } else if (!query.allOr && !result) { return false } } return true } - // Process a string match (fails if the value does not start with the string) const stringMatch = match( SearchFilterOperator.STRING, - (docValue: string, testValue: string) => { - return ( - !docValue || - !docValue?.toLowerCase().startsWith(testValue?.toLowerCase()) - ) + (docValue: any, testValue: any) => { + if (!(typeof docValue === "string")) { + return false + } + if (!(typeof testValue === "string")) { + return false + } + return docValue.toLowerCase().startsWith(testValue.toLowerCase()) } ) - // Process a fuzzy match (treat the same as starts with when running locally) const fuzzyMatch = match( SearchFilterOperator.FUZZY, - (docValue: string, testValue: string) => { - return ( - !docValue || - !docValue?.toLowerCase().startsWith(testValue?.toLowerCase()) - ) + (docValue: any, testValue: any) => { + if (!(typeof docValue === "string")) { + return false + } + if (!(typeof testValue === "string")) { + return false + } + return docValue.toLowerCase().includes(testValue.toLowerCase()) } ) - // Process a range match const rangeMatch = match( SearchFilterOperator.RANGE, - ( - docValue: string | number | null, - testValue: { low: number; high: number } - ) => { + (docValue: any, testValue: any) => { if (docValue == null || docValue === "") { - return true + return false } - if (!isNaN(+docValue)) { - return +docValue < testValue.low || +docValue > testValue.high + + if (_.isObject(testValue.low) && _.isEmpty(testValue.low)) { + testValue.low = undefined } - if (dayjs(docValue).isValid()) { - return ( - new Date(docValue).getTime() < new Date(testValue.low).getTime() || - new Date(docValue).getTime() > new Date(testValue.high).getTime() - ) + + if (_.isObject(testValue.high) && _.isEmpty(testValue.high)) { + testValue.high = undefined } + + if (testValue.low == null && testValue.high == null) { + return false + } + + const docNum = +docValue + if (!isNaN(docNum)) { + const lowNum = +testValue.low + const highNum = +testValue.high + if (!isNaN(lowNum) && !isNaN(highNum)) { + return docNum >= lowNum && docNum <= highNum + } else if (!isNaN(lowNum)) { + return docNum >= lowNum + } else if (!isNaN(highNum)) { + return docNum <= highNum + } + } + + const docDate = dayjs(docValue) + if (docDate.isValid()) { + const lowDate 
= dayjs(testValue.low || "0000-00-00T00:00:00.000Z") + const highDate = dayjs(testValue.high || "9999-00-00T00:00:00.000Z") + if (lowDate.isValid() && highDate.isValid()) { + return ( + (docDate.isAfter(lowDate) && docDate.isBefore(highDate)) || + docDate.isSame(lowDate) || + docDate.isSame(highDate) + ) + } else if (lowDate.isValid()) { + return docDate.isAfter(lowDate) || docDate.isSame(lowDate) + } else if (highDate.isValid()) { + return docDate.isBefore(highDate) || docDate.isSame(highDate) + } + } + + if (testValue.low != null && testValue.high != null) { + return docValue >= testValue.low && docValue <= testValue.high + } else if (testValue.low != null) { + return docValue >= testValue.low + } else if (testValue.high != null) { + return docValue <= testValue.high + } + return false } ) - // Process an equal match (fails if the value is different) - const equalMatch = match( - SearchFilterOperator.EQUAL, - (docValue: any, testValue: string | null) => { - return testValue != null && testValue !== "" && docValue !== testValue + // This function exists to check that either the docValue is equal to the + // testValue, or if the docValue is an object or array of objects, that the + // _id of the docValue is equal to the testValue. + const _valueMatches = (docValue: any, testValue: any) => { + if (Array.isArray(docValue)) { + for (const item of docValue) { + if (_valueMatches(item, testValue)) { + return true + } + } + return false } - ) - // Process a not-equal match (fails if the value is the same) + if ( + docValue && + typeof docValue === "object" && + typeof testValue === "string" + ) { + return docValue._id === testValue + } + + return docValue === testValue + } + + const not = + (f: (...args: T) => boolean) => + (...args: T): boolean => + !f(...args) + + const equalMatch = match(SearchFilterOperator.EQUAL, _valueMatches) const notEqualMatch = match( SearchFilterOperator.NOT_EQUAL, - (docValue: any, testValue: string | null) => { - return testValue != null && testValue !== "" && docValue === testValue - } + not(_valueMatches) ) - // Process an empty match (fails if the value is not empty) - const emptyMatch = match( - SearchFilterOperator.EMPTY, - (docValue: string | null) => { - return docValue != null && docValue !== "" + const _empty = (docValue: any) => { + if (typeof docValue === "string") { + return docValue === "" } - ) - - // Process a not-empty match (fails is the value is empty) - const notEmptyMatch = match( - SearchFilterOperator.NOT_EMPTY, - (docValue: string | null) => { - return docValue == null || docValue === "" + if (Array.isArray(docValue)) { + return docValue.length === 0 } - ) + if (typeof docValue === "object") { + return Object.keys(docValue).length === 0 + } + return docValue == null + } + + const emptyMatch = match(SearchFilterOperator.EMPTY, _empty) + const notEmptyMatch = match(SearchFilterOperator.NOT_EMPTY, not(_empty)) - // Process an includes match (fails if the value is not included) const oneOf = match( SearchFilterOperator.ONE_OF, (docValue: any, testValue: any) => { @@ -380,61 +532,92 @@ export const runLuceneQuery = (docs: any[], query?: SearchFilters) => { testValue = testValue.map((item: string) => parseFloat(item)) } } - return !testValue?.includes(docValue) + + if (!Array.isArray(testValue)) { + return false + } + + return testValue.some(item => _valueMatches(docValue, item)) } ) - const containsAny = match( - SearchFilterOperator.CONTAINS_ANY, - (docValue: any, testValue: any) => { - return !docValue?.includes(...testValue) + const _contains = 
+ (f: "some" | "every") => (docValue: any, testValue: any) => { + if (!Array.isArray(docValue)) { + return false + } + + if (typeof testValue === "string") { + testValue = testValue.split(",") + if (typeof docValue[0] === "number") { + testValue = testValue.map((item: string) => parseFloat(item)) + } + } + + if (!Array.isArray(testValue)) { + return false + } + + if (testValue.length === 0) { + return true + } + + return testValue[f](item => _valueMatches(docValue, item)) } - ) const contains = match( SearchFilterOperator.CONTAINS, - (docValue: string | any[], testValue: any[]) => { - return !testValue?.every((item: any) => docValue?.includes(item)) + (docValue: any, testValue: any) => { + if (Array.isArray(testValue) && testValue.length === 0) { + return true + } + return _contains("every")(docValue, testValue) } ) - const notContains = match( SearchFilterOperator.NOT_CONTAINS, - (docValue: string | any[], testValue: any[]) => { - return testValue?.every((item: any) => docValue?.includes(item)) + (docValue: any, testValue: any) => { + // Not sure if this is logically correct, but at the time this code was + // written the search endpoint behaved this way and we wanted to make this + // local search match its behaviour, so we had to do this. + if (Array.isArray(testValue) && testValue.length === 0) { + return true + } + return not(_contains("every"))(docValue, testValue) } ) + const containsAny = match( + SearchFilterOperator.CONTAINS_ANY, + _contains("some") + ) - const docMatch = (doc: any) => { - const filterFunctions: Record boolean> = - { - string: stringMatch, - fuzzy: fuzzyMatch, - range: rangeMatch, - equal: equalMatch, - notEqual: notEqualMatch, - empty: emptyMatch, - notEmpty: notEmptyMatch, - oneOf: oneOf, - contains: contains, - containsAny: containsAny, - notContains: notContains, - } + const docMatch = (doc: Record) => { + const filterFunctions = { + string: stringMatch, + fuzzy: fuzzyMatch, + range: rangeMatch, + equal: equalMatch, + notEqual: notEqualMatch, + empty: emptyMatch, + notEmpty: notEmptyMatch, + oneOf: oneOf, + contains: contains, + containsAny: containsAny, + notContains: notContains, + } - const activeFilterKeys: SearchFilterOperator[] = Object.entries(query || {}) + const results = Object.entries(query || {}) .filter( - ([key, value]: [string, any]) => + ([key, value]) => !["allOr", "onEmptyFilter"].includes(key) && value && - Object.keys(value as Record).length > 0 + Object.keys(value).length > 0 ) - .map(([key]) => key as any) + .map(([key]) => { + return filterFunctions[key as SearchFilterOperator]?.(doc) ?? false + }) - const results: boolean[] = activeFilterKeys.map(filterKey => { - return filterFunctions[filterKey]?.(doc) ?? false - }) - - if (query!.allOr) { + if (query.allOr) { return results.some(result => result === true) } else { return results.every(result => result === true) @@ -451,27 +634,38 @@ export const runLuceneQuery = (docs: any[], query?: SearchFilters) => { * @param sortOrder the sort order ("ascending" or "descending") * @param sortType the type of sort ("string" or "number") */ -export const luceneSort = ( +export const sort = ( docs: any[], sort: string, - sortOrder: SortDirection, + sortOrder: SortOrder, sortType = SortType.STRING ) => { if (!sort || !sortOrder || !sortType) { return docs } - const parse = - sortType === "string" ? 
-  const parse =
-    sortType === "string" ? (x: any) => `${x}` : (x: string) => parseFloat(x)
+
+  const parse = (x: any) => {
+    if (x == null) {
+      return x
+    }
+    if (sortType === "string") {
+      return `${x}`
+    }
+    return parseFloat(x)
+  }
+
   return docs
     .slice()
     .sort((a: { [x: string]: any }, b: { [x: string]: any }) => {
       const colA = parse(a[sort])
       const colB = parse(b[sort])
+
+      const result = colB == null || colA > colB ? 1 : -1
       if (sortOrder.toLowerCase() === "descending") {
-        return colA > colB ? -1 : 1
-      } else {
-        return colA > colB ? 1 : -1
+        return result * -1
       }
+
+      return result
     })
 }
@@ -481,7 +675,7 @@ export const luceneSort = (
  * @param docs the data
  * @param limit the number of docs to limit to
  */
-export const luceneLimit = (docs: any[], limit: string) => {
+export const limit = (docs: any[], limit: string) => {
   const numLimit = parseFloat(limit)
   if (isNaN(numLimit)) {
     return docs
diff --git a/packages/shared-core/src/helpers/schema.ts b/packages/shared-core/src/helpers/schema.ts
index ad4c237247..caf562a8cb 100644
--- a/packages/shared-core/src/helpers/schema.ts
+++ b/packages/shared-core/src/helpers/schema.ts
@@ -1,5 +1,6 @@
 import {
   BBReferenceFieldSubType,
+  FieldConstraints,
   FieldSchema,
   FieldType,
 } from "@budibase/types"
@@ -16,3 +17,12 @@ export function isDeprecatedSingleUserColumn(
   schema.constraints?.type !== "array"
   return result
 }
+
+export function isRequired(constraints: FieldConstraints | undefined) {
+  const isRequired =
+    !!constraints &&
+    ((typeof constraints.presence !== "boolean" &&
+      constraints.presence?.allowEmpty === false) ||
+      constraints.presence === true)
+  return isRequired
+}
diff --git a/packages/server/src/utilities/tests/schema.spec.ts b/packages/shared-core/src/helpers/tests/schema.spec.ts
similarity index 100%
rename from packages/server/src/utilities/tests/schema.spec.ts
rename to packages/shared-core/src/helpers/tests/schema.spec.ts
diff --git a/packages/shared-core/src/table.ts b/packages/shared-core/src/table.ts
index 7706b78037..8fd7909b18 100644
--- a/packages/shared-core/src/table.ts
+++ b/packages/shared-core/src/table.ts
@@ -1,4 +1,5 @@
-import { FieldType } from "@budibase/types"
+import { FieldType, Table } from "@budibase/types"
+import { CONSTANT_INTERNAL_ROW_COLS } from "./constants"
 
 const allowDisplayColumnByType: Record<FieldType, boolean> = {
   [FieldType.STRING]: true,
@@ -51,3 +52,27 @@ export function canBeDisplayColumn(type: FieldType): boolean {
 export function canBeSortColumn(type: FieldType): boolean {
   return !!allowSortColumnByType[type]
 }
+
+export function findDuplicateInternalColumns(table: Table): string[] {
+  // maintains the case of keys
+  const casedKeys = Object.keys(table.schema)
+  // get the column names
+  const uncasedKeys = casedKeys.map(colName => colName.toLowerCase())
+  // there are duplicates
+  const set = new Set(uncasedKeys)
+  let duplicates: string[] = []
+  if (set.size !== uncasedKeys.length) {
+    for (let key of set.keys()) {
+      const count = uncasedKeys.filter(name => name === key).length
+      if (count > 1) {
+        duplicates.push(key)
+      }
+    }
+  }
+  for (let internalColumn of CONSTANT_INTERNAL_ROW_COLS) {
+    if (casedKeys.find(key => key === internalColumn)) {
+      duplicates.push(internalColumn)
+    }
+  }
+  return duplicates
+}
diff --git a/packages/shared-core/src/tests/filters.test.ts b/packages/shared-core/src/tests/filters.test.ts
deleted file mode 100644
index f188c5f951..0000000000
--- a/packages/shared-core/src/tests/filters.test.ts
+++ /dev/null
@@ -1,415 +0,0 @@
-import {
-  SearchFilters,
-  SearchFilterOperator,
-  FieldType,
-  SearchFilter,
-} from 
"@budibase/types" -import { buildLuceneQuery, runLuceneQuery } from "../filters" - -describe("runLuceneQuery", () => { - const docs = [ - { - order_id: 1, - customer_id: 259, - order_status: 4, - order_date: "2016-01-01T00:00:00.000Z", - required_date: "2016-01-03T00:00:00.000Z", - shipped_date: "2016-01-03T00:00:00.000Z", - store_id: 1, - staff_id: 2, - description: "Large box", - label: undefined, - }, - { - order_id: 2, - customer_id: 1212, - order_status: 4, - order_date: "2016-01-05T00:00:00.000Z", - required_date: "2016-01-04T00:00:00.000Z", - shipped_date: "2016-01-03T00:00:00.000Z", - store_id: 2, - staff_id: 6, - description: "Small box", - label: "FRAGILE", - }, - { - order_id: 3, - customer_id: 523, - order_status: 5, - order_date: "2016-01-12T00:00:00.000Z", - required_date: "2016-01-05T00:00:00.000Z", - shipped_date: "2016-01-03T00:00:00.000Z", - store_id: 2, - staff_id: 7, - description: "Heavy box", - label: "HEAVY", - }, - ] - - function buildQuery(filters: { [filterKey: string]: any }): SearchFilters { - const query: SearchFilters = { - string: {}, - fuzzy: {}, - range: {}, - equal: {}, - notEqual: {}, - empty: {}, - notEmpty: {}, - contains: {}, - notContains: {}, - oneOf: {}, - containsAny: {}, - allOr: false, - } - - for (const filterKey in filters) { - query[filterKey as SearchFilterOperator] = filters[filterKey] - } - - return query - } - - it("should return input docs if no search query is provided", () => { - expect(runLuceneQuery(docs)).toBe(docs) - }) - - it("should return matching rows for equal filter", () => { - const query = buildQuery({ - equal: { order_status: 4 }, - }) - expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual([1, 2]) - }) - - it("should return matching row for notEqual filter", () => { - const query = buildQuery({ - notEqual: { order_status: 4 }, - }) - - expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual([3]) - }) - - it("should return starts with matching rows for fuzzy and string filters", () => { - expect( - runLuceneQuery( - docs, - buildQuery({ - fuzzy: { description: "sm" }, - }) - ).map(row => row.description) - ).toEqual(["Small box"]) - expect( - runLuceneQuery( - docs, - buildQuery({ - string: { description: "SM" }, - }) - ).map(row => row.description) - ).toEqual(["Small box"]) - }) - - it("should return rows within a range filter", () => { - const query = buildQuery({ - range: { - customer_id: { - low: 500, - high: 1000, - }, - }, - }) - - expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual([3]) - }) - - it("should return rows with numeric strings within a range filter", () => { - const query = buildQuery({ - range: { - customer_id: { - low: "500", - high: "1000", - }, - }, - }) - expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual([3]) - }) - - it("should return rows with ISO date strings within a range filter", () => { - const query = buildQuery({ - range: { - order_date: { - low: "2016-01-04T00:00:00.000Z", - high: "2016-01-11T00:00:00.000Z", - }, - }, - }) - - expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual([2]) - }) - - it("should return return all docs if an invalid doc value is passed into a range filter", async () => { - const docs = [ - { - order_id: 4, - customer_id: 1758, - order_status: 5, - order_date: "{{ Binding.INVALID }}", - required_date: "2017-03-05T00:00:00.000Z", - shipped_date: "2017-03-03T00:00:00.000Z", - store_id: 2, - staff_id: 7, - description: undefined, - label: "", - }, - ] - - const query = buildQuery({ - range: 
{ - order_date: { - low: "2016-01-04T00:00:00.000Z", - high: "2016-01-11T00:00:00.000Z", - }, - }, - }) - - expect(runLuceneQuery(docs, query)).toEqual(docs) - }) - - it("should return rows with matches on empty filter", () => { - const query = buildQuery({ - empty: { - label: null, - }, - }) - - expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual([1]) - }) - - it("should return rows with matches on notEmpty filter", () => { - const query = buildQuery({ - notEmpty: { - label: null, - }, - }) - - expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual([2, 3]) - }) - - it.each([[523, 259], "523,259"])( - "should return rows with matches on numeric oneOf filter", - input => { - const query = buildQuery({ - oneOf: { - customer_id: input, - }, - }) - - expect(runLuceneQuery(docs, query).map(row => row.customer_id)).toEqual([ - 259, 523, - ]) - } - ) - - it.each([ - [false, []], - [true, [1, 2, 3]], - ])("should return %s if allOr is %s ", (allOr, expectedResult) => { - const query = buildQuery({ - allOr, - oneOf: { staff_id: [10] }, - contains: { description: ["box"] }, - }) - - expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual( - expectedResult - ) - }) - - it("should return matching results if allOr is true and only one filter matches with different operands", () => { - const query = buildQuery({ - allOr: true, - equal: { order_status: 4 }, - oneOf: { label: ["FRAGILE"] }, - }) - - expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual([1, 2]) - }) - - it("should handle when a value is null or undefined", () => { - const query = buildQuery({ - allOr: true, - equal: { order_status: null }, - oneOf: { label: ["FRAGILE"] }, - }) - - expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual([2]) - }) -}) - -describe("buildLuceneQuery", () => { - it("should return a basic search query template if the input is not an array", () => { - const filter: any = "NOT_AN_ARRAY" - expect(buildLuceneQuery(filter)).toEqual({ - string: {}, - fuzzy: {}, - range: {}, - equal: {}, - notEqual: {}, - empty: {}, - notEmpty: {}, - contains: {}, - notContains: {}, - oneOf: {}, - containsAny: {}, - }) - }) - - it("should parseFloat if the type is a number, but the value is a numeric string", () => { - const filter: SearchFilter[] = [ - { - operator: SearchFilterOperator.EQUAL, - field: "customer_id", - type: FieldType.NUMBER, - value: "1212", - }, - { - operator: SearchFilterOperator.ONE_OF, - field: "customer_id", - type: FieldType.NUMBER, - value: "1000,1212,3400", - }, - ] - expect(buildLuceneQuery(filter)).toEqual({ - string: {}, - fuzzy: {}, - range: {}, - equal: { - customer_id: 1212, - }, - notEqual: {}, - empty: {}, - notEmpty: {}, - contains: {}, - notContains: {}, - oneOf: { - customer_id: [1000, 1212, 3400], - }, - containsAny: {}, - }) - }) - - it("should not parseFloat if the type is a number, but the value is a handlebars binding string", () => { - const filter: SearchFilter[] = [ - { - operator: SearchFilterOperator.EQUAL, - field: "customer_id", - type: FieldType.NUMBER, - value: "{{ customer_id }}", - }, - { - operator: SearchFilterOperator.ONE_OF, - field: "customer_id", - type: FieldType.NUMBER, - value: "{{ list_of_customer_ids }}", - }, - ] - expect(buildLuceneQuery(filter)).toEqual({ - string: {}, - fuzzy: {}, - range: {}, - equal: { - customer_id: "{{ customer_id }}", - }, - notEqual: {}, - empty: {}, - notEmpty: {}, - contains: {}, - notContains: {}, - oneOf: { - customer_id: "{{ list_of_customer_ids }}", - }, - 
containsAny: {}, - }) - }) - - it("should cast string to boolean if the type is boolean", () => { - const filter: SearchFilter[] = [ - { - operator: SearchFilterOperator.EQUAL, - field: "a", - type: FieldType.BOOLEAN, - value: "not_true", - }, - { - operator: SearchFilterOperator.NOT_EQUAL, - field: "b", - type: FieldType.BOOLEAN, - value: "not_true", - }, - { - operator: SearchFilterOperator.EQUAL, - field: "c", - type: FieldType.BOOLEAN, - value: "true", - }, - ] - expect(buildLuceneQuery(filter)).toEqual({ - string: {}, - fuzzy: {}, - range: {}, - equal: { - b: true, - c: true, - }, - notEqual: { - a: true, - }, - empty: {}, - notEmpty: {}, - contains: {}, - notContains: {}, - oneOf: {}, - containsAny: {}, - }) - }) - - it("should split the string for contains operators", () => { - const filter: SearchFilter[] = [ - { - operator: SearchFilterOperator.CONTAINS, - field: "description", - type: FieldType.ARRAY, - value: "Large box,Heavy box,Small box", - }, - { - operator: SearchFilterOperator.NOT_CONTAINS, - field: "description", - type: FieldType.ARRAY, - value: "Large box,Heavy box,Small box", - }, - { - operator: SearchFilterOperator.CONTAINS_ANY, - field: "description", - type: FieldType.ARRAY, - value: "Large box,Heavy box,Small box", - }, - ] - expect(buildLuceneQuery(filter)).toEqual({ - string: {}, - fuzzy: {}, - range: {}, - equal: {}, - notEqual: {}, - empty: {}, - notEmpty: {}, - contains: { - description: ["Large box", "Heavy box", "Small box"], - }, - notContains: { - description: ["Large box", "Heavy box", "Small box"], - }, - oneOf: {}, - containsAny: { - description: ["Large box", "Heavy box", "Small box"], - }, - }) - }) -}) diff --git a/packages/string-templates/package.json b/packages/string-templates/package.json index 8cf8d92692..238182a5a4 100644 --- a/packages/string-templates/package.json +++ b/packages/string-templates/package.json @@ -21,7 +21,7 @@ "scripts": { "build": "tsc --emitDeclarationOnly && rollup -c", "dev": "rollup -cw", - "check:types": "tsc -p tsconfig.json --noEmit --paths null", + "check:types": "tsc -p tsconfig.json --noEmit --paths null --target es2020", "test": "jest", "manifest": "ts-node ./scripts/gen-collection-info.ts" }, @@ -45,6 +45,6 @@ "rollup-plugin-node-resolve": "^5.2.0", "rollup-plugin-terser": "^7.0.2", "ts-jest": "29.1.1", - "typescript": "5.2.2" + "typescript": "5.5.2" } } diff --git a/packages/string-templates/src/helpers/javascript.ts b/packages/string-templates/src/helpers/javascript.ts index 931cc46dc7..3e16d8a07b 100644 --- a/packages/string-templates/src/helpers/javascript.ts +++ b/packages/string-templates/src/helpers/javascript.ts @@ -33,7 +33,12 @@ const removeSquareBrackets = (value: string) => { // Our context getter function provided to JS code as $. // Extracts a value from context. 
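Aside (illustrative sketch, not part of the patch): what the literal-string short-circuit added below accepts and rejects:

    const literalStringRegex = /^(["'`]).*\1$/
    literalStringRegex.test(`'Custom'`)   // true  -> $("'Custom'") returns "Custom"
    literalStringRegex.test("`Custom`")   // true  -> backtick-quoted literals too
    literalStringRegex.test("user.name")  // false -> resolved as a context path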
const getContextValue = (path: string, context: any) => { + const literalStringRegex = /^(["'`]).*\1$/ let data = context + // check if it's a literal string - just return path if its quoted + if (literalStringRegex.test(path)) { + return path.substring(1, path.length - 1) + } path.split(".").forEach(key => { if (data == null || typeof data !== "object") { return null diff --git a/packages/string-templates/src/processors/preprocessor.ts b/packages/string-templates/src/processors/preprocessor.ts index 010c259e12..5e96336e32 100644 --- a/packages/string-templates/src/processors/preprocessor.ts +++ b/packages/string-templates/src/processors/preprocessor.ts @@ -7,6 +7,7 @@ export const PreprocessorNames = { SWAP_TO_DOT: "swap-to-dot-notation", FIX_FUNCTIONS: "fix-functions", FINALISE: "finalise", + NORMALIZE_SPACES: "normalize-spaces", } class Preprocessor { @@ -50,6 +51,9 @@ export const processors = [ return statement }), + new Preprocessor(PreprocessorNames.NORMALIZE_SPACES, (statement: string) => { + return statement.replace(/{{(\s{2,})/g, "{{ ") + }), new Preprocessor( PreprocessorNames.FINALISE, (statement: string, opts: { noHelpers: any }) => { diff --git a/packages/string-templates/test/basic.spec.ts b/packages/string-templates/test/basic.spec.ts index ddea54c2bf..24a19131f4 100644 --- a/packages/string-templates/test/basic.spec.ts +++ b/packages/string-templates/test/basic.spec.ts @@ -320,3 +320,21 @@ describe("should leave HBS blocks if not found using option", () => { expect(output).toBe("{{ a }}, 1") }) }) + +describe("check multiple space behaviour", () => { + it("should remove whitespace and use the helper correctly", async () => { + const output = await processString("{{ add num1 num2 }}", { + num1: 1, + num2: 2, + }) + expect(output).toEqual("3") + }) + + it("should ensure that whitespace within a string is respected", async () => { + const output = await processString("{{ trimRight 'test string ' }}", { + num1: 1, + num2: 2, + }) + expect(output).toEqual("test string") + }) +}) diff --git a/packages/string-templates/test/javascript.spec.ts b/packages/string-templates/test/javascript.spec.ts index cb2f765007..99e6ee122a 100644 --- a/packages/string-templates/test/javascript.spec.ts +++ b/packages/string-templates/test/javascript.spec.ts @@ -149,4 +149,11 @@ describe("Javascript", () => { expect(output).toMatch(UUID_REGEX) }) }) + + describe("JS literal strings", () => { + it("should be able to handle a literal string that is quoted (like role IDs)", () => { + const output = processJS(`return $("'Custom'")`) + expect(output).toBe("Custom") + }) + }) }) diff --git a/packages/types/package.json b/packages/types/package.json index f4c7b13344..c44fff971e 100644 --- a/packages/types/package.json +++ b/packages/types/package.json @@ -11,7 +11,7 @@ "build": "node ../../scripts/build.js && tsc -p tsconfig.build.json --emitDeclarationOnly", "build:dev": "yarn prebuild && tsc --build --watch --preserveWatchOutput", "dev": "tsc -p tsconfig.json --watch --preserveWatchOutput", - "check:types": "tsc -p tsconfig.json --noEmit --paths null" + "check:types": "tsc -p tsconfig.json --noEmit --paths null --target es2020" }, "jest": {}, "devDependencies": { @@ -20,7 +20,7 @@ "@types/pouchdb": "6.4.0", "@types/redlock": "4.0.7", "rimraf": "3.0.2", - "typescript": "5.2.2" + "typescript": "5.5.2" }, "dependencies": { "scim-patch": "^0.8.1" diff --git a/packages/types/src/api/web/app/rows.ts b/packages/types/src/api/web/app/rows.ts index 5d49f01bfc..c120af0628 100644 --- 
a/packages/types/src/api/web/app/rows.ts
+++ b/packages/types/src/api/web/app/rows.ts
@@ -25,6 +25,7 @@ export interface SearchViewRowRequest
     | "bookmark"
     | "paginate"
     | "query"
+    | "countRows"
   > {}
 
 export interface SearchRowResponse {
diff --git a/packages/types/src/api/web/system/index.ts b/packages/types/src/api/web/system/index.ts
index d19c9296c9..8820bf5cd1 100644
--- a/packages/types/src/api/web/system/index.ts
+++ b/packages/types/src/api/web/system/index.ts
@@ -1 +1,2 @@
 export * from "./environment"
+export * from "./status"
diff --git a/packages/types/src/api/web/system/status.ts b/packages/types/src/api/web/system/status.ts
new file mode 100644
index 0000000000..3d64cc4d97
--- /dev/null
+++ b/packages/types/src/api/web/system/status.ts
@@ -0,0 +1,11 @@
+export type SystemStatusResponse = {
+  passing?: boolean
+  checks?: {
+    login: boolean
+    search: boolean
+  }
+  health?: {
+    passing: boolean
+  }
+  version?: string
+}
diff --git a/packages/types/src/api/web/user.ts b/packages/types/src/api/web/user.ts
index f59bda133b..471ca86616 100644
--- a/packages/types/src/api/web/user.ts
+++ b/packages/types/src/api/web/user.ts
@@ -45,7 +45,12 @@ export interface InviteUserRequest {
   userInfo: any
 }
 
+export interface DeleteInviteUserRequest {
+  code: string
+}
+
 export type InviteUsersRequest = InviteUserRequest[]
+export type DeleteInviteUsersRequest = DeleteInviteUserRequest[]
 
 export interface InviteUsersResponse {
   successful: { email: string }[]
diff --git a/packages/types/src/documents/account/account.ts b/packages/types/src/documents/account/account.ts
index 239d845722..61792a7f47 100644
--- a/packages/types/src/documents/account/account.ts
+++ b/packages/types/src/documents/account/account.ts
@@ -42,10 +42,7 @@ export interface Account extends CreateAccount {
   verified: boolean
   verificationSent: boolean
   // licensing
-  tier: string // deprecated
   planType?: PlanType
-  /** @deprecated */
-  planTier?: number
   license?: License
   installId?: string
   installTenantId?: string
diff --git a/packages/types/src/documents/app/automation.ts b/packages/types/src/documents/app/automation.ts
index 6d1753dc28..6ea62ffffb 100644
--- a/packages/types/src/documents/app/automation.ts
+++ b/packages/types/src/documents/app/automation.ts
@@ -2,6 +2,8 @@ import { Document } from "../document"
 import { EventEmitter } from "events"
 import { User } from "../global"
 import { ReadStream } from "fs"
+import { Row } from "./row"
+import { Table } from "./table"
 
 export enum AutomationIOType {
   OBJECT = "object",
@@ -142,7 +144,7 @@ interface BaseIOStructure {
   required?: string[]
 }
 
-interface InputOutputBlock {
+export interface InputOutputBlock {
   properties: {
     [key: string]: BaseIOStructure
   }
@@ -245,10 +247,17 @@ export type AutomationAttachment = {
 
 export type AutomationAttachmentContent = {
   filename: string
-  content: ReadStream | NodeJS.ReadableStream | ReadableStream
+  content: ReadStream | NodeJS.ReadableStream
 }
 
 export type BucketedContent = AutomationAttachmentContent & {
   bucket: string
   path: string
 }
+
+export type UpdatedRowEventEmitter = {
+  row: Row
+  oldRow: Row
+  table: Table
+  appId: string
+}
diff --git a/packages/types/src/documents/app/sqlite.ts b/packages/types/src/documents/app/sqlite.ts
index 5636fef15b..516669bd59 100644
--- a/packages/types/src/documents/app/sqlite.ts
+++ b/packages/types/src/documents/app/sqlite.ts
@@ -30,4 +30,7 @@ export interface SQLiteDefinition {
   }
 }
 
-export type PreSaveSQLiteDefinition = Omit<SQLiteDefinition, "_rev">
+export interface PreSaveSQLiteDefinition
+  extends Omit<SQLiteDefinition, "_rev"> {
+  _rev?: string
+}
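Aside (illustrative sketch, not part of the patch): with countRows picked into SearchViewRowRequest above, a view search can ask for the total matching row count alongside a page of results; values invented, optionality of the other fields as per the Pick'd SearchRowRequest:

    import { SearchViewRowRequest } from "@budibase/types"

    const req: SearchViewRowRequest = {
      query: {},
      limit: 10,
      paginate: true,
      countRows: true, // new: also compute the total number of matching rows
    }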
diff --git a/packages/types/src/documents/global/index.ts b/packages/types/src/documents/global/index.ts
index b728439dd6..6784f2638c 100644
--- a/packages/types/src/documents/global/index.ts
+++ b/packages/types/src/documents/global/index.ts
@@ -7,3 +7,4 @@ export * from "./schedule"
 export * from "./templates"
 export * from "./environmentVariables"
 export * from "./auditLogs"
+export * from "./tenantInfo"
diff --git a/packages/types/src/documents/global/tenantInfo.ts b/packages/types/src/documents/global/tenantInfo.ts
new file mode 100644
index 0000000000..4c8837cf2a
--- /dev/null
+++ b/packages/types/src/documents/global/tenantInfo.ts
@@ -0,0 +1,15 @@
+import { Hosting } from "../../sdk"
+import { Document } from "../document"
+
+export interface TenantInfo extends Document {
+  owner: {
+    email: string
+    password?: string
+    ssoId?: string
+    givenName?: string
+    familyName?: string
+    budibaseUserId?: string
+  }
+  tenantId: string
+  hosting: Hosting
+}
diff --git a/packages/types/src/sdk/datasources.ts b/packages/types/src/sdk/datasources.ts
index 77e4877dfa..bdad9c4825 100644
--- a/packages/types/src/sdk/datasources.ts
+++ b/packages/types/src/sdk/datasources.ts
@@ -8,7 +8,9 @@ export enum Operation {
   READ = "READ",
   UPDATE = "UPDATE",
   DELETE = "DELETE",
+  COUNT = "COUNT",
   BULK_CREATE = "BULK_CREATE",
+  BULK_UPSERT = "BULK_UPSERT",
   CREATE_TABLE = "CREATE_TABLE",
   UPDATE_TABLE = "UPDATE_TABLE",
   DELETE_TABLE = "DELETE_TABLE",
@@ -20,13 +22,9 @@ export const RowOperations = [
   Operation.UPDATE,
   Operation.DELETE,
   Operation.BULK_CREATE,
+  Operation.BULK_UPSERT,
 ]
 
-export enum SortDirection {
-  ASCENDING = "ASCENDING",
-  DESCENDING = "DESCENDING",
-}
-
 export enum QueryType {
   SQL = "sql",
   JSON = "json",
@@ -191,7 +189,7 @@ export interface Schema {
 }
 
 // return these when an operation occurred but we got no response
-enum DSPlusOperation {
+export enum DSPlusOperation {
   CREATE = "create",
   READ = "read",
   UPDATE = "update",
@@ -201,6 +199,7 @@
 export type DatasourcePlusQueryResponse =
   | Row[]
   | Record<string, any>[]
+  | { total: number }[]
   | void
 
 export interface DatasourcePlus extends IntegrationBase {
diff --git a/packages/types/src/sdk/db.ts b/packages/types/src/sdk/db.ts
index 7ad740ad05..63c37195b7 100644
--- a/packages/types/src/sdk/db.ts
+++ b/packages/types/src/sdk/db.ts
@@ -165,3 +165,13 @@ export interface Database {
   deleteIndex(...args: any[]): Promise<any>
   getIndexes(...args: any[]): Promise<any>
 }
+
+export interface DBError extends Error {
+  status: number
+  statusCode: number
+  reason: string
+  name: string
+  errid: string
+  error: string
+  description: string
+}
diff --git a/packages/types/src/sdk/row.ts b/packages/types/src/sdk/row.ts
index 7f3fc1f391..b0b137034b 100644
--- a/packages/types/src/sdk/row.ts
+++ b/packages/types/src/sdk/row.ts
@@ -17,6 +17,7 @@
   fields?: string[]
   indexer?: () => Promise<any>
   rows?: Row[]
+  countRows?: boolean
 }
 
 // when searching for rows we want a more extensive search type that requires certain properties
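Aside (illustrative sketch, not part of the patch): the new DBError interface gives database-layer errors a concrete shape; a possible narrowing guard in calling code (the guard itself is not part of this change):

    import { DBError } from "@budibase/types"

    function isDBError(err: unknown): err is DBError {
      return (
        err instanceof Error &&
        "status" in err &&
        "error" in err &&
        "reason" in err
      )
    }
    // e.g. if (isDBError(e) && e.status === 409) { /* document conflict */ }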
diff --git a/packages/types/src/sdk/search.ts b/packages/types/src/sdk/search.ts
index dc8ea09bcc..c40f1c3b84 100644
--- a/packages/types/src/sdk/search.ts
+++ b/packages/types/src/sdk/search.ts
@@ -1,6 +1,6 @@
-import { Operation, SortDirection } from "./datasources"
+import { Operation } from "./datasources"
 import { Row, Table, DocumentType } from "../documents"
-import { SortType } from "../api"
+import { SortOrder, SortType } from "../api"
 import { Knex } from "knex"
 
 export enum SearchFilterOperator {
@@ -77,7 +77,7 @@ export type SearchQueryFields = Omit<SearchFilters, "allOr">
 
 export interface SortJson {
   [key: string]: {
-    direction: SortDirection
+    direction: SortOrder
     type?: SortType
   }
 }
@@ -85,6 +85,7 @@ export interface SortJson {
 export interface PaginationJson {
   limit: number
   page?: string | number
+  offset?: number
 }
 
 export interface RenameColumn {
@@ -121,6 +122,8 @@ export interface QueryJson {
     table: Table
     tables?: Record<string, Table>
     renamed?: RenameColumn
+    // can specify something that columns could be prefixed with
+    columnPrefix?: string
   }
   extra?: {
     idFilter?: SearchFilters
diff --git a/packages/worker/package.json b/packages/worker/package.json
index 95410668da..02f1181e91 100644
--- a/packages/worker/package.json
+++ b/packages/worker/package.json
@@ -15,7 +15,7 @@
     "prebuild": "rimraf dist/",
     "build": "node ../../scripts/build.js",
     "postbuild": "copyfiles -f ../../yarn.lock ./dist/",
-    "check:types": "tsc -p tsconfig.json --noEmit --paths null",
+    "check:types": "tsc -p tsconfig.json --noEmit --paths null --target es2020",
    "build:dev": "yarn prebuild && tsc --build --watch --preserveWatchOutput",
     "run:docker": "node dist/index.js",
     "debug": "yarn build && node --expose-gc --inspect=9223 dist/index.js",
@@ -91,7 +91,7 @@
     "rimraf": "3.0.2",
     "supertest": "6.3.3",
     "timekeeper": "2.2.0",
-    "typescript": "5.2.2",
+    "typescript": "5.5.2",
     "update-dotenv": "1.1.1"
   },
   "nx": {
diff --git a/packages/worker/scripts/dev/manage.js b/packages/worker/scripts/dev/manage.js
index acab87eb5e..e120b93881 100644
--- a/packages/worker/scripts/dev/manage.js
+++ b/packages/worker/scripts/dev/manage.js
@@ -31,6 +31,7 @@ async function init() {
     HTTP_LOGGING: "0",
     VERSION: "0.0.0+local",
     PASSWORD_MIN_LENGTH: "1",
+    SQS_SEARCH_ENABLE: "1",
   }
   config = { ...config, ...existingConfig }
diff --git a/packages/worker/src/api/controllers/global/tenant.ts b/packages/worker/src/api/controllers/global/tenant.ts
new file mode 100644
index 0000000000..8b5ae6d528
--- /dev/null
+++ b/packages/worker/src/api/controllers/global/tenant.ts
@@ -0,0 +1,14 @@
+import { tenancy } from "@budibase/backend-core"
+import { TenantInfo, Ctx } from "@budibase/types"
+
+export const save = async (ctx: Ctx<TenantInfo>) => {
+  const response = await tenancy.saveTenantInfo(ctx.request.body)
+  ctx.body = {
+    _id: response.id,
+    _rev: response.rev,
+  }
+}
+
+export const get = async (ctx: Ctx) => {
+  ctx.body = await tenancy.getTenantInfo(ctx.params.id)
+}
diff --git a/packages/worker/src/api/controllers/global/users.ts b/packages/worker/src/api/controllers/global/users.ts
index 46bf13284e..cd69281f56 100644
--- a/packages/worker/src/api/controllers/global/users.ts
+++ b/packages/worker/src/api/controllers/global/users.ts
@@ -10,6 +10,8 @@ import {
   CreateAdminUserRequest,
   CreateAdminUserResponse,
   Ctx,
+  DeleteInviteUserRequest,
+  DeleteInviteUsersRequest,
   InviteUserRequest,
   InviteUsersRequest,
   InviteUsersResponse,
@@ -335,6 +337,20 @@ export const inviteMultiple = async (ctx: Ctx<InviteUsersRequest>) => {
   ctx.body = await userSdk.invite(ctx.request.body)
 }
 
+export const removeMultipleInvites = async (
+  ctx: Ctx<DeleteInviteUsersRequest>
+) => {
+  const inviteCodesToRemove = ctx.request.body.map(
+    (invite: DeleteInviteUserRequest) => invite.code
+  )
+  for (const code of inviteCodesToRemove) {
+    await cache.invite.deleteCode(code)
+  }
+  ctx.body = {
+    message: "User invites successfully removed.",
+  }
+}
+
 export const checkInvite = async (ctx: any) => {
   const { code } = ctx.params
   let invite
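Aside (illustrative sketch, not part of the patch): the request body accepted by the new removeMultipleInvites handler above, matching the DeleteInviteUsersRequest type added earlier; codes invented:

    import { DeleteInviteUsersRequest } from "@budibase/types"

    const body: DeleteInviteUsersRequest = [
      { code: "invite-code-1" },
      { code: "invite-code-2" },
    ]
    // POSTed to /api/global/users/multi/invite/delete (registered further down);
    // the handler deletes each code via cache.invite.deleteCode(code).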
diff --git a/packages/worker/src/api/controllers/system/status.ts b/packages/worker/src/api/controllers/system/status.ts
index b763a67d4f..662ca05c48 100644
--- a/packages/worker/src/api/controllers/system/status.ts
+++ b/packages/worker/src/api/controllers/system/status.ts
@@ -1,16 +1,24 @@
-import { accounts } from "@budibase/backend-core"
+import { accounts, env as coreEnv } from "@budibase/backend-core"
+import { Ctx, SystemStatusResponse } from "@budibase/types"
 import env from "../../../environment"
-import { BBContext } from "@budibase/types"
 
-export const fetch = async (ctx: BBContext) => {
+export const fetch = async (ctx: Ctx<void, SystemStatusResponse>) => {
+  let status: SystemStatusResponse | undefined
   if (!env.SELF_HOSTED && !env.DISABLE_ACCOUNT_PORTAL) {
-    const status = await accounts.getStatus()
-    ctx.body = status
-  } else {
-    ctx.body = {
+    status = await accounts.getStatus()
+  }
+
+  if (!status) {
+    status = {
       health: {
         passing: true,
       },
     }
   }
+
+  if (coreEnv.VERSION) {
+    status.version = coreEnv.VERSION
+  }
+
+  ctx.body = status
 }
diff --git a/packages/worker/src/api/index.ts b/packages/worker/src/api/index.ts
index 82495df4ee..08c65b98d4 100644
--- a/packages/worker/src/api/index.ts
+++ b/packages/worker/src/api/index.ts
@@ -76,6 +76,10 @@ const PUBLIC_ENDPOINTS = [
     route: "/api/global/users/invite",
     method: "GET",
   },
+  {
+    route: "/api/global/tenant",
+    method: "POST",
+  },
 ]
 
 const NO_TENANCY_ENDPOINTS = [
@@ -121,6 +125,14 @@
     route: "/api/global/users/invite/:code",
     method: "GET",
   },
+  {
+    route: "/api/global/tenant",
+    method: "POST",
+  },
+  {
+    route: "/api/global/tenant/:id",
+    method: "GET",
+  },
 ]
 
 // most public endpoints are gets, but some are posts
diff --git a/packages/worker/src/api/routes/global/tenant.ts b/packages/worker/src/api/routes/global/tenant.ts
new file mode 100644
index 0000000000..18b7058c34
--- /dev/null
+++ b/packages/worker/src/api/routes/global/tenant.ts
@@ -0,0 +1,36 @@
+import Router from "@koa/router"
+import Joi from "joi"
+import { auth } from "@budibase/backend-core"
+import * as controller from "../../controllers/global/tenant"
+import cloudRestricted from "../../../middleware/cloudRestricted"
+
+const router: Router = new Router()
+const OPTIONAL_STRING = Joi.string().optional().allow(null).allow("")
+
+function buildTenantInfoValidation() {
+  return auth.joiValidator.body(
+    Joi.object({
+      owner: Joi.object({
+        email: Joi.string().required(),
+        password: OPTIONAL_STRING,
+        ssoId: OPTIONAL_STRING,
+        givenName: OPTIONAL_STRING,
+        familyName: OPTIONAL_STRING,
+        budibaseUserId: OPTIONAL_STRING,
+      }).required(),
+      hosting: Joi.string().required(),
+      tenantId: Joi.string().required(),
+    }).required()
+  )
+}
+
+router
+  .post(
+    "/api/global/tenant",
+    cloudRestricted,
+    buildTenantInfoValidation(),
+    controller.save
+  )
+  .get("/api/global/tenant/:id", controller.get)
+
+export default router
diff --git a/packages/worker/src/api/routes/global/tests/tenant.spec.ts b/packages/worker/src/api/routes/global/tests/tenant.spec.ts
new file mode 100644
index 0000000000..e039b4139b
--- /dev/null
+++ b/packages/worker/src/api/routes/global/tests/tenant.spec.ts
@@ -0,0 +1,48 @@
+import { Hosting, TenantInfo } from "@budibase/types"
+import { TestConfiguration } from "../../../../tests"
+import { tenancy as _tenancy } from "@budibase/backend-core"
+
+const tenancy = jest.mocked(_tenancy)
+
+describe("/api/global/tenant", () => {
+  const config = new TestConfiguration()
+
+  beforeAll(async () => {
+    await config.beforeAll()
+  })
+
+  afterAll(async () => {
+    await config.afterAll()
+  })
+
+  beforeEach(() => {
+    jest.clearAllMocks()
+  })
+
+  describe("POST /api/global/tenant", () => {
+    it("should 
save the tenantInfo", async () => { + tenancy.saveTenantInfo = jest.fn().mockImplementation(async () => ({ + id: "DOC_ID", + ok: true, + rev: "DOC_REV", + })) + const tenantInfo: TenantInfo = { + owner: { + email: "test@example.com", + password: "PASSWORD", + ssoId: "SSO_ID", + givenName: "Jane", + familyName: "Doe", + budibaseUserId: "USER_ID", + }, + tenantId: "tenant123", + hosting: Hosting.CLOUD, + } + const response = await config.api.tenants.saveTenantInfo(tenantInfo) + + expect(_tenancy.saveTenantInfo).toHaveBeenCalledTimes(1) + expect(_tenancy.saveTenantInfo).toHaveBeenCalledWith(tenantInfo) + expect(response.text).toEqual('{"_id":"DOC_ID","_rev":"DOC_REV"}') + }) + }) +}) diff --git a/packages/worker/src/api/routes/global/users.ts b/packages/worker/src/api/routes/global/users.ts index b40c491830..d5dfa47923 100644 --- a/packages/worker/src/api/routes/global/users.ts +++ b/packages/worker/src/api/routes/global/users.ts @@ -4,7 +4,6 @@ import { auth } from "@budibase/backend-core" import Joi from "joi" import cloudRestricted from "../../../middleware/cloudRestricted" import { users } from "../validation" -import * as selfController from "../../controllers/global/self" const router: Router = new Router() const OPTIONAL_STRING = Joi.string().optional().allow(null).allow("") @@ -108,6 +107,11 @@ router buildInviteMultipleValidation(), controller.inviteMultiple ) + .post( + "/api/global/users/multi/invite/delete", + auth.builderOrAdmin, + controller.removeMultipleInvites + ) // non-global endpoints .get("/api/global/users/invite/:code", controller.checkInvite) @@ -135,12 +139,5 @@ router .get("/api/global/users/tenant/:id", controller.tenantUserLookup) // global endpoint but needs to come at end (blocks other endpoints otherwise) .get("/api/global/users/:id", auth.builderOrAdmin, controller.find) - // DEPRECATED - use new versions with self API - .get("/api/global/users/self", selfController.getSelf) - .post( - "/api/global/users/self", - users.buildUserSaveValidation(), - selfController.updateSelf - ) export default router diff --git a/packages/worker/src/api/routes/index.ts b/packages/worker/src/api/routes/index.ts index e6cacf110f..2eb4b5cd5d 100644 --- a/packages/worker/src/api/routes/index.ts +++ b/packages/worker/src/api/routes/index.ts @@ -1,6 +1,7 @@ import Router from "@koa/router" import { api as pro } from "@budibase/pro" import userRoutes from "./global/users" +import tenantRoutes from "./global/tenant" import configRoutes from "./global/configs" import workspaceRoutes from "./global/workspaces" import templateRoutes from "./global/templates" @@ -40,6 +41,7 @@ export const routes: Router[] = [ accountRoutes, restoreRoutes, eventRoutes, + tenantRoutes, pro.scim, ] diff --git a/packages/worker/src/api/routes/system/tests/status.spec.ts b/packages/worker/src/api/routes/system/tests/status.spec.ts index 71e01a0e72..cfb9e2d68f 100644 --- a/packages/worker/src/api/routes/system/tests/status.spec.ts +++ b/packages/worker/src/api/routes/system/tests/status.spec.ts @@ -27,6 +27,7 @@ describe("/api/system/status", () => { health: { passing: true, }, + version: expect.any(String), }) expect(accounts.getStatus).toHaveBeenCalledTimes(0) config.cloudHosted() diff --git a/packages/worker/src/environment.ts b/packages/worker/src/environment.ts index 70fb911ee1..d642d50846 100644 --- a/packages/worker/src/environment.ts +++ b/packages/worker/src/environment.ts @@ -24,6 +24,7 @@ const environment = { // auth MINIO_ACCESS_KEY: process.env.MINIO_ACCESS_KEY, MINIO_SECRET_KEY: 
process.env.MINIO_SECRET_KEY, + AWS_SESSION_TOKEN: process.env.AWS_SESSION_TOKEN, SALT_ROUNDS: process.env.SALT_ROUNDS, REDIS_PASSWORD: process.env.REDIS_PASSWORD, COOKIE_DOMAIN: process.env.COOKIE_DOMAIN, @@ -46,6 +47,7 @@ const environment = { SMTP_FALLBACK_ENABLED: process.env.SMTP_FALLBACK_ENABLED, DISABLE_DEVELOPER_LICENSE: process.env.DISABLE_DEVELOPER_LICENSE, SQS_SEARCH_ENABLE: process.env.SQS_SEARCH_ENABLE, + BUDIBASE_ENVIRONMENT: process.env.BUDIBASE_ENVIRONMENT, // smtp SMTP_USER: process.env.SMTP_USER, SMTP_PASSWORD: process.env.SMTP_PASSWORD, diff --git a/packages/worker/src/index.ts b/packages/worker/src/index.ts index 4e770c6ecb..85e5d6ad2e 100644 --- a/packages/worker/src/index.ts +++ b/packages/worker/src/index.ts @@ -88,7 +88,11 @@ const shutdown = () => { } export default server.listen(parseInt(env.PORT || "4002"), async () => { - console.log(`Worker running on ${JSON.stringify(server.address())}`) + let startupLog = `Worker running on ${JSON.stringify(server.address())}` + if (env.BUDIBASE_ENVIRONMENT) { + startupLog = `${startupLog} - environment: "${env.BUDIBASE_ENVIRONMENT}"` + } + console.log(startupLog) await initPro() await redis.clients.init() cache.docWritethrough.init() diff --git a/packages/worker/src/tests/api/tenants.ts b/packages/worker/src/tests/api/tenants.ts index 16f970915a..c404b8ad58 100644 --- a/packages/worker/src/tests/api/tenants.ts +++ b/packages/worker/src/tests/api/tenants.ts @@ -1,3 +1,4 @@ +import { TenantInfo } from "@budibase/types" import TestConfiguration from "../TestConfiguration" import { TestAPI, TestAPIOpts } from "./base" @@ -14,4 +15,12 @@ export class TenantAPI extends TestAPI { .set(opts?.headers) .expect(opts?.status ? opts.status : 204) } + + saveTenantInfo = (tenantInfo: TenantInfo) => { + return this.request + .post("/api/global/tenant") + .set(this.config.internalAPIHeaders()) + .send(tenantInfo) + .expect(200) + } } diff --git a/scripts/add-app-migration.js b/scripts/add-app-migration.js index a58d3a4fbe..a4e01be635 100644 --- a/scripts/add-app-migration.js +++ b/scripts/add-app-migration.js @@ -21,7 +21,9 @@ const generateTimestamp = () => { } const createMigrationFile = () => { - const migrationFilename = `${generateTimestamp()}_${title}` + const migrationFilename = `${generateTimestamp()}_${title + .replace(/-/g, "_") + .replace(/ /g, "_")}` const migrationsDir = "../packages/server/src/appMigrations" const template = `const migration = async () => { diff --git a/scripts/deploy-camunda.sh b/scripts/deploy-camunda.sh index 7059b6f072..90400a0449 100755 --- a/scripts/deploy-camunda.sh +++ b/scripts/deploy-camunda.sh @@ -23,6 +23,7 @@ echo "deploy processes..." 
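Aside (illustrative sketch, not part of the patch): the add-app-migration change above turns a free-text title into a safe filename; timestamp and title invented:

    const generateTimestamp = () => "20240612113045" // stand-in for the script's helper
    const migrationFilename = `${generateTimestamp()}_${"add user-groups table"
      .replace(/-/g, "_")
      .replace(/ /g, "_")}`
    // => "20240612113045_add_user_groups_table"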
zbctl deploy resource offboarding.bpmn --insecure zbctl deploy resource onboarding.bpmn --insecure zbctl deploy resource free_trial.bpmn --insecure +zbctl deploy resource verify_sso_login.bpmn --insecure cd ../../../../../budibase/packages/account-portal/packages/server diff --git a/yarn.lock b/yarn.lock index 677b7cb441..c060cc9da3 100644 --- a/yarn.lock +++ b/yarn.lock @@ -795,17 +795,22 @@ dependencies: "@azure/msal-common" "14.5.0" +"@azure/msal-common@14.12.0": + version "14.12.0" + resolved "https://registry.yarnpkg.com/@azure/msal-common/-/msal-common-14.12.0.tgz#844abe269b071f8fa8949dadc2a7b65bbb147588" + integrity sha512-IDDXmzfdwmDkv4SSmMEyAniJf6fDu3FJ7ncOjlxkDuT85uSnLEhZi3fGZpoR7T4XZpOMx9teM9GXBgrfJgyeBw== + "@azure/msal-common@14.5.0": version "14.5.0" resolved "https://registry.yarnpkg.com/@azure/msal-common/-/msal-common-14.5.0.tgz#5a891e5f8eaf23f598bdb4e285e938fb606cd716" integrity sha512-Gx5rZbiZV/HiZ2nEKfjfAF/qDdZ4/QWxMvMo2jhIFVz528dVKtaZyFAOtsX2Ak8+TQvRsGCaEfuwJFuXB6tu1A== "@azure/msal-node@^2.5.1": - version "2.6.0" - resolved "https://registry.yarnpkg.com/@azure/msal-node/-/msal-node-2.6.0.tgz#44bad20a1523c92d79de24fb225da86abaeedb6c" - integrity sha512-RWAWCYYrSldIYC47oWtofIun41e6SB9TBYgGYsezq6ednagwo9ZRFyRsvl1NabmdTkdDDXRAABIdveeN2Gtd8w== + version "2.9.2" + resolved "https://registry.yarnpkg.com/@azure/msal-node/-/msal-node-2.9.2.tgz#e6d3c1661012c1bd0ef68e328f73a2fdede52931" + integrity sha512-8tvi6Cos3m+0KmRbPjgkySXi+UQU/QiuVRFnrxIwt5xZlEEFa69O04RTaNESGgImyBBlYbo2mfE8/U8Bbdk1WQ== dependencies: - "@azure/msal-common" "14.5.0" + "@azure/msal-common" "14.12.0" jsonwebtoken "^9.0.0" uuid "^8.3.0" @@ -1980,13 +1985,20 @@ resolved "https://registry.yarnpkg.com/@babel/regjsgen/-/regjsgen-0.8.0.tgz#f0ba69b075e1f05fb2825b7fad991e7adbb18310" integrity sha512-x/rqGMdzj+fWZvCOYForTghzbtqPDZ5gPwaoNGHdgDfF2QA/XZbCBp4Moo5scrkAMPhB7z26XM/AaHuIJdgauA== -"@babel/runtime@^7.10.5", "@babel/runtime@^7.12.5", "@babel/runtime@^7.13.10", "@babel/runtime@^7.15.4", "@babel/runtime@^7.8.4", "@babel/runtime@^7.9.2": +"@babel/runtime@^7.10.5", "@babel/runtime@^7.12.5", "@babel/runtime@^7.15.4", "@babel/runtime@^7.8.4", "@babel/runtime@^7.9.2": version "7.24.5" resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.24.5.tgz#230946857c053a36ccc66e1dd03b17dd0c4ed02c" integrity sha512-Nms86NXrsaeU9vbBJKni6gXiEXZ4CVpYVzEjDH9Sb8vmZ3UljyA1GSOJl/6LGPO8EHLuSF9H+IxNXHPX8QHJ4g== dependencies: regenerator-runtime "^0.14.0" +"@babel/runtime@^7.13.10": + version "7.24.7" + resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.24.7.tgz#f4f0d5530e8dbdf59b3451b9b3e594b6ba082e12" + integrity sha512-UwgBRMjJP+xv857DCngvqXI3Iq6J4v0wXmwc6sapg+zyhbwmQX67LUEFrkK5tbyJ30jGuG3ZvWpBiB9LCy1kWw== + dependencies: + regenerator-runtime "^0.14.0" + "@babel/template@^7.22.15", "@babel/template@^7.22.5", "@babel/template@^7.3.3": version "7.22.15" resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.22.15.tgz#09576efc3830f0430f4548ef971dde1350ef2f38" @@ -2181,11 +2193,11 @@ "@bull-board/api" "5.10.2" "@camunda8/sdk@^8.5.3": - version "8.5.4" - resolved "https://registry.yarnpkg.com/@camunda8/sdk/-/sdk-8.5.4.tgz#cca74d83f849892beae30b180d3883907a3781aa" - integrity sha512-Mr37Kt90NEyra14pV3tGtBCx6Sln185z7cXIS7qkyWfFInvo5VSrzDfOyJi1ilgbP07usSokxI74IVmhz769kQ== + version "8.6.6" + resolved "https://registry.yarnpkg.com/@camunda8/sdk/-/sdk-8.6.6.tgz#39f894f89b485df7c2a803e590d4175fbfb6fb8f" + integrity 
sha512-u0A1Q0Fwh6W33i9ky2nfA6DJUKgLES8FAhp7k3L7L8ldNM5NgLRLiz1eZgUWK5CT4D78aFoSkm3VobFo+V42yQ== dependencies: - "@grpc/grpc-js" "1.10.7" + "@grpc/grpc-js" "1.10.9" "@grpc/proto-loader" "0.7.13" chalk "^2.4.2" console-stamp "^3.0.2" @@ -2389,230 +2401,230 @@ find-up "^5.0.0" strip-json-comments "^3.1.1" -"@esbuild/aix-ppc64@0.20.2": - version "0.20.2" - resolved "https://registry.yarnpkg.com/@esbuild/aix-ppc64/-/aix-ppc64-0.20.2.tgz#a70f4ac11c6a1dfc18b8bbb13284155d933b9537" - integrity sha512-D+EBOJHXdNZcLJRBkhENNG8Wji2kgc9AZ9KiPr1JuZjsNtyHzrsfLRrY0tk2H2aoFu6RANO1y1iPPUCDYWkb5g== +"@esbuild/aix-ppc64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/aix-ppc64/-/aix-ppc64-0.21.5.tgz#c7184a326533fcdf1b8ee0733e21c713b975575f" + integrity sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ== "@esbuild/android-arm64@0.18.20": version "0.18.20" resolved "https://registry.yarnpkg.com/@esbuild/android-arm64/-/android-arm64-0.18.20.tgz#984b4f9c8d0377443cc2dfcef266d02244593622" integrity sha512-Nz4rJcchGDtENV0eMKUNa6L12zz2zBDXuhj/Vjh18zGqB44Bi7MBMSXjgunJgjRhCmKOjnPuZp4Mb6OKqtMHLQ== -"@esbuild/android-arm64@0.20.2": - version "0.20.2" - resolved "https://registry.yarnpkg.com/@esbuild/android-arm64/-/android-arm64-0.20.2.tgz#db1c9202a5bc92ea04c7b6840f1bbe09ebf9e6b9" - integrity sha512-mRzjLacRtl/tWU0SvD8lUEwb61yP9cqQo6noDZP/O8VkwafSYwZ4yWy24kan8jE/IMERpYncRt2dw438LP3Xmg== +"@esbuild/android-arm64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/android-arm64/-/android-arm64-0.21.5.tgz#09d9b4357780da9ea3a7dfb833a1f1ff439b4052" + integrity sha512-c0uX9VAUBQ7dTDCjq+wdyGLowMdtR/GoC2U5IYk/7D1H1JYC0qseD7+11iMP2mRLN9RcCMRcjC4YMclCzGwS/A== "@esbuild/android-arm@0.18.20": version "0.18.20" resolved "https://registry.yarnpkg.com/@esbuild/android-arm/-/android-arm-0.18.20.tgz#fedb265bc3a589c84cc11f810804f234947c3682" integrity sha512-fyi7TDI/ijKKNZTUJAQqiG5T7YjJXgnzkURqmGj13C6dCqckZBLdl4h7bkhHt/t0WP+zO9/zwroDvANaOqO5Sw== -"@esbuild/android-arm@0.20.2": - version "0.20.2" - resolved "https://registry.yarnpkg.com/@esbuild/android-arm/-/android-arm-0.20.2.tgz#3b488c49aee9d491c2c8f98a909b785870d6e995" - integrity sha512-t98Ra6pw2VaDhqNWO2Oph2LXbz/EJcnLmKLGBJwEwXX/JAN83Fym1rU8l0JUWK6HkIbWONCSSatf4sf2NBRx/w== +"@esbuild/android-arm@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/android-arm/-/android-arm-0.21.5.tgz#9b04384fb771926dfa6d7ad04324ecb2ab9b2e28" + integrity sha512-vCPvzSjpPHEi1siZdlvAlsPxXl7WbOVUBBAowWug4rJHb68Ox8KualB+1ocNvT5fjv6wpkX6o/iEpbDrf68zcg== "@esbuild/android-x64@0.18.20": version "0.18.20" resolved "https://registry.yarnpkg.com/@esbuild/android-x64/-/android-x64-0.18.20.tgz#35cf419c4cfc8babe8893d296cd990e9e9f756f2" integrity sha512-8GDdlePJA8D6zlZYJV/jnrRAi6rOiNaCC/JclcXpB+KIuvfBN4owLtgzY2bsxnx666XjJx2kDPUmnTtR8qKQUg== -"@esbuild/android-x64@0.20.2": - version "0.20.2" - resolved "https://registry.yarnpkg.com/@esbuild/android-x64/-/android-x64-0.20.2.tgz#3b1628029e5576249d2b2d766696e50768449f98" - integrity sha512-btzExgV+/lMGDDa194CcUQm53ncxzeBrWJcncOBxuC6ndBkKxnHdFJn86mCIgTELsooUmwUm9FkhSp5HYu00Rg== +"@esbuild/android-x64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/android-x64/-/android-x64-0.21.5.tgz#29918ec2db754cedcb6c1b04de8cd6547af6461e" + integrity sha512-D7aPRUUNHRBwHxzxRvp856rjUHRFW1SdQATKXH2hqA0kAZb1hKmi02OpYRacl0TxIGz/ZmXWlbZgjwWYaCakTA== "@esbuild/darwin-arm64@0.18.20": version "0.18.20" resolved 
"https://registry.yarnpkg.com/@esbuild/darwin-arm64/-/darwin-arm64-0.18.20.tgz#08172cbeccf95fbc383399a7f39cfbddaeb0d7c1" integrity sha512-bxRHW5kHU38zS2lPTPOyuyTm+S+eobPUnTNkdJEfAddYgEcll4xkT8DB9d2008DtTbl7uJag2HuE5NZAZgnNEA== -"@esbuild/darwin-arm64@0.20.2": - version "0.20.2" - resolved "https://registry.yarnpkg.com/@esbuild/darwin-arm64/-/darwin-arm64-0.20.2.tgz#6e8517a045ddd86ae30c6608c8475ebc0c4000bb" - integrity sha512-4J6IRT+10J3aJH3l1yzEg9y3wkTDgDk7TSDFX+wKFiWjqWp/iCfLIYzGyasx9l0SAFPT1HwSCR+0w/h1ES/MjA== +"@esbuild/darwin-arm64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/darwin-arm64/-/darwin-arm64-0.21.5.tgz#e495b539660e51690f3928af50a76fb0a6ccff2a" + integrity sha512-DwqXqZyuk5AiWWf3UfLiRDJ5EDd49zg6O9wclZ7kUMv2WRFr4HKjXp/5t8JZ11QbQfUS6/cRCKGwYhtNAY88kQ== "@esbuild/darwin-x64@0.18.20": version "0.18.20" resolved "https://registry.yarnpkg.com/@esbuild/darwin-x64/-/darwin-x64-0.18.20.tgz#d70d5790d8bf475556b67d0f8b7c5bdff053d85d" integrity sha512-pc5gxlMDxzm513qPGbCbDukOdsGtKhfxD1zJKXjCCcU7ju50O7MeAZ8c4krSJcOIJGFR+qx21yMMVYwiQvyTyQ== -"@esbuild/darwin-x64@0.20.2": - version "0.20.2" - resolved "https://registry.yarnpkg.com/@esbuild/darwin-x64/-/darwin-x64-0.20.2.tgz#90ed098e1f9dd8a9381695b207e1cff45540a0d0" - integrity sha512-tBcXp9KNphnNH0dfhv8KYkZhjc+H3XBkF5DKtswJblV7KlT9EI2+jeA8DgBjp908WEuYll6pF+UStUCfEpdysA== +"@esbuild/darwin-x64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/darwin-x64/-/darwin-x64-0.21.5.tgz#c13838fa57372839abdddc91d71542ceea2e1e22" + integrity sha512-se/JjF8NlmKVG4kNIuyWMV/22ZaerB+qaSi5MdrXtd6R08kvs2qCN4C09miupktDitvh8jRFflwGFBQcxZRjbw== "@esbuild/freebsd-arm64@0.18.20": version "0.18.20" resolved "https://registry.yarnpkg.com/@esbuild/freebsd-arm64/-/freebsd-arm64-0.18.20.tgz#98755cd12707f93f210e2494d6a4b51b96977f54" integrity sha512-yqDQHy4QHevpMAaxhhIwYPMv1NECwOvIpGCZkECn8w2WFHXjEwrBn3CeNIYsibZ/iZEUemj++M26W3cNR5h+Tw== -"@esbuild/freebsd-arm64@0.20.2": - version "0.20.2" - resolved "https://registry.yarnpkg.com/@esbuild/freebsd-arm64/-/freebsd-arm64-0.20.2.tgz#d71502d1ee89a1130327e890364666c760a2a911" - integrity sha512-d3qI41G4SuLiCGCFGUrKsSeTXyWG6yem1KcGZVS+3FYlYhtNoNgYrWcvkOoaqMhwXSMrZRl69ArHsGJ9mYdbbw== +"@esbuild/freebsd-arm64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/freebsd-arm64/-/freebsd-arm64-0.21.5.tgz#646b989aa20bf89fd071dd5dbfad69a3542e550e" + integrity sha512-5JcRxxRDUJLX8JXp/wcBCy3pENnCgBR9bN6JsY4OmhfUtIHe3ZW0mawA7+RDAcMLrMIZaf03NlQiX9DGyB8h4g== "@esbuild/freebsd-x64@0.18.20": version "0.18.20" resolved "https://registry.yarnpkg.com/@esbuild/freebsd-x64/-/freebsd-x64-0.18.20.tgz#c1eb2bff03915f87c29cece4c1a7fa1f423b066e" integrity sha512-tgWRPPuQsd3RmBZwarGVHZQvtzfEBOreNuxEMKFcd5DaDn2PbBxfwLcj4+aenoh7ctXcbXmOQIn8HI6mCSw5MQ== -"@esbuild/freebsd-x64@0.20.2": - version "0.20.2" - resolved "https://registry.yarnpkg.com/@esbuild/freebsd-x64/-/freebsd-x64-0.20.2.tgz#aa5ea58d9c1dd9af688b8b6f63ef0d3d60cea53c" - integrity sha512-d+DipyvHRuqEeM5zDivKV1KuXn9WeRX6vqSqIDgwIfPQtwMP4jaDsQsDncjTDDsExT4lR/91OLjRo8bmC1e+Cw== +"@esbuild/freebsd-x64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/freebsd-x64/-/freebsd-x64-0.21.5.tgz#aa615cfc80af954d3458906e38ca22c18cf5c261" + integrity sha512-J95kNBj1zkbMXtHVH29bBriQygMXqoVQOQYA+ISs0/2l3T9/kj42ow2mpqerRBxDJnmkUDCaQT/dfNXWX/ZZCQ== "@esbuild/linux-arm64@0.18.20": version "0.18.20" resolved 
"https://registry.yarnpkg.com/@esbuild/linux-arm64/-/linux-arm64-0.18.20.tgz#bad4238bd8f4fc25b5a021280c770ab5fc3a02a0" integrity sha512-2YbscF+UL7SQAVIpnWvYwM+3LskyDmPhe31pE7/aoTMFKKzIc9lLbyGUpmmb8a8AixOL61sQ/mFh3jEjHYFvdA== -"@esbuild/linux-arm64@0.20.2": - version "0.20.2" - resolved "https://registry.yarnpkg.com/@esbuild/linux-arm64/-/linux-arm64-0.20.2.tgz#055b63725df678379b0f6db9d0fa85463755b2e5" - integrity sha512-9pb6rBjGvTFNira2FLIWqDk/uaf42sSyLE8j1rnUpuzsODBq7FvpwHYZxQ/It/8b+QOS1RYfqgGFNLRI+qlq2A== +"@esbuild/linux-arm64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/linux-arm64/-/linux-arm64-0.21.5.tgz#70ac6fa14f5cb7e1f7f887bcffb680ad09922b5b" + integrity sha512-ibKvmyYzKsBeX8d8I7MH/TMfWDXBF3db4qM6sy+7re0YXya+K1cem3on9XgdT2EQGMu4hQyZhan7TeQ8XkGp4Q== "@esbuild/linux-arm@0.18.20": version "0.18.20" resolved "https://registry.yarnpkg.com/@esbuild/linux-arm/-/linux-arm-0.18.20.tgz#3e617c61f33508a27150ee417543c8ab5acc73b0" integrity sha512-/5bHkMWnq1EgKr1V+Ybz3s1hWXok7mDFUMQ4cG10AfW3wL02PSZi5kFpYKrptDsgb2WAJIvRcDm+qIvXf/apvg== -"@esbuild/linux-arm@0.20.2": - version "0.20.2" - resolved "https://registry.yarnpkg.com/@esbuild/linux-arm/-/linux-arm-0.20.2.tgz#76b3b98cb1f87936fbc37f073efabad49dcd889c" - integrity sha512-VhLPeR8HTMPccbuWWcEUD1Az68TqaTYyj6nfE4QByZIQEQVWBB8vup8PpR7y1QHL3CpcF6xd5WVBU/+SBEvGTg== +"@esbuild/linux-arm@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/linux-arm/-/linux-arm-0.21.5.tgz#fc6fd11a8aca56c1f6f3894f2bea0479f8f626b9" + integrity sha512-bPb5AHZtbeNGjCKVZ9UGqGwo8EUu4cLq68E95A53KlxAPRmUyYv2D6F0uUI65XisGOL1hBP5mTronbgo+0bFcA== "@esbuild/linux-ia32@0.18.20": version "0.18.20" resolved "https://registry.yarnpkg.com/@esbuild/linux-ia32/-/linux-ia32-0.18.20.tgz#699391cccba9aee6019b7f9892eb99219f1570a7" integrity sha512-P4etWwq6IsReT0E1KHU40bOnzMHoH73aXp96Fs8TIT6z9Hu8G6+0SHSw9i2isWrD2nbx2qo5yUqACgdfVGx7TA== -"@esbuild/linux-ia32@0.20.2": - version "0.20.2" - resolved "https://registry.yarnpkg.com/@esbuild/linux-ia32/-/linux-ia32-0.20.2.tgz#c0e5e787c285264e5dfc7a79f04b8b4eefdad7fa" - integrity sha512-o10utieEkNPFDZFQm9CoP7Tvb33UutoJqg3qKf1PWVeeJhJw0Q347PxMvBgVVFgouYLGIhFYG0UGdBumROyiig== +"@esbuild/linux-ia32@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/linux-ia32/-/linux-ia32-0.21.5.tgz#3271f53b3f93e3d093d518d1649d6d68d346ede2" + integrity sha512-YvjXDqLRqPDl2dvRODYmmhz4rPeVKYvppfGYKSNGdyZkA01046pLWyRKKI3ax8fbJoK5QbxblURkwK/MWY18Tg== "@esbuild/linux-loong64@0.18.20": version "0.18.20" resolved "https://registry.yarnpkg.com/@esbuild/linux-loong64/-/linux-loong64-0.18.20.tgz#e6fccb7aac178dd2ffb9860465ac89d7f23b977d" integrity sha512-nXW8nqBTrOpDLPgPY9uV+/1DjxoQ7DoB2N8eocyq8I9XuqJ7BiAMDMf9n1xZM9TgW0J8zrquIb/A7s3BJv7rjg== -"@esbuild/linux-loong64@0.20.2": - version "0.20.2" - resolved "https://registry.yarnpkg.com/@esbuild/linux-loong64/-/linux-loong64-0.20.2.tgz#a6184e62bd7cdc63e0c0448b83801001653219c5" - integrity sha512-PR7sp6R/UC4CFVomVINKJ80pMFlfDfMQMYynX7t1tNTeivQ6XdX5r2XovMmha/VjR1YN/HgHWsVcTRIMkymrgQ== +"@esbuild/linux-loong64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/linux-loong64/-/linux-loong64-0.21.5.tgz#ed62e04238c57026aea831c5a130b73c0f9f26df" + integrity sha512-uHf1BmMG8qEvzdrzAqg2SIG/02+4/DHB6a9Kbya0XDvwDEKCoC8ZRWI5JJvNdUjtciBGFQ5PuBlpEOXQj+JQSg== "@esbuild/linux-mips64el@0.18.20": version "0.18.20" resolved 
"https://registry.yarnpkg.com/@esbuild/linux-mips64el/-/linux-mips64el-0.18.20.tgz#eeff3a937de9c2310de30622a957ad1bd9183231" integrity sha512-d5NeaXZcHp8PzYy5VnXV3VSd2D328Zb+9dEq5HE6bw6+N86JVPExrA6O68OPwobntbNJ0pzCpUFZTo3w0GyetQ== -"@esbuild/linux-mips64el@0.20.2": - version "0.20.2" - resolved "https://registry.yarnpkg.com/@esbuild/linux-mips64el/-/linux-mips64el-0.20.2.tgz#d08e39ce86f45ef8fc88549d29c62b8acf5649aa" - integrity sha512-4BlTqeutE/KnOiTG5Y6Sb/Hw6hsBOZapOVF6njAESHInhlQAghVVZL1ZpIctBOoTFbQyGW+LsVYZ8lSSB3wkjA== +"@esbuild/linux-mips64el@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/linux-mips64el/-/linux-mips64el-0.21.5.tgz#e79b8eb48bf3b106fadec1ac8240fb97b4e64cbe" + integrity sha512-IajOmO+KJK23bj52dFSNCMsz1QP1DqM6cwLUv3W1QwyxkyIWecfafnI555fvSGqEKwjMXVLokcV5ygHW5b3Jbg== "@esbuild/linux-ppc64@0.18.20": version "0.18.20" resolved "https://registry.yarnpkg.com/@esbuild/linux-ppc64/-/linux-ppc64-0.18.20.tgz#2f7156bde20b01527993e6881435ad79ba9599fb" integrity sha512-WHPyeScRNcmANnLQkq6AfyXRFr5D6N2sKgkFo2FqguP44Nw2eyDlbTdZwd9GYk98DZG9QItIiTlFLHJHjxP3FA== -"@esbuild/linux-ppc64@0.20.2": - version "0.20.2" - resolved "https://registry.yarnpkg.com/@esbuild/linux-ppc64/-/linux-ppc64-0.20.2.tgz#8d252f0b7756ffd6d1cbde5ea67ff8fd20437f20" - integrity sha512-rD3KsaDprDcfajSKdn25ooz5J5/fWBylaaXkuotBDGnMnDP1Uv5DLAN/45qfnf3JDYyJv/ytGHQaziHUdyzaAg== +"@esbuild/linux-ppc64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/linux-ppc64/-/linux-ppc64-0.21.5.tgz#5f2203860a143b9919d383ef7573521fb154c3e4" + integrity sha512-1hHV/Z4OEfMwpLO8rp7CvlhBDnjsC3CttJXIhBi+5Aj5r+MBvy4egg7wCbe//hSsT+RvDAG7s81tAvpL2XAE4w== "@esbuild/linux-riscv64@0.18.20": version "0.18.20" resolved "https://registry.yarnpkg.com/@esbuild/linux-riscv64/-/linux-riscv64-0.18.20.tgz#6628389f210123d8b4743045af8caa7d4ddfc7a6" integrity sha512-WSxo6h5ecI5XH34KC7w5veNnKkju3zBRLEQNY7mv5mtBmrP/MjNBCAlsM2u5hDBlS3NGcTQpoBvRzqBcRtpq1A== -"@esbuild/linux-riscv64@0.20.2": - version "0.20.2" - resolved "https://registry.yarnpkg.com/@esbuild/linux-riscv64/-/linux-riscv64-0.20.2.tgz#19f6dcdb14409dae607f66ca1181dd4e9db81300" - integrity sha512-snwmBKacKmwTMmhLlz/3aH1Q9T8v45bKYGE3j26TsaOVtjIag4wLfWSiZykXzXuE1kbCE+zJRmwp+ZbIHinnVg== +"@esbuild/linux-riscv64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/linux-riscv64/-/linux-riscv64-0.21.5.tgz#07bcafd99322d5af62f618cb9e6a9b7f4bb825dc" + integrity sha512-2HdXDMd9GMgTGrPWnJzP2ALSokE/0O5HhTUvWIbD3YdjME8JwvSCnNGBnTThKGEB91OZhzrJ4qIIxk/SBmyDDA== "@esbuild/linux-s390x@0.18.20": version "0.18.20" resolved "https://registry.yarnpkg.com/@esbuild/linux-s390x/-/linux-s390x-0.18.20.tgz#255e81fb289b101026131858ab99fba63dcf0071" integrity sha512-+8231GMs3mAEth6Ja1iK0a1sQ3ohfcpzpRLH8uuc5/KVDFneH6jtAJLFGafpzpMRO6DzJ6AvXKze9LfFMrIHVQ== -"@esbuild/linux-s390x@0.20.2": - version "0.20.2" - resolved "https://registry.yarnpkg.com/@esbuild/linux-s390x/-/linux-s390x-0.20.2.tgz#3c830c90f1a5d7dd1473d5595ea4ebb920988685" - integrity sha512-wcWISOobRWNm3cezm5HOZcYz1sKoHLd8VL1dl309DiixxVFoFe/o8HnwuIwn6sXre88Nwj+VwZUvJf4AFxkyrQ== +"@esbuild/linux-s390x@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/linux-s390x/-/linux-s390x-0.21.5.tgz#b7ccf686751d6a3e44b8627ababc8be3ef62d8de" + integrity sha512-zus5sxzqBJD3eXxwvjN1yQkRepANgxE9lgOW2qLnmr8ikMTphkjgXu1HR01K4FJg8h1kEEDAqDcZQtbrRnB41A== "@esbuild/linux-x64@0.18.20": version "0.18.20" resolved 
"https://registry.yarnpkg.com/@esbuild/linux-x64/-/linux-x64-0.18.20.tgz#c7690b3417af318a9b6f96df3031a8865176d338" integrity sha512-UYqiqemphJcNsFEskc73jQ7B9jgwjWrSayxawS6UVFZGWrAAtkzjxSqnoclCXxWtfwLdzU+vTpcNYhpn43uP1w== -"@esbuild/linux-x64@0.20.2": - version "0.20.2" - resolved "https://registry.yarnpkg.com/@esbuild/linux-x64/-/linux-x64-0.20.2.tgz#86eca35203afc0d9de0694c64ec0ab0a378f6fff" - integrity sha512-1MdwI6OOTsfQfek8sLwgyjOXAu+wKhLEoaOLTjbijk6E2WONYpH9ZU2mNtR+lZ2B4uwr+usqGuVfFT9tMtGvGw== +"@esbuild/linux-x64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/linux-x64/-/linux-x64-0.21.5.tgz#6d8f0c768e070e64309af8004bb94e68ab2bb3b0" + integrity sha512-1rYdTpyv03iycF1+BhzrzQJCdOuAOtaqHTWJZCWvijKD2N5Xu0TtVC8/+1faWqcP9iBCWOmjmhoH94dH82BxPQ== "@esbuild/netbsd-x64@0.18.20": version "0.18.20" resolved "https://registry.yarnpkg.com/@esbuild/netbsd-x64/-/netbsd-x64-0.18.20.tgz#30e8cd8a3dded63975e2df2438ca109601ebe0d1" integrity sha512-iO1c++VP6xUBUmltHZoMtCUdPlnPGdBom6IrO4gyKPFFVBKioIImVooR5I83nTew5UOYrk3gIJhbZh8X44y06A== -"@esbuild/netbsd-x64@0.20.2": - version "0.20.2" - resolved "https://registry.yarnpkg.com/@esbuild/netbsd-x64/-/netbsd-x64-0.20.2.tgz#e771c8eb0e0f6e1877ffd4220036b98aed5915e6" - integrity sha512-K8/DhBxcVQkzYc43yJXDSyjlFeHQJBiowJ0uVL6Tor3jGQfSGHNNJcWxNbOI8v5k82prYqzPuwkzHt3J1T1iZQ== +"@esbuild/netbsd-x64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/netbsd-x64/-/netbsd-x64-0.21.5.tgz#bbe430f60d378ecb88decb219c602667387a6047" + integrity sha512-Woi2MXzXjMULccIwMnLciyZH4nCIMpWQAs049KEeMvOcNADVxo0UBIQPfSmxB3CWKedngg7sWZdLvLczpe0tLg== "@esbuild/openbsd-x64@0.18.20": version "0.18.20" resolved "https://registry.yarnpkg.com/@esbuild/openbsd-x64/-/openbsd-x64-0.18.20.tgz#7812af31b205055874c8082ea9cf9ab0da6217ae" integrity sha512-e5e4YSsuQfX4cxcygw/UCPIEP6wbIL+se3sxPdCiMbFLBWu0eiZOJ7WoD+ptCLrmjZBK1Wk7I6D/I3NglUGOxg== -"@esbuild/openbsd-x64@0.20.2": - version "0.20.2" - resolved "https://registry.yarnpkg.com/@esbuild/openbsd-x64/-/openbsd-x64-0.20.2.tgz#9a795ae4b4e37e674f0f4d716f3e226dd7c39baf" - integrity sha512-eMpKlV0SThJmmJgiVyN9jTPJ2VBPquf6Kt/nAoo6DgHAoN57K15ZghiHaMvqjCye/uU4X5u3YSMgVBI1h3vKrQ== +"@esbuild/openbsd-x64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/openbsd-x64/-/openbsd-x64-0.21.5.tgz#99d1cf2937279560d2104821f5ccce220cb2af70" + integrity sha512-HLNNw99xsvx12lFBUwoT8EVCsSvRNDVxNpjZ7bPn947b8gJPzeHWyNVhFsaerc0n3TsbOINvRP2byTZ5LKezow== "@esbuild/sunos-x64@0.18.20": version "0.18.20" resolved "https://registry.yarnpkg.com/@esbuild/sunos-x64/-/sunos-x64-0.18.20.tgz#d5c275c3b4e73c9b0ecd38d1ca62c020f887ab9d" integrity sha512-kDbFRFp0YpTQVVrqUd5FTYmWo45zGaXe0X8E1G/LKFC0v8x0vWrhOWSLITcCn63lmZIxfOMXtCfti/RxN/0wnQ== -"@esbuild/sunos-x64@0.20.2": - version "0.20.2" - resolved "https://registry.yarnpkg.com/@esbuild/sunos-x64/-/sunos-x64-0.20.2.tgz#7df23b61a497b8ac189def6e25a95673caedb03f" - integrity sha512-2UyFtRC6cXLyejf/YEld4Hajo7UHILetzE1vsRcGL3earZEW77JxrFjH4Ez2qaTiEfMgAXxfAZCm1fvM/G/o8w== +"@esbuild/sunos-x64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/sunos-x64/-/sunos-x64-0.21.5.tgz#08741512c10d529566baba837b4fe052c8f3487b" + integrity sha512-6+gjmFpfy0BHU5Tpptkuh8+uw3mnrvgs+dSPQXQOv3ekbordwnzTVEb4qnIvQcYXq6gzkyTnoZ9dZG+D4garKg== "@esbuild/win32-arm64@0.18.20": version "0.18.20" resolved "https://registry.yarnpkg.com/@esbuild/win32-arm64/-/win32-arm64-0.18.20.tgz#73bc7f5a9f8a77805f357fab97f290d0e4820ac9" integrity 
sha512-ddYFR6ItYgoaq4v4JmQQaAI5s7npztfV4Ag6NrhiaW0RrnOXqBkgwZLofVTlq1daVTQNhtI5oieTvkRPfZrePg== -"@esbuild/win32-arm64@0.20.2": - version "0.20.2" - resolved "https://registry.yarnpkg.com/@esbuild/win32-arm64/-/win32-arm64-0.20.2.tgz#f1ae5abf9ca052ae11c1bc806fb4c0f519bacf90" - integrity sha512-GRibxoawM9ZCnDxnP3usoUDO9vUkpAxIIZ6GQI+IlVmr5kP3zUq+l17xELTHMWTWzjxa2guPNyrpq1GWmPvcGQ== +"@esbuild/win32-arm64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/win32-arm64/-/win32-arm64-0.21.5.tgz#675b7385398411240735016144ab2e99a60fc75d" + integrity sha512-Z0gOTd75VvXqyq7nsl93zwahcTROgqvuAcYDUr+vOv8uHhNSKROyU961kgtCD1e95IqPKSQKH7tBTslnS3tA8A== "@esbuild/win32-ia32@0.18.20": version "0.18.20" resolved "https://registry.yarnpkg.com/@esbuild/win32-ia32/-/win32-ia32-0.18.20.tgz#ec93cbf0ef1085cc12e71e0d661d20569ff42102" integrity sha512-Wv7QBi3ID/rROT08SABTS7eV4hX26sVduqDOTe1MvGMjNd3EjOz4b7zeexIR62GTIEKrfJXKL9LFxTYgkyeu7g== -"@esbuild/win32-ia32@0.20.2": - version "0.20.2" - resolved "https://registry.yarnpkg.com/@esbuild/win32-ia32/-/win32-ia32-0.20.2.tgz#241fe62c34d8e8461cd708277813e1d0ba55ce23" - integrity sha512-HfLOfn9YWmkSKRQqovpnITazdtquEW8/SoHW7pWpuEeguaZI4QnCRW6b+oZTztdBnZOS2hqJ6im/D5cPzBTTlQ== +"@esbuild/win32-ia32@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/win32-ia32/-/win32-ia32-0.21.5.tgz#1bfc3ce98aa6ca9a0969e4d2af72144c59c1193b" + integrity sha512-SWXFF1CL2RVNMaVs+BBClwtfZSvDgtL//G/smwAc5oVK/UPu2Gu9tIaRgFmYFFKrmg3SyAjSrElf0TiJ1v8fYA== "@esbuild/win32-x64@0.18.20": version "0.18.20" resolved "https://registry.yarnpkg.com/@esbuild/win32-x64/-/win32-x64-0.18.20.tgz#786c5f41f043b07afb1af37683d7c33668858f6d" integrity sha512-kTdfRcSiDfQca/y9QIkng02avJ+NCaQvrMejlsB3RRv5sE9rRoeBPISaZpKxHELzRxZyLvNts1P27W3wV+8geQ== -"@esbuild/win32-x64@0.20.2": - version "0.20.2" - resolved "https://registry.yarnpkg.com/@esbuild/win32-x64/-/win32-x64-0.20.2.tgz#9c907b21e30a52db959ba4f80bb01a0cc403d5cc" - integrity sha512-N49X4lJX27+l9jbLKSqZ6bKNjzQvHaT8IIFUy+YIqmXQdjYCToGWwOItDrfby14c78aDd5NHQl29xingXfCdLQ== +"@esbuild/win32-x64@0.21.5": + version "0.21.5" + resolved "https://registry.yarnpkg.com/@esbuild/win32-x64/-/win32-x64-0.21.5.tgz#acad351d582d157bb145535db2a6ff53dd514b5c" + integrity sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw== "@eslint-community/eslint-utils@^4.2.0", "@eslint-community/eslint-utils@^4.4.0": version "4.4.0" @@ -2767,18 +2779,18 @@ oauth "^0.10.0" passport-strategy "^1.0.0" -"@grpc/grpc-js@1.10.7": - version "1.10.7" - resolved "https://registry.yarnpkg.com/@grpc/grpc-js/-/grpc-js-1.10.7.tgz#1abce1a8c4c90b79dbbe57d7e4310f3b0ce72899" - integrity sha512-ZMBVjSeDAz3tFSehyO6Pd08xZT1HfIwq3opbeM4cDlBh52gmwp0wVIPcQur53NN0ac68HMZ/7SF2rGRD5KmVmg== +"@grpc/grpc-js@1.10.9": + version "1.10.9" + resolved "https://registry.yarnpkg.com/@grpc/grpc-js/-/grpc-js-1.10.9.tgz#468cc1549a3fe37b760a16745fb7685d91f4f10c" + integrity sha512-5tcgUctCG0qoNyfChZifz2tJqbRbXVO9J7X6duFcOjY3HUNCxg5D0ZCK7EP9vIcZ0zRpLU9bWkyCqVCLZ46IbQ== dependencies: "@grpc/proto-loader" "^0.7.13" "@js-sdsl/ordered-map" "^4.4.2" "@grpc/grpc-js@~1.8.0": - version "1.8.21" - resolved "https://registry.yarnpkg.com/@grpc/grpc-js/-/grpc-js-1.8.21.tgz#d282b122c71227859bf6c5866f4c40f4a2696513" - integrity sha512-KeyQeZpxeEBSqFVTi3q2K7PiPXmgBfECc4updA1ejCLjYmoAlvvM3ZMp5ztTDUCUQmoY3CpDxvchjO1+rFkoHg== + version "1.8.22" + resolved "https://registry.yarnpkg.com/@grpc/grpc-js/-/grpc-js-1.8.22.tgz#847930c9af46e14df05b57fc12325db140ceff1d" + 
integrity sha512-oAjDdN7fzbUi+4hZjKG96MR6KTEubAeMpQEb+77qy+3r0Ua5xTFuie6JOLr4ZZgl5g+W5/uRTS2M1V8mVAFPuA== dependencies: "@grpc/proto-loader" "^0.7.0" "@types/node" ">=12.12.47" @@ -3483,10 +3495,10 @@ dependencies: lodash "^4.17.21" -"@koa/cors@^3.1.0": - version "3.4.3" - resolved "https://registry.yarnpkg.com/@koa/cors/-/cors-3.4.3.tgz#d669ee6e8d6e4f0ec4a7a7b0a17e7a3ed3752ebb" - integrity sha512-WPXQUaAeAMVaLTEFpoq3T2O1C+FstkjJnDQqy95Ck1UdILajsRhu6mhJ8H2f4NFPRBoCNN+qywTJfq/gGki5mw== +"@koa/cors@^5.0.0": + version "5.0.0" + resolved "https://registry.yarnpkg.com/@koa/cors/-/cors-5.0.0.tgz#0029b5f057fa0d0ae0e37dd2c89ece315a0daffd" + integrity sha512-x/iUDjcS90W69PryLDIMgFyV21YLTnG9zOpPXS7Bkt2b8AsY3zZsIpOLBkYr9fBcF3HbkKaER5hOBZLfpLgYNw== dependencies: vary "^1.1.2" @@ -4124,7 +4136,15 @@ "@rollup/pluginutils" "^3.1.0" magic-string "^0.25.7" -"@rollup/plugin-replace@^5.0.2", "@rollup/plugin-replace@^5.0.3": +"@rollup/plugin-replace@^5.0.2": + version "5.0.7" + resolved "https://registry.yarnpkg.com/@rollup/plugin-replace/-/plugin-replace-5.0.7.tgz#150c9ee9db8031d9e4580a61a0edeaaed3d37687" + integrity sha512-PqxSfuorkHz/SPpyngLyg5GCEkOcee9M1bkxiVDr41Pd61mqP1PLOoDPbpl44SB2mQGKwV/In74gqQmGITOhEQ== + dependencies: + "@rollup/pluginutils" "^5.0.1" + magic-string "^0.30.3" + +"@rollup/plugin-replace@^5.0.3": version "5.0.5" resolved "https://registry.yarnpkg.com/@rollup/plugin-replace/-/plugin-replace-5.0.5.tgz#33d5653dce6d03cb24ef98bef7f6d25b57faefdf" integrity sha512-rYO4fOi8lMaTg/z5Jb+hKnrHHVn8j2lwkqwyS4kTRhKyWOLf2wST2sWXr4WzWiTcoHTp2sTjqUbqIj2E39slKQ== @@ -5797,10 +5817,10 @@ "@types/koa-compose" "*" "@types/node" "*" -"@types/koa__cors@^3.1.1": - version "3.3.1" - resolved "https://registry.yarnpkg.com/@types/koa__cors/-/koa__cors-3.3.1.tgz#0ec7543c4c620fd23451bfdd3e21b9a6aadedccd" - integrity sha512-aFGYhTFW7651KhmZZ05VG0QZJre7QxBxDj2LF1lf6GA/wSXEfKVAJxiQQWzRV4ZoMzQIO8vJBXKsUcRuvYK9qw== +"@types/koa__cors@^5.0.0": + version "5.0.0" + resolved "https://registry.yarnpkg.com/@types/koa__cors/-/koa__cors-5.0.0.tgz#74567a045b599266e2cd3940cef96cedecc2ef1f" + integrity sha512-LCk/n25Obq5qlernGOK/2LUwa/2YJb2lxHUkkvYFDOpLXlVI6tKcdfCHRBQnOY4LwH6el5WOLs6PD/a8Uzau6g== dependencies: "@types/koa" "*" @@ -5891,7 +5911,15 @@ "@types/node" "*" form-data "^3.0.0" -"@types/node@*", "@types/node@>=10.0.0", "@types/node@>=12.12.47", "@types/node@>=13.13.4", "@types/node@>=13.7.0", "@types/node@>=8.1.0", "@types/node@^20.4.5": +"@types/node-fetch@^2.6.4": + version "2.6.11" + resolved "https://registry.yarnpkg.com/@types/node-fetch/-/node-fetch-2.6.11.tgz#9b39b78665dae0e82a08f02f4967d62c66f95d24" + integrity sha512-24xFj9R5+rfQJLRyM56qh+wnVSYhyXC2tkoBndtY0U+vubqNsYXGjufB2nn8Q6gt0LrARwL6UBtMCSVCwl4B1g== + dependencies: + "@types/node" "*" + form-data "^4.0.0" + +"@types/node@*", "@types/node@>=10.0.0", "@types/node@>=12.12.47", "@types/node@>=13.13.4", "@types/node@>=13.7.0", "@types/node@^20.4.5": version "20.12.10" resolved "https://registry.yarnpkg.com/@types/node/-/node-20.12.10.tgz#8f0c3f12b0f075eee1fe20c1afb417e9765bef76" integrity sha512-Eem5pH9pmWBHoGAT8Dr5fdc5rYA+4NAovdM4EktRPVAAiJhmWWfQrA0cFhAbOsQdSfIHjAud6YdkbL69+zSKjw== @@ -5920,6 +5948,13 @@ resolved "https://registry.yarnpkg.com/@types/node/-/node-14.18.37.tgz#0bfcd173e8e1e328337473a8317e37b3b14fd30d" integrity sha512-7GgtHCs/QZrBrDzgIJnQtuSvhFSwhyYSI2uafSwZoNt1iOGhEN5fwNrQMjtONyHm9+/LoA4453jH0CMYcr06Pg== +"@types/node@>=8.1.0": + version "20.14.5" + resolved 
"https://registry.yarnpkg.com/@types/node/-/node-20.14.5.tgz#fe35e3022ebe58b8f201580eb24e1fcfc0f2487d" + integrity sha512-aoRR+fJkZT2l0aGOJhuA8frnCSoNX6W7U2mpNq63+BxBIj5BQFt8rHy627kijCmm63ijdSdwvGgpUsU6MBsZZA== + dependencies: + undici-types "~5.26.4" + "@types/node@^18.11.18": version "18.19.10" resolved "https://registry.yarnpkg.com/@types/node/-/node-18.19.10.tgz#4de314ab66faf6bc8ba691021a091ddcdf13a158" @@ -5940,9 +5975,9 @@ integrity sha512-Gj7cI7z+98M282Tqmp2K5EIsoouUEzbBJhQQzDE3jSIRk6r9gsz0oUokqIUR4u1R3dMHo0pDHM7sNOHyhulypw== "@types/oauth@*": - version "0.9.4" - resolved "https://registry.yarnpkg.com/@types/oauth/-/oauth-0.9.4.tgz#dcbab5efa2f34f312b915f80685760ccc8111e0a" - integrity sha512-qk9orhti499fq5XxKCCEbd0OzdPZuancneyse3KtR+vgMiHRbh+mn8M4G6t64ob/Fg+GZGpa565MF/2dKWY32A== + version "0.9.5" + resolved "https://registry.yarnpkg.com/@types/oauth/-/oauth-0.9.5.tgz#acc4209bfa1c8d7d3aaf2c9ad0b32216a29616c1" + integrity sha512-+oQ3C2Zx6ambINOcdIARF5Z3Tu3x//HipE889/fqo3sgpQZbe9c6ExdQFtN6qlhpR7p83lTZfPJt0tCAW29dog== dependencies: "@types/node" "*" @@ -5970,9 +6005,9 @@ "@types/passport-oauth2" "*" "@types/passport-oauth2@*": - version "1.4.16" - resolved "https://registry.yarnpkg.com/@types/passport-oauth2/-/passport-oauth2-1.4.16.tgz#59189a9d69783a63d7fb92d19cd28f96c95740af" - integrity sha512-Sdr0rpAdkiidUOtyaapGgvXyMjqYlMTFHRy7gtJtzr0/ysEIa72N3j2FSHIRc14h29g1+dzDl8IW2WT2Mu29vQ== + version "1.4.17" + resolved "https://registry.yarnpkg.com/@types/passport-oauth2/-/passport-oauth2-1.4.17.tgz#d5d54339d44f6883d03e69dc0cc0e2114067abb4" + integrity sha512-ODiAHvso6JcWJ6ZkHHroVp05EHGhqQN533PtFNBkg8Fy5mERDqsr030AX81M0D69ZcaMvhF92SRckEk2B0HYYg== dependencies: "@types/express" "*" "@types/oauth" "*" @@ -6348,6 +6383,11 @@ dependencies: "@types/estree" "*" +"@types/tmp@0.2.6": + version "0.2.6" + resolved "https://registry.yarnpkg.com/@types/tmp/-/tmp-0.2.6.tgz#d785ee90c52d7cc020e249c948c36f7b32d1e217" + integrity sha512-chhaNf2oKHlRkDGt+tiKE2Z5aJ6qalm7Z9rlLdBwmOiAAf09YQvvoLXjWK4HWPF1xU/fqvMgfNfpVoBscA/tKA== + "@types/tough-cookie@*", "@types/tough-cookie@^4.0.2": version "4.0.2" resolved "https://registry.yarnpkg.com/@types/tough-cookie/-/tough-cookie-4.0.2.tgz#6286b4c7228d58ab7866d19716f3696e03a09397" @@ -6849,11 +6889,18 @@ acorn-walk@^8.0.2, acorn-walk@^8.1.1: resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-8.2.0.tgz#741210f2e2426454508853a2f44d0ab83b7f69c1" integrity sha512-k+iyHEuPgSw6SbuDpGQM+06HQUa04DZ3o+F6CSzXMvvI5KMvnaEqXe+YVe555R9nn6GPt404fos4wcgpw12SDA== -acorn-walk@^8.2.0, acorn-walk@^8.3.2: +acorn-walk@^8.2.0: version "8.3.2" resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-8.3.2.tgz#7703af9415f1b6db9315d6895503862e231d34aa" integrity sha512-cjkyv4OtNCIeqhHrfS81QWXoCBPExR/J62oyEqepVw8WaQeSqpW2uhuLPh1m9eWhDuOo/jUXVTlifvesOWp/4A== +acorn-walk@^8.3.2: + version "8.3.3" + resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-8.3.3.tgz#9caeac29eefaa0c41e3d4c65137de4d6f34df43e" + integrity sha512-MxXdReSRhGO7VlFe1bRG/oI7/mdLV9B9JJT0N8vZOhF7gFRR5l3M8W9G8JxmKV+JC5mGqJ0QvqfSOLsCPa4nUw== + dependencies: + acorn "^8.11.0" + acorn@^5.2.1, acorn@^5.7.3: version "5.7.4" resolved "https://registry.yarnpkg.com/acorn/-/acorn-5.7.4.tgz#3e8d8a9947d0599a1796d10225d7432f4a4acf5e" @@ -6869,6 +6916,11 @@ acorn@^8.1.0, acorn@^8.10.0, acorn@^8.2.4, acorn@^8.4.1, acorn@^8.5.0, acorn@^8. 
resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.11.2.tgz#ca0d78b51895be5390a5903c5b3bdcdaf78ae40b" integrity sha512-nc0Axzp/0FILLEVsm4fNwLCwMttvhEI263QtVPQcbpfZZ3ts0hLsZGOpE6czNlid7CJ9MlyH8reXkpsf3YUY4w== +acorn@^8.11.0: + version "8.12.0" + resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.12.0.tgz#1627bfa2e058148036133b8d9b51a700663c294c" + integrity sha512-RTvkC4w+KNXrM39/lWCUaG0IbRkWdCv7W/IOW9oU6SawyxulvkQy5HQPVTKxEjczcUvapcrw3cFx/60VN/NRNw== + acorn@^8.11.3, acorn@^8.8.1: version "8.11.3" resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.11.3.tgz#71e0b14e13a4ec160724b38fb7b0f233b1b81d7a" @@ -7432,7 +7484,7 @@ axios-retry@^3.1.9: "@babel/runtime" "^7.15.4" is-retry-allowed "^2.2.0" -axios@0.24.0, axios@1.1.3, axios@1.6.3, axios@^0.21.1, axios@^0.26.0, axios@^1.0.0, axios@^1.1.3, axios@^1.4.0, axios@^1.5.0: +axios@0.24.0, axios@1.1.3, axios@1.6.3, axios@^0.21.1, axios@^1.0.0, axios@^1.1.3, axios@^1.4.0, axios@^1.5.0: version "1.6.3" resolved "https://registry.yarnpkg.com/axios/-/axios-1.6.3.tgz#7f50f23b3aa246eff43c54834272346c396613f4" integrity sha512-fWyNdeawGam70jXSVlKl+SUNVcL6j6W79CuSIPfi6HnDUmSCH6gyUys/HrqHeA/wU0Az41rRgean494d0Jb+ww== @@ -7700,7 +7752,7 @@ bl@^4.0.3, bl@^4.1.0: inherits "^2.0.4" readable-stream "^3.4.0" -bl@^6.0.12, bl@^6.0.3: +bl@^6.0.3: version "6.0.12" resolved "https://registry.yarnpkg.com/bl/-/bl-6.0.12.tgz#77c35b96e13aeff028496c798b75389ddee9c7f8" integrity sha512-EnEYHilP93oaOa2MnmNEjAcovPS3JlQZOyzGXi3EyEpPhm9qWvdDp7BmAVEVusGzp8LlwQK56Av+OkDoRjzE0w== @@ -7787,11 +7839,11 @@ brace-expansion@^2.0.1: balanced-match "^1.0.0" braces@^3.0.2, braces@~3.0.2: - version "3.0.2" - resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.2.tgz#3454e1a462ee8d599e236df336cd9ea4f8afe107" - integrity sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A== + version "3.0.3" + resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.3.tgz#490332f40919452272d55a8480adc0c441358789" + integrity sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA== dependencies: - fill-range "^7.0.1" + fill-range "^7.1.1" brorand@^1.0.1, brorand@^1.1.0: version "1.1.0" @@ -10252,7 +10304,7 @@ engine.io-parser@~5.0.3: resolved "https://registry.yarnpkg.com/engine.io-parser/-/engine.io-parser-5.0.6.tgz#7811244af173e157295dec9b2718dfe42a64ef45" integrity sha512-tjuoZDMAdEhVnSFleYPCtdL2GXwVTGtNjoeJd9IhIG3C1xs9uwxqRNEu5WpnDZCaozwVlK/nuQhpodhXSIMaxw== -engine.io@~6.4.1: +engine.io@~6.4.2: version "6.4.2" resolved "https://registry.yarnpkg.com/engine.io/-/engine.io-6.4.2.tgz#ffeaf68f69b1364b0286badddf15ff633476473f" integrity sha512-FKn/3oMiJjrOEOeUub2WCox6JhxBXq/Zn3fZOMCBxKnNYtsdKjxhl7yR3fZhM9PV+rdE75SU5SYMc+2PGzo+Tg== @@ -10510,34 +10562,34 @@ esbuild@^0.18.10, esbuild@^0.18.17: "@esbuild/win32-ia32" "0.18.20" "@esbuild/win32-x64" "0.18.20" -esbuild@^0.20.1: - version "0.20.2" - resolved "https://registry.yarnpkg.com/esbuild/-/esbuild-0.20.2.tgz#9d6b2386561766ee6b5a55196c6d766d28c87ea1" - integrity sha512-WdOOppmUNU+IbZ0PaDiTst80zjnrOkyJNHoKupIcVyU8Lvla3Ugx94VzkQ32Ijqd7UhHJy75gNWDMUekcrSJ6g== +esbuild@^0.21.3: + version "0.21.5" + resolved "https://registry.yarnpkg.com/esbuild/-/esbuild-0.21.5.tgz#9ca301b120922959b766360d8ac830da0d02997d" + integrity sha512-mg3OPMV4hXywwpoDxu3Qda5xCKQi+vCTZq8S9J/EpkhB2HzKXq4SNFZE3+NK93JYxc8VMSep+lOUSC/RVKaBqw== optionalDependencies: - "@esbuild/aix-ppc64" "0.20.2" - "@esbuild/android-arm" "0.20.2" - "@esbuild/android-arm64" "0.20.2" - "@esbuild/android-x64" "0.20.2" 
- "@esbuild/darwin-arm64" "0.20.2" - "@esbuild/darwin-x64" "0.20.2" - "@esbuild/freebsd-arm64" "0.20.2" - "@esbuild/freebsd-x64" "0.20.2" - "@esbuild/linux-arm" "0.20.2" - "@esbuild/linux-arm64" "0.20.2" - "@esbuild/linux-ia32" "0.20.2" - "@esbuild/linux-loong64" "0.20.2" - "@esbuild/linux-mips64el" "0.20.2" - "@esbuild/linux-ppc64" "0.20.2" - "@esbuild/linux-riscv64" "0.20.2" - "@esbuild/linux-s390x" "0.20.2" - "@esbuild/linux-x64" "0.20.2" - "@esbuild/netbsd-x64" "0.20.2" - "@esbuild/openbsd-x64" "0.20.2" - "@esbuild/sunos-x64" "0.20.2" - "@esbuild/win32-arm64" "0.20.2" - "@esbuild/win32-ia32" "0.20.2" - "@esbuild/win32-x64" "0.20.2" + "@esbuild/aix-ppc64" "0.21.5" + "@esbuild/android-arm" "0.21.5" + "@esbuild/android-arm64" "0.21.5" + "@esbuild/android-x64" "0.21.5" + "@esbuild/darwin-arm64" "0.21.5" + "@esbuild/darwin-x64" "0.21.5" + "@esbuild/freebsd-arm64" "0.21.5" + "@esbuild/freebsd-x64" "0.21.5" + "@esbuild/linux-arm" "0.21.5" + "@esbuild/linux-arm64" "0.21.5" + "@esbuild/linux-ia32" "0.21.5" + "@esbuild/linux-loong64" "0.21.5" + "@esbuild/linux-mips64el" "0.21.5" + "@esbuild/linux-ppc64" "0.21.5" + "@esbuild/linux-riscv64" "0.21.5" + "@esbuild/linux-s390x" "0.21.5" + "@esbuild/linux-x64" "0.21.5" + "@esbuild/netbsd-x64" "0.21.5" + "@esbuild/openbsd-x64" "0.21.5" + "@esbuild/sunos-x64" "0.21.5" + "@esbuild/win32-arm64" "0.21.5" + "@esbuild/win32-ia32" "0.21.5" + "@esbuild/win32-x64" "0.21.5" escalade@^3.1.1: version "3.1.1" @@ -11165,7 +11217,14 @@ fast-xml-parser@4.2.5: dependencies: strnum "^1.0.5" -fast-xml-parser@^4.1.3, fast-xml-parser@^4.2.2, fast-xml-parser@^4.2.5: +fast-xml-parser@^4.1.3: + version "4.4.0" + resolved "https://registry.yarnpkg.com/fast-xml-parser/-/fast-xml-parser-4.4.0.tgz#341cc98de71e9ba9e651a67f41f1752d1441a501" + integrity sha512-kLY3jFlwIYwBNDojclKsNAC12sfD6NwW74QB2CoNGPvtVxjliYehVunB3HYyNi+n4Tt1dAcgwYvmKF/Z18flqg== + dependencies: + strnum "^1.0.5" + +fast-xml-parser@^4.2.2, fast-xml-parser@^4.2.5: version "4.3.6" resolved "https://registry.yarnpkg.com/fast-xml-parser/-/fast-xml-parser-4.3.6.tgz#190f9d99097f0c8f2d3a0e681a10404afca052ff" integrity sha512-M2SovcRxD4+vC493Uc2GZVcZaj66CCJhWurC4viynVSTvrpErCShNcDz1lAho6n9REQKvL/ll4A4/fw6Y9z8nw== @@ -11324,10 +11383,10 @@ filing-cabinet@^3.0.1: tsconfig-paths "^3.10.1" typescript "^3.9.7" -fill-range@^7.0.1: - version "7.0.1" - resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.0.1.tgz#1919a6a7c75fe38b2c7c77e5198535da9acdda40" - integrity sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ== +fill-range@^7.1.1: + version "7.1.1" + resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.1.1.tgz#44265d3cac07e3ea7dc247516380643754a05292" + integrity sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg== dependencies: to-regex-range "^5.0.1" @@ -11429,6 +11488,11 @@ forever-agent@~0.6.1: resolved "https://registry.yarnpkg.com/forever-agent/-/forever-agent-0.6.1.tgz#fbc71f0c41adeb37f96c577ad1ed42d8fdacca91" integrity sha512-j0KLYPhm6zeac4lz3oJ3o65qvgQCcPubiyotZrXqEaG4hNagNYO8qdlUrX5vwqv9ohqeT/Z3j6+yW067yWWdUw== +form-data-encoder@1.7.2: + version "1.7.2" + resolved "https://registry.yarnpkg.com/form-data-encoder/-/form-data-encoder-1.7.2.tgz#1f1ae3dccf58ed4690b86d87e4f57c654fbab040" + integrity sha512-qfqtYan3rxrnCk1VYaA4H+Ms9xdpPqvLZa6xmMgFvhO32x7/3J/ExcTd6qpxM0vH2GdMI+poehyBZvqfMTto8A== + form-data@4.0.0, form-data@^4.0.0: version "4.0.0" resolved 
"https://registry.yarnpkg.com/form-data/-/form-data-4.0.0.tgz#93919daeaf361ee529584b9b31664dc12c9fa452" @@ -11465,6 +11529,14 @@ form-data@~2.3.2: combined-stream "^1.0.6" mime-types "^2.1.12" +formdata-node@^4.3.2: + version "4.4.1" + resolved "https://registry.yarnpkg.com/formdata-node/-/formdata-node-4.4.1.tgz#23f6a5cb9cb55315912cbec4ff7b0f59bbd191e2" + integrity sha512-0iirZp3uVDjVGt9p49aTaqjk84TrglENEDuqfdlZQ1roC9CWlPk6Avf8EEnZNcAqPonwkG35x4n3ww/1THYAeQ== + dependencies: + node-domexception "1.0.0" + web-streams-polyfill "4.0.0-beta.3" + formidable@^1.1.1: version "1.2.6" resolved "https://registry.yarnpkg.com/formidable/-/formidable-1.2.6.tgz#d2a51d60162bbc9b4a055d8457a7c75315d1a168" @@ -11904,6 +11976,17 @@ glob@^10.0.0, glob@^10.2.2: minipass "^7.0.4" path-scurry "^1.10.2" +glob@^10.3.7: + version "10.4.1" + resolved "https://registry.yarnpkg.com/glob/-/glob-10.4.1.tgz#0cfb01ab6a6b438177bfe6a58e2576f6efe909c2" + integrity sha512-2jelhlq3E4ho74ZyVLN03oKdAZVUa6UDZzFLVH1H7dnoax+y9qyaq8zBkfDIggjniU19z0wU18y16jMB2eyVIw== + dependencies: + foreground-child "^3.1.0" + jackspeak "^3.1.2" + minimatch "^9.0.4" + minipass "^7.1.2" + path-scurry "^1.11.1" + glob@^5.0.15: version "5.0.15" resolved "https://registry.yarnpkg.com/glob/-/glob-5.0.15.tgz#1bc936b9e02f4a603fcc222ecf7633d30b8b93b1" @@ -13472,6 +13555,15 @@ jackspeak@^2.3.6: optionalDependencies: "@pkgjs/parseargs" "^0.11.0" +jackspeak@^3.1.2: + version "3.4.0" + resolved "https://registry.yarnpkg.com/jackspeak/-/jackspeak-3.4.0.tgz#a75763ff36ad778ede6a156d8ee8b124de445b4a" + integrity sha512-JVYhQnN59LVPFCEcVa2C3CrEKYacvjRfqIQl+h8oi91aLYQVWRYbxjPcv1bUiUy/kLmQaANrYfNMCO3kuEDHfw== + dependencies: + "@isaacs/cliui" "^8.0.2" + optionalDependencies: + "@pkgjs/parseargs" "^0.11.0" + jake@^10.8.5: version "10.8.5" resolved "https://registry.yarnpkg.com/jake/-/jake-10.8.5.tgz#f2183d2c59382cb274226034543b9c03b8164c46" @@ -15751,6 +15843,13 @@ minimatch@^8.0.2: dependencies: brace-expansion "^2.0.1" +minimatch@^9.0.4: + version "9.0.4" + resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-9.0.4.tgz#8e49c731d1749cbec05050ee5145147b32496a51" + integrity sha512-KqWh+VchfxcMNRAJjj2tnsSJdNbHsVgnkBhTNrW7AjVo6OvLtxw8zfT9oLw1JSohlFzJ8jCoTgaoXvJ+kHt6fw== + dependencies: + brace-expansion "^2.0.1" + minimist-options@4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/minimist-options/-/minimist-options-4.1.0.tgz#c0655713c53a8a2ebd77ffa247d342c40f010619" @@ -15845,6 +15944,11 @@ minipass@^5.0.0: resolved "https://registry.yarnpkg.com/minipass/-/minipass-7.0.4.tgz#dbce03740f50a4786ba994c1fb908844d27b038c" integrity sha512-jYofLM5Dam9279rdkWzqHozUo4ybjdZmCsDHePy5V/PbBcVMiSZR97gmAy45aqi8CK1lG2ECd356FU86avfwUQ== +minipass@^7.1.2: + version "7.1.2" + resolved "https://registry.yarnpkg.com/minipass/-/minipass-7.1.2.tgz#93a9626ce5e5e66bd4db86849e7515e92340a707" + integrity sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw== + minizlib@^2.1.1, minizlib@^2.1.2: version "2.1.2" resolved "https://registry.yarnpkg.com/minizlib/-/minizlib-2.1.2.tgz#e90d3466ba209b932451508a11ce3d3632145931" @@ -15870,7 +15974,7 @@ mkdirp@^1.0.3, mkdirp@^1.0.4: resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-1.0.4.tgz#3eb5ed62622756d79a5f0e2a221dfebad75c2f7e" integrity sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw== -mlly@^1.1.0, mlly@^1.4.2, mlly@^1.7.0: +mlly@^1.1.0, mlly@^1.7.0: version "1.7.0" resolved 
"https://registry.yarnpkg.com/mlly/-/mlly-1.7.0.tgz#587383ae40dda23cadb11c3c3cc972b277724271" integrity sha512-U9SDaXGEREBYQgfejV97coK0UL1r+qnF2SyO9A3qcI8MzKnsIFKHNVEkrDyNncQTKQQumsasmeq84eNMdBfsNQ== @@ -15880,6 +15984,16 @@ mlly@^1.1.0, mlly@^1.4.2, mlly@^1.7.0: pkg-types "^1.1.0" ufo "^1.5.3" +mlly@^1.4.2: + version "1.7.1" + resolved "https://registry.yarnpkg.com/mlly/-/mlly-1.7.1.tgz#e0336429bb0731b6a8e887b438cbdae522c8f32f" + integrity sha512-rrVRZRELyQzrIUAVMHxP97kv+G786pHmOKzuFII8zDYahFBS7qnHh2AlYSl1GAHhaMPCz6/oHjVMcfFYgFYHgA== + dependencies: + acorn "^8.11.3" + pathe "^1.1.2" + pkg-types "^1.1.1" + ufo "^1.5.3" + modify-values@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/modify-values/-/modify-values-1.0.1.tgz#b3939fa605546474e3e3e3c63d64bd43b4ee6022" @@ -16033,10 +16147,10 @@ mute-stream@~1.0.0: resolved "https://registry.yarnpkg.com/mute-stream/-/mute-stream-1.0.0.tgz#e31bd9fe62f0aed23520aa4324ea6671531e013e" integrity sha512-avsJQhyd+680gKXyG/sQc0nXaC6rBkPOfyHYcFb9+hdkqQkR9bdnkJ0AMZhke0oesPqIO+mFFJ+IdBc7mst4IA== -mysql2@3.9.7: - version "3.9.7" - resolved "https://registry.yarnpkg.com/mysql2/-/mysql2-3.9.7.tgz#843755daf65b5ef08afe545fe14b8fb62824741a" - integrity sha512-KnJT8vYRcNAZv73uf9zpXqNbvBG7DJrs+1nACsjZP1HMJ1TgXEy8wnNilXAn/5i57JizXKtrUtwDB7HxT9DDpw== +mysql2@3.9.8: + version "3.9.8" + resolved "https://registry.yarnpkg.com/mysql2/-/mysql2-3.9.8.tgz#fe8a0f975f2c495ed76ca988ddc5505801dc49ce" + integrity sha512-+5JKNjPuks1FNMoy9TYpl77f+5frbTklz7eb3XDwbpsERRLEeXiW2PDEkakYF50UuKU2qwfGnyXpKYvukv8mGA== dependencies: denque "^2.1.0" generate-function "^2.3.1" @@ -16169,6 +16283,11 @@ node-addon-api@^6.1.0: resolved "https://registry.yarnpkg.com/node-addon-api/-/node-addon-api-6.1.0.tgz#ac8470034e58e67d0c6f1204a18ae6995d9c0d76" integrity sha512-+eawOlIgy680F0kBzPUNFhMZGtJ1YmqM6l4+Crf4IkImjYrO/mqPwRMh352g23uIaQKFItcQ64I7KMaJxHgAVA== +node-domexception@1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/node-domexception/-/node-domexception-1.0.0.tgz#6888db46a1f71c0b76b3f7555016b63fe64766e5" + integrity sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ== + node-fetch@2.6.0, node-fetch@2.6.7, node-fetch@^2.6.0, node-fetch@^2.6.1, node-fetch@^2.6.7, node-fetch@^2.6.9, node-fetch@^2.7.0: version "2.6.7" resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.7.tgz#24de9fba827e3b4ae44dc8b20256a379160052ad" @@ -16262,10 +16381,10 @@ node-source-walk@^5.0.0: dependencies: "@babel/parser" "^7.0.0" -nodemailer@6.7.2: - version "6.7.2" - resolved "https://registry.yarnpkg.com/nodemailer/-/nodemailer-6.7.2.tgz#44b2ad5f7ed71b7067f7a21c4fedabaec62b85e0" - integrity sha512-Dz7zVwlef4k5R71fdmxwR8Q39fiboGbu3xgswkzGwczUfjp873rVxt1O46+Fh0j1ORnAC6L9+heI8uUpO6DT7Q== +nodemailer@6.9.13: + version "6.9.13" + resolved "https://registry.yarnpkg.com/nodemailer/-/nodemailer-6.9.13.tgz#5b292bf1e92645f4852ca872c56a6ba6c4a3d3d6" + integrity sha512-7o38Yogx6krdoBf3jCAqnIN4oSQFx+fMa0I7dK1D+me9kBxx12D+/33wSb+fhOCtIxvYJ+4x4IMEhmhCKfAiOA== nodemailer@6.9.9: version "6.9.9" @@ -16798,13 +16917,19 @@ open@^8.0.0, open@^8.4.0, open@~8.4.0: is-docker "^2.1.1" is-wsl "^2.2.0" -openai@^3.2.1: - version "3.2.1" - resolved "https://registry.yarnpkg.com/openai/-/openai-3.2.1.tgz#1fa35bdf979cbde8453b43f2dd3a7d401ee40866" - integrity sha512-762C9BNlJPbjjlWZi4WYK9iM2tAVAv0uUp1UmI34vb0CN5T2mjB/qM6RYBmNKMh/dN9fC+bxqPwWJZUTWW052A== +openai@^4.52.1: + version "4.52.1" + resolved 
"https://registry.yarnpkg.com/openai/-/openai-4.52.1.tgz#44acc362a844fa2927b0cfa1fb70fb51e388af65" + integrity sha512-kv2hevAWZZ3I/vd2t8znGO2rd8wkowncsfcYpo8i+wU9ML+JEcdqiViANXXjWWGjIhajFNixE6gOY1fEgqILAg== dependencies: - axios "^0.26.0" - form-data "^4.0.0" + "@types/node" "^18.11.18" + "@types/node-fetch" "^2.6.4" + abort-controller "^3.0.0" + agentkeepalive "^4.2.1" + form-data-encoder "1.7.2" + formdata-node "^4.3.2" + node-fetch "^2.6.7" + web-streams-polyfill "^3.2.1" openapi-response-validator@^9.2.0: version "9.3.1" @@ -17378,6 +17503,14 @@ path-scurry@^1.10.2, path-scurry@^1.6.1: lru-cache "^10.2.0" minipass "^5.0.0 || ^6.0.2 || ^7.0.0" +path-scurry@^1.11.1: + version "1.11.1" + resolved "https://registry.yarnpkg.com/path-scurry/-/path-scurry-1.11.1.tgz#7960a668888594a0720b12a911d1a742ab9f11d2" + integrity sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA== + dependencies: + lru-cache "^10.2.0" + minipass "^5.0.0 || ^6.0.2 || ^7.0.0" + path-to-regexp@1.x: version "1.8.0" resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-1.8.0.tgz#887b3ba9d84393e87a0a0b9f4cb756198b53548a" @@ -17684,7 +17817,7 @@ pkg-dir@^4.2.0: dependencies: find-up "^4.0.0" -pkg-types@^1.0.3, pkg-types@^1.1.0: +pkg-types@^1.0.3, pkg-types@^1.1.0, pkg-types@^1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/pkg-types/-/pkg-types-1.1.1.tgz#07b626880749beb607b0c817af63aac1845a73f2" integrity sha512-ko14TjmDuQJ14zsotODv7dBlwxKhUKQEhuhmbqo1uCi9BB0Z2alo/wAXg6q1dTR5TyuqYyWhjtfe/Tsh+X28jQ== @@ -18041,7 +18174,7 @@ postgres-interval@^1.1.0: dependencies: xtend "^4.0.0" -posthog-js@^1.118.0, posthog-js@^1.13.4: +posthog-js@^1.118.0: version "1.131.3" resolved "https://registry.yarnpkg.com/posthog-js/-/posthog-js-1.131.3.tgz#bd3e6123dc715f089825a92d3ec62480b7ec0a76" integrity sha512-ds/TADDS+rT/WgUyeW4cJ+X+fX+O1KdkOyssNI/tP90PrFf0IJsck5B42YOLhfz87U2vgTyBaKHkdlMgWuOFog== @@ -18049,6 +18182,14 @@ posthog-js@^1.118.0, posthog-js@^1.13.4: fflate "^0.4.8" preact "^10.19.3" +posthog-js@^1.13.4: + version "1.139.2" + resolved "https://registry.yarnpkg.com/posthog-js/-/posthog-js-1.139.2.tgz#f8de29edf2770da47fcccb7838902d1e89d6b43d" + integrity sha512-myyuOADqZvYwgqmriwlKDEUDwLhscivFLh67UWBj4Wt9kOlmklvJb36W0ES2GAS6IdojbnGZGH5lF3heqreLWQ== + dependencies: + fflate "^0.4.8" + preact "^10.19.3" + posthog-node@1.3.0: version "1.3.0" resolved "https://registry.yarnpkg.com/posthog-node/-/posthog-node-1.3.0.tgz#804ed2f213a2f05253f798bf9569d55a9cad94f7" @@ -19318,6 +19459,13 @@ rimraf@^4.4.1: dependencies: glob "^9.2.0" +rimraf@^5.0.7: + version "5.0.7" + resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-5.0.7.tgz#27bddf202e7d89cb2e0381656380d1734a854a74" + integrity sha512-nV6YcJo5wbLW77m+8KjH8aB/7/rxQy9SZ0HY5shnwULfS+9nmTtVXAJET5NdZmCzA4fPI/Hm1wo/Po/4mopOdg== + dependencies: + glob "^10.3.7" + ripemd160@^2.0.0, ripemd160@^2.0.1: version "2.0.2" resolved "https://registry.yarnpkg.com/ripemd160/-/ripemd160-2.0.2.tgz#a1c1a6f624751577ba5d07914cbc92850585890c" @@ -20044,17 +20192,25 @@ socket.io-parser@~4.2.1: "@socket.io/component-emitter" "~3.1.0" debug "~4.3.1" -socket.io@4.6.1: - version "4.6.1" - resolved "https://registry.yarnpkg.com/socket.io/-/socket.io-4.6.1.tgz#62ec117e5fce0692fa50498da9347cfb52c3bc70" - integrity sha512-KMcaAi4l/8+xEjkRICl6ak8ySoxsYG+gG6/XfRCPJPQ/haCRIJBTL4wIl8YCsmtaBovcAXGLOShyVWQ/FG8GZA== +socket.io-parser@~4.2.4: + version "4.2.4" + resolved 
"https://registry.yarnpkg.com/socket.io-parser/-/socket.io-parser-4.2.4.tgz#c806966cf7270601e47469ddeec30fbdfda44c83" + integrity sha512-/GbIKmo8ioc+NIWIhwdecY0ge+qVBSMdgxGygevmdHj24bsfgtCmcUUcQ5ZzcylGFHsN3k4HB4Cgkl96KVnuew== + dependencies: + "@socket.io/component-emitter" "~3.1.0" + debug "~4.3.1" + +socket.io@4.6.2: + version "4.6.2" + resolved "https://registry.yarnpkg.com/socket.io/-/socket.io-4.6.2.tgz#d597db077d4df9cbbdfaa7a9ed8ccc3d49439786" + integrity sha512-Vp+lSks5k0dewYTfwgPT9UeGGd+ht7sCpB7p0e83VgO4X/AHYWhXITMrNk/pg8syY2bpx23ptClCQuHhqi2BgQ== dependencies: accepts "~1.3.4" base64id "~2.0.0" debug "~4.3.2" - engine.io "~6.4.1" + engine.io "~6.4.2" socket.io-adapter "~2.5.2" - socket.io-parser "~4.2.1" + socket.io-parser "~4.2.4" socks-proxy-agent@^7.0.0: version "7.0.0" @@ -20986,19 +21142,7 @@ tar@6.1.11: mkdirp "^1.0.3" yallist "^4.0.0" -tar@6.1.15: - version "6.1.15" - resolved "https://registry.yarnpkg.com/tar/-/tar-6.1.15.tgz#c9738b0b98845a3b344d334b8fa3041aaba53a69" - integrity sha512-/zKt9UyngnxIT/EAGYuxaMYgOIJiP81ab9ZfkILq4oNLPFX50qyYmu7jRj9qeXoxmJHjGlbH0+cm2uy1WCs10A== - dependencies: - chownr "^2.0.0" - fs-minipass "^2.0.0" - minipass "^5.0.0" - minizlib "^2.1.1" - mkdirp "^1.0.3" - yallist "^4.0.0" - -tar@^6.1.11, tar@^6.1.2: +tar@6.2.1, tar@^6.1.11, tar@^6.1.2: version "6.2.1" resolved "https://registry.yarnpkg.com/tar/-/tar-6.2.1.tgz#717549c541bc3c2af15751bea94b1dd068d4b03a" integrity sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A== @@ -21236,6 +21380,11 @@ tlhunter-sorted-set@^0.1.0: resolved "https://registry.yarnpkg.com/tlhunter-sorted-set/-/tlhunter-sorted-set-0.1.0.tgz#1c3eae28c0fa4dff97e9501d2e3c204b86406f4b" integrity sha512-eGYW4bjf1DtrHzUYxYfAcSytpOkA44zsr7G2n3PV7yOUR23vmkGe3LL4R+1jL9OsXtbsFOwe8XtbCrabeaEFnw== +tmp@0.2.3: + version "0.2.3" + resolved "https://registry.yarnpkg.com/tmp/-/tmp-0.2.3.tgz#eb783cc22bc1e8bebd0671476d46ea4eb32a79ae" + integrity sha512-nZD7m9iCPC5g0pYmcaxogYKggSfLsdxl8of3Q/oIbqCqLLIO9IAF0GWjX1z9NZRHPiXv8Wex4yDCaZsgEw0Y8w== + tmp@^0.0.33: version "0.0.33" resolved "https://registry.yarnpkg.com/tmp/-/tmp-0.0.33.tgz#6d34335889768d21b2bcda0aa277ced3b1bfadf9" @@ -21642,6 +21791,11 @@ typescript@5.2.2, "typescript@>=3 < 6": resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.2.2.tgz#5ebb5e5a5b75f085f22bc3f8460fba308310fa78" integrity sha512-mI4WrpHsbCIcwT9cF4FZvr80QUeKvsUsUvKDoR+X/7XHQH98xYD8YHZg7ANtz2GtZt/CBq2QJ0thkGJMHfqc1w== +typescript@5.5.2: + version "5.5.2" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.5.2.tgz#c26f023cb0054e657ce04f72583ea2d85f8d0507" + integrity sha512-NcRtPEOsPFFWjobJEtfihkLCZCXZt/os3zf8nTxjVH3RvTSxjrCamJpbExGvYOF+tFHc3pA65qpdwPbzjohhew== + typescript@^3.9.10, typescript@^3.9.5, typescript@^3.9.7: version "3.9.10" resolved "https://registry.yarnpkg.com/typescript/-/typescript-3.9.10.tgz#70f3910ac7a51ed6bef79da7800690b19bf778b8" @@ -22086,11 +22240,11 @@ vite@^4.5.0: fsevents "~2.3.2" vite@^5.0.0: - version "5.2.12" - resolved "https://registry.yarnpkg.com/vite/-/vite-5.2.12.tgz#3536c93c58ba18edea4915a2ac573e6537409d97" - integrity sha512-/gC8GxzxMK5ntBwb48pR32GGhENnjtY30G4A0jemunsBkiEZFw60s8InGpN8gkhHEkjnRK1aSAxeQgwvFhUHAA== + version "5.3.1" + resolved "https://registry.yarnpkg.com/vite/-/vite-5.3.1.tgz#bb2ca6b5fd7483249d3e86b25026e27ba8a663e6" + integrity sha512-XBmSKRLXLxiaPYamLv3/hnP/KXDai1NDexN0FpkTaZXTfycHvkRHoenpgl/fvuK/kPbB6xAgoyiryAhQNxYmAQ== dependencies: - esbuild "^0.20.1" + esbuild "^0.21.3" postcss "^8.4.38" rollup 
"^4.13.0" optionalDependencies: @@ -22212,6 +22366,16 @@ wcwidth@^1.0.0, wcwidth@^1.0.1: dependencies: defaults "^1.0.3" +web-streams-polyfill@4.0.0-beta.3: + version "4.0.0-beta.3" + resolved "https://registry.yarnpkg.com/web-streams-polyfill/-/web-streams-polyfill-4.0.0-beta.3.tgz#2898486b74f5156095e473efe989dcf185047a38" + integrity sha512-QW95TCTaHmsYfHDybGMwO5IJIM93I/6vTRk+daHTWFPhwh+C8Cg7j7XyKrwrj8Ib6vYXe0ocYNrmzY4xAAN6ug== + +web-streams-polyfill@^3.2.1: + version "3.3.3" + resolved "https://registry.yarnpkg.com/web-streams-polyfill/-/web-streams-polyfill-3.3.3.tgz#2073b91a2fdb1fbfbd401e7de0ac9f8214cecb4b" + integrity sha512-d2JWLCivmZYTSIoge9MsgFCZrt571BikcWGYkjC1khllbTeDlGqZ2D8vD8E/lJa8WGWbb7Plm8/XJYV7IJHZZw== + webfinger@^0.4.2: version "0.4.2" resolved "https://registry.yarnpkg.com/webfinger/-/webfinger-0.4.2.tgz#3477a6d97799461896039fcffc650b73468ee76d" @@ -22531,14 +22695,14 @@ write-stream@~0.4.3: readable-stream "~0.0.2" ws@^7.4.6: - version "7.5.9" - resolved "https://registry.yarnpkg.com/ws/-/ws-7.5.9.tgz#54fa7db29f4c7cec68b1ddd3a89de099942bb591" - integrity sha512-F+P9Jil7UiSKSkppIiD94dN07AwvFixvLIj1Og1Rl9GGMuNipJnV9JzjD6XuqmAeiswGvUmNLjr5cFuXwNS77Q== + version "7.5.10" + resolved "https://registry.yarnpkg.com/ws/-/ws-7.5.10.tgz#58b5c20dc281633f6c19113f39b349bd8bd558d9" + integrity sha512-+dbF1tHwZpXcbOJdVOkzLDxZP1ailvSxM6ZweXTegylPny803bFhA+vqBYw4s31NSAk4S2Qz+AKXK9a4wkdjcQ== ws@^8.13.0: - version "8.13.0" - resolved "https://registry.yarnpkg.com/ws/-/ws-8.13.0.tgz#9a9fb92f93cf41512a0735c8f4dd09b8a1211cd0" - integrity sha512-x9vcZYTrFPC7aSIbj7sRCYo7L/Xb8Iy+pW0ng0wt2vCJv7M9HOMy0UoN3rr+IFC7hb7vXoqS+P9ktyLLLhO+LA== + version "8.17.1" + resolved "https://registry.yarnpkg.com/ws/-/ws-8.17.1.tgz#9293da530bb548febc95371d90f9c878727d919b" + integrity sha512-6XQFvXTkbfUOZOKKILFG1PDK2NDQs4azKQl26T0YS5CxqWLgXajbPZ+h4gZekJyRqFU8pvnbAbbs/3TgRPy+GQ== ws@~8.11.0: version "8.11.0"