diff --git a/.github/workflows/budibase_ci.yml b/.github/workflows/budibase_ci.yml index b7dbcae771..86fd4f6799 100644 --- a/.github/workflows/budibase_ci.yml +++ b/.github/workflows/budibase_ci.yml @@ -25,7 +25,7 @@ env: BASE_BRANCH: ${{ github.event.pull_request.base.ref}} PERSONAL_ACCESS_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN }} NX_BASE_BRANCH: origin/${{ github.base_ref }} - USE_NX_AFFECTED: ${{ github.event_name == 'pull_request' }} + ONLY_AFFECTED_TASKS: ${{ github.event_name == 'pull_request' }} IS_OSS_CONTRIBUTOR: ${{ inputs.run_as_oss == true || (github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != 'Budibase/budibase') }} jobs: @@ -72,7 +72,7 @@ jobs: # Check the types of the projects built via esbuild - name: Check types run: | - if ${{ env.USE_NX_AFFECTED }}; then + if ${{ env.ONLY_AFFECTED_TASKS }}; then yarn check:types --since=${{ env.NX_BASE_BRANCH }} --ignore @budibase/account-portal-server else yarn check:types --ignore @budibase/account-portal-server @@ -116,7 +116,7 @@ jobs: - run: yarn --frozen-lockfile - name: Test run: | - if ${{ env.USE_NX_AFFECTED }}; then + if ${{ env.ONLY_AFFECTED_TASKS }}; then yarn test --ignore=@budibase/worker --ignore=@budibase/server --since=${{ env.NX_BASE_BRANCH }} else yarn test --ignore=@budibase/worker --ignore=@budibase/server @@ -140,8 +140,8 @@ jobs: - run: yarn --frozen-lockfile - name: Test worker run: | - if ${{ env.USE_NX_AFFECTED }}; then - yarn test --scope=@budibase/worker --since=${{ env.NX_BASE_BRANCH }} + if ${{ env.ONLY_AFFECTED_TASKS }}; then + node scripts/run-affected.js --task=test --scope=@budibase/worker --since=${{ env.NX_BASE_BRANCH }} else yarn test --scope=@budibase/worker fi @@ -179,16 +179,9 @@ jobs: - run: yarn --frozen-lockfile - name: Test server - env: - DD_CIVISIBILITY_AGENTLESS_ENABLED: true - DD_API_KEY: "${{ secrets.DATADOG_API_KEY }}" - DD_SITE: "datadoghq.eu" - NODE_OPTIONS: "-r dd-trace/ci/init" - DD_ENV: "ci" - DD_SERVICE: "budibase/packages/server" run: | - if ${{ env.USE_NX_AFFECTED }}; then - yarn test --scope=@budibase/server --since=${{ env.NX_BASE_BRANCH }} + if ${{ env.ONLY_AFFECTED_TASKS }}; then + node scripts/run-affected.js --task=test --scope=@budibase/server --since=${{ env.NX_BASE_BRANCH }} else yarn test --scope=@budibase/server fi @@ -233,10 +226,11 @@ jobs: if: ${{ steps.get_pro_commits.outputs.base_commit_excluding_merges != '' }} run: | cd packages/pro + base_commit='${{ steps.get_pro_commits.outputs.base_commit }}' base_commit_excluding_merges='${{ steps.get_pro_commits.outputs.base_commit_excluding_merges }}' pro_commit='${{ steps.get_pro_commits.outputs.pro_commit }}' - any_commit=$(git log --no-merges $base_commit_excluding_merges...$pro_commit) + any_commit=$(git log --no-merges $base_commit...$pro_commit) if [ -n "$any_commit" ]; then echo $any_commit diff --git a/examples/nextjs-api-sales/package.json b/examples/nextjs-api-sales/package.json index 9303874a77..481197b26c 100644 --- a/examples/nextjs-api-sales/package.json +++ b/examples/nextjs-api-sales/package.json @@ -22,6 +22,6 @@ "@types/react": "17.0.39", "eslint": "8.10.0", "eslint-config-next": "12.1.0", - "typescript": "5.2.2" + "typescript": "5.5.2" } } diff --git a/hosting/couchdb/Dockerfile b/hosting/couchdb/Dockerfile index ca72153e78..b95fa348f8 100644 --- a/hosting/couchdb/Dockerfile +++ b/hosting/couchdb/Dockerfile @@ -96,10 +96,13 @@ EXPOSE 5984 4369 9100 CMD ["/opt/couchdb/bin/couchdb"] FROM base as runner +ARG TARGETARCH +ENV TARGETARCH $TARGETARCH ENV COUCHDB_USER admin 
ENV COUCHDB_PASSWORD admin EXPOSE 5984 +EXPOSE 4984 RUN apt-get update && apt-get install -y --no-install-recommends software-properties-common wget unzip curl && \ wget -O - https://packages.adoptium.net/artifactory/api/gpg/key/public | apt-key add - && \ @@ -125,7 +128,12 @@ ADD clouseau/log4j.properties clouseau/clouseau.ini ./ WORKDIR /opt/couchdb ADD couch/vm.args couch/local.ini ./etc/ +# setup SQS +WORKDIR /opt/sqs +ADD sqs ./ +RUN chmod +x ./install.sh && ./install.sh + WORKDIR / ADD runner.sh ./bbcouch-runner.sh -RUN chmod +x ./bbcouch-runner.sh /opt/clouseau/bin/clouseau -CMD ["./bbcouch-runner.sh"] \ No newline at end of file +RUN chmod +x ./bbcouch-runner.sh /opt/clouseau/bin/clouseau /opt/sqs/sqs +CMD ["./bbcouch-runner.sh"] diff --git a/hosting/couchdb/Dockerfile.v2 b/hosting/couchdb/Dockerfile.v2 deleted file mode 100644 index 126742cadb..0000000000 --- a/hosting/couchdb/Dockerfile.v2 +++ /dev/null @@ -1,139 +0,0 @@ -# Modified from https://github.com/apache/couchdb-docker/blob/main/3.3.3/Dockerfile -# -# Everything in this `base` image is adapted from the official `couchdb` image's -# Dockerfile. Only modifications related to upgrading from Debian bullseye to -# bookworm have been included. The `runner` image contains Budibase's -# customisations to the image, e.g. adding Clouseau. -FROM node:20-slim AS base - -# Add CouchDB user account to make sure the IDs are assigned consistently -RUN groupadd -g 5984 -r couchdb && useradd -u 5984 -d /opt/couchdb -g couchdb couchdb - -# be sure GPG and apt-transport-https are available and functional -RUN set -ex; \ - apt-get update; \ - apt-get install -y --no-install-recommends \ - apt-transport-https \ - ca-certificates \ - dirmngr \ - gnupg \ - ; \ - rm -rf /var/lib/apt/lists/* - -# grab tini for signal handling and zombie reaping -# see https://github.com/apache/couchdb-docker/pull/28#discussion_r141112407 -RUN set -eux; \ - apt-get update; \ - apt-get install -y --no-install-recommends tini; \ - rm -rf /var/lib/apt/lists/*; \ - tini --version - -# http://docs.couchdb.org/en/latest/install/unix.html#installing-the-apache-couchdb-packages -ENV GPG_COUCH_KEY \ -# gpg: rsa8192 205-01-19 The Apache Software Foundation (Package repository signing key) - 390EF70BB1EA12B2773962950EE62FB37A00258D -RUN set -eux; \ - apt-get update; \ - apt-get install -y curl; \ - export GNUPGHOME="$(mktemp -d)"; \ - curl -fL -o keys.asc https://couchdb.apache.org/repo/keys.asc; \ - gpg --batch --import keys.asc; \ - gpg --batch --export "${GPG_COUCH_KEY}" > /usr/share/keyrings/couchdb-archive-keyring.gpg; \ - command -v gpgconf && gpgconf --kill all || :; \ - rm -rf "$GNUPGHOME"; \ - apt-key list; \ - apt purge -y --autoremove curl; \ - rm -rf /var/lib/apt/lists/* - -ENV COUCHDB_VERSION 3.3.3 - -RUN . 
/etc/os-release; \ - echo "deb [signed-by=/usr/share/keyrings/couchdb-archive-keyring.gpg] https://apache.jfrog.io/artifactory/couchdb-deb/ ${VERSION_CODENAME} main" | \ - tee /etc/apt/sources.list.d/couchdb.list >/dev/null - -# https://github.com/apache/couchdb-pkg/blob/master/debian/README.Debian -RUN set -eux; \ - apt-get update; \ - \ - echo "couchdb couchdb/mode select none" | debconf-set-selections; \ -# we DO want recommends this time - DEBIAN_FRONTEND=noninteractive apt-get install -y --allow-downgrades --allow-remove-essential --allow-change-held-packages \ - couchdb="$COUCHDB_VERSION"~bookworm \ - ; \ -# Undo symlinks to /var/log and /var/lib - rmdir /var/lib/couchdb /var/log/couchdb; \ - rm /opt/couchdb/data /opt/couchdb/var/log; \ - mkdir -p /opt/couchdb/data /opt/couchdb/var/log; \ - chown couchdb:couchdb /opt/couchdb/data /opt/couchdb/var/log; \ - chmod 777 /opt/couchdb/data /opt/couchdb/var/log; \ -# Remove file that sets logging to a file - rm /opt/couchdb/etc/default.d/10-filelog.ini; \ -# Check we own everything in /opt/couchdb. Matches the command in dockerfile_entrypoint.sh - find /opt/couchdb \! \( -user couchdb -group couchdb \) -exec chown -f couchdb:couchdb '{}' +; \ -# Setup directories and permissions for config. Technically these could be 555 and 444 respectively -# but we keep them as 755 and 644 for consistency with CouchDB defaults and the dockerfile_entrypoint.sh. - find /opt/couchdb/etc -type d ! -perm 0755 -exec chmod -f 0755 '{}' +; \ - find /opt/couchdb/etc -type f ! -perm 0644 -exec chmod -f 0644 '{}' +; \ -# only local.d needs to be writable for the docker_entrypoint.sh - chmod -f 0777 /opt/couchdb/etc/local.d; \ -# apt clean-up - rm -rf /var/lib/apt/lists/*; - -# Add configuration -COPY --chown=couchdb:couchdb couch/10-docker-default.ini /opt/couchdb/etc/default.d/ -# COPY --chown=couchdb:couchdb vm.args /opt/couchdb/etc/ - -COPY docker-entrypoint.sh /usr/local/bin -RUN ln -s usr/local/bin/docker-entrypoint.sh /docker-entrypoint.sh # backwards compat -ENTRYPOINT ["tini", "--", "/docker-entrypoint.sh"] - -VOLUME /opt/couchdb/data - -# 5984: Main CouchDB endpoint -# 4369: Erlang portmap daemon (epmd) -# 9100: CouchDB cluster communication port -EXPOSE 5984 4369 9100 -CMD ["/opt/couchdb/bin/couchdb"] - -FROM base as runner -ARG TARGETARCH -ENV TARGETARCH $TARGETARCH - -ENV COUCHDB_USER admin -ENV COUCHDB_PASSWORD admin -EXPOSE 5984 -EXPOSE 4984 - -RUN apt-get update && apt-get install -y --no-install-recommends software-properties-common wget unzip curl && \ - wget -O - https://packages.adoptium.net/artifactory/api/gpg/key/public | apt-key add - && \ - apt-add-repository 'deb http://security.debian.org/debian-security bookworm-security/updates main' && \ - apt-add-repository 'deb http://archive.debian.org/debian stretch-backports main' && \ - apt-add-repository 'deb https://packages.adoptium.net/artifactory/deb bookworm main' && \ - apt-get update && apt-get install -y --no-install-recommends temurin-8-jdk && \ - rm -rf /var/lib/apt/lists/ - -# setup clouseau -WORKDIR / -RUN wget https://github.com/cloudant-labs/clouseau/releases/download/2.21.0/clouseau-2.21.0-dist.zip && \ - unzip clouseau-2.21.0-dist.zip && \ - mv clouseau-2.21.0 /opt/clouseau && \ - rm clouseau-2.21.0-dist.zip - -WORKDIR /opt/clouseau -RUN mkdir ./bin -ADD clouseau/clouseau ./bin/ -ADD clouseau/log4j.properties clouseau/clouseau.ini ./ - -# setup CouchDB -WORKDIR /opt/couchdb -ADD couch/vm.args couch/local.ini ./etc/ - -# setup SQS -WORKDIR /opt/sqs -ADD sqs ./ -RUN chmod +x 
./install.sh && ./install.sh - -WORKDIR / -ADD runner.v2.sh ./bbcouch-runner.sh -RUN chmod +x ./bbcouch-runner.sh /opt/clouseau/bin/clouseau /opt/sqs/sqs -CMD ["./bbcouch-runner.sh"] diff --git a/hosting/couchdb/runner.sh b/hosting/couchdb/runner.sh index aaadee6b43..f8cbe49b8f 100644 --- a/hosting/couchdb/runner.sh +++ b/hosting/couchdb/runner.sh @@ -70,9 +70,12 @@ sed -i "s#COUCHDB_ERLANG_COOKIE#${COUCHDB_ERLANG_COOKIE}#g" /opt/clouseau/clouse /opt/clouseau/bin/clouseau > /dev/stdout 2>&1 & # Start CouchDB. -/docker-entrypoint.sh /opt/couchdb/bin/couchdb & +/docker-entrypoint.sh /opt/couchdb/bin/couchdb > /dev/stdout 2>&1 & -# Wati for CouchDB to start up. +# Start SQS. Use 127.0.0.1 instead of localhost to avoid IPv6 issues. +/opt/sqs/sqs --server "http://127.0.0.1:5984" --data-dir ${DATA_DIR}/sqs --bind-address=0.0.0.0 > /dev/stdout 2>&1 & + +# Wait for CouchDB to start up. while [[ $(curl -s -w "%{http_code}\n" http://localhost:5984/_up -o /dev/null) -ne 200 ]]; do echo 'Waiting for CouchDB to start...'; sleep 5; @@ -82,4 +85,4 @@ done # function correctly, so we create them here. curl -X PUT -u "${COUCHDB_USER}:${COUCHDB_PASSWORD}" http://localhost:5984/_users curl -X PUT -u "${COUCHDB_USER}:${COUCHDB_PASSWORD}" http://localhost:5984/_replicator -sleep infinity \ No newline at end of file +sleep infinity diff --git a/hosting/couchdb/runner.v2.sh b/hosting/couchdb/runner.v2.sh deleted file mode 100644 index f8cbe49b8f..0000000000 --- a/hosting/couchdb/runner.v2.sh +++ /dev/null @@ -1,88 +0,0 @@ -#!/bin/bash - -DATA_DIR=${DATA_DIR:-/data} -COUCHDB_ERLANG_COOKIE=${COUCHDB_ERLANG_COOKIE:-B9CFC32C-3458-4A86-8448-B3C753991CA7} - -mkdir -p ${DATA_DIR} -mkdir -p ${DATA_DIR}/couch/{dbs,views} -mkdir -p ${DATA_DIR}/search -chown -R couchdb:couchdb ${DATA_DIR}/couch - -echo ${TARGETBUILD} > /buildtarget.txt -if [[ "${TARGETBUILD}" = "aas" ]]; then - # Azure AppService uses /home for persistent data & SSH on port 2222 - DATA_DIR="${DATA_DIR:-/home}" - WEBSITES_ENABLE_APP_SERVICE_STORAGE=true - mkdir -p $DATA_DIR/{search,minio,couch} - mkdir -p $DATA_DIR/couch/{dbs,views} - chown -R couchdb:couchdb $DATA_DIR/couch/ - apt update - apt-get install -y openssh-server - echo "root:Docker!" | chpasswd - mkdir -p /tmp - chmod +x /tmp/ssh_setup.sh \ - && (sleep 1;/tmp/ssh_setup.sh 2>&1 > /dev/null) - cp /etc/sshd_config /etc/ssh/sshd_config - /etc/init.d/ssh restart - sed -i "s#DATA_DIR#/home#g" /opt/clouseau/clouseau.ini - sed -i "s#DATA_DIR#/home#g" /opt/couchdb/etc/local.ini -elif [[ "${TARGETBUILD}" = "single" ]]; then - # In the single image build, the Dockerfile specifies /data as a volume - # mount, so we use that for all persistent data. - sed -i "s#DATA_DIR#/data#g" /opt/clouseau/clouseau.ini - sed -i "s#DATA_DIR#/data#g" /opt/couchdb/etc/local.ini -elif [[ "${TARGETBUILD}" = "docker-compose" ]]; then - # We remove the database_dir and view_index_dir settings from the local.ini - # in docker-compose because it will default to /opt/couchdb/data which is what - # our docker-compose was using prior to us switching to using our own CouchDB - # image. - sed -i "s#^database_dir.*\$##g" /opt/couchdb/etc/local.ini - sed -i "s#^view_index_dir.*\$##g" /opt/couchdb/etc/local.ini - sed -i "s#^dir=.*\$#dir=/opt/couchdb/data#g" /opt/clouseau/clouseau.ini -elif [[ -n $KUBERNETES_SERVICE_HOST ]]; then - # In Kubernetes the directory /opt/couchdb/data has a persistent volume - # mount for storing database data. 
- sed -i "s#^dir=.*\$#dir=/opt/couchdb/data#g" /opt/clouseau/clouseau.ini - - # We remove the database_dir and view_index_dir settings from the local.ini - # in Kubernetes because it will default to /opt/couchdb/data which is what - # our Helm chart was using prior to us switching to using our own CouchDB - # image. - sed -i "s#^database_dir.*\$##g" /opt/couchdb/etc/local.ini - sed -i "s#^view_index_dir.*\$##g" /opt/couchdb/etc/local.ini - - # We remove the -name setting from the vm.args file in Kubernetes because - # it will default to the pod FQDN, which is what's required for clustering - # to work. - sed -i "s/^-name .*$//g" /opt/couchdb/etc/vm.args -else - # For all other builds, we use /data for persistent data. - sed -i "s#DATA_DIR#/data#g" /opt/clouseau/clouseau.ini - sed -i "s#DATA_DIR#/data#g" /opt/couchdb/etc/local.ini -fi - -sed -i "s#COUCHDB_ERLANG_COOKIE#${COUCHDB_ERLANG_COOKIE}#g" /opt/couchdb/etc/vm.args -sed -i "s#COUCHDB_ERLANG_COOKIE#${COUCHDB_ERLANG_COOKIE}#g" /opt/clouseau/clouseau.ini - -# Start Clouseau. Budibase won't function correctly without Clouseau running, it -# powers the search API endpoints which are used to do all sorts, including -# populating app grids. -/opt/clouseau/bin/clouseau > /dev/stdout 2>&1 & - -# Start CouchDB. -/docker-entrypoint.sh /opt/couchdb/bin/couchdb > /dev/stdout 2>&1 & - -# Start SQS. Use 127.0.0.1 instead of localhost to avoid IPv6 issues. -/opt/sqs/sqs --server "http://127.0.0.1:5984" --data-dir ${DATA_DIR}/sqs --bind-address=0.0.0.0 > /dev/stdout 2>&1 & - -# Wait for CouchDB to start up. -while [[ $(curl -s -w "%{http_code}\n" http://localhost:5984/_up -o /dev/null) -ne 200 ]]; do - echo 'Waiting for CouchDB to start...'; - sleep 5; -done - -# CouchDB needs the `_users` and `_replicator` databases to exist before it will -# function correctly, so we create them here. 
-curl -X PUT -u "${COUCHDB_USER}:${COUCHDB_PASSWORD}" http://localhost:5984/_users -curl -X PUT -u "${COUCHDB_USER}:${COUCHDB_PASSWORD}" http://localhost:5984/_replicator -sleep infinity diff --git a/lerna.json b/lerna.json index 76d39a69b3..e92bdb224b 100644 --- a/lerna.json +++ b/lerna.json @@ -1,12 +1,12 @@ { - "version": "2.29.2", + "$schema": "node_modules/lerna/schemas/lerna-schema.json", + "version": "2.29.6", "npmClient": "yarn", "packages": [ "packages/*", "!packages/account-portal", "packages/account-portal/packages/*" ], - "useNx": true, "concurrency": 20, "command": { "publish": { diff --git a/nx.json b/nx.json index 8ba8798946..54db3a24a3 100644 --- a/nx.json +++ b/nx.json @@ -1,4 +1,5 @@ { + "$schema": "./node_modules/nx/schemas/nx-schema.json", "tasksRunnerOptions": { "default": { "runner": "nx-cloud", @@ -11,5 +12,10 @@ "build": { "inputs": ["{workspaceRoot}/scripts/*", "{workspaceRoot}/lerna.json"] } + }, + "namedInputs": { + "default": ["{projectRoot}/**/*", "sharedGlobals"], + "sharedGlobals": [], + "production": ["default"] } } diff --git a/package.json b/package.json index e05eb795bc..6416397c94 100644 --- a/package.json +++ b/package.json @@ -18,16 +18,15 @@ "eslint-plugin-svelte": "^2.34.0", "husky": "^8.0.3", "kill-port": "^1.6.1", - "lerna": "7.1.1", + "lerna": "7.4.2", "madge": "^6.0.0", - "nx": "16.4.3", "nx-cloud": "16.0.5", "prettier": "2.8.8", "prettier-plugin-svelte": "^2.3.0", "proper-lockfile": "^4.1.2", "svelte": "^4.2.10", "svelte-eslint-parser": "^0.33.1", - "typescript": "5.2.2", + "typescript": "5.5.2", "typescript-eslint": "^7.3.1", "yargs": "^17.7.2" }, @@ -78,7 +77,6 @@ "build:docker:single:sqs": "./scripts/build-single-image-sqs.sh", "build:docker:dependencies": "docker build -f hosting/dependencies/Dockerfile -t budibase/dependencies:latest ./hosting", "publish:docker:couch": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/couchdb/Dockerfile -t budibase/couchdb:latest -t budibase/couchdb:v3.3.3 --push ./hosting/couchdb", - "publish:docker:couch-sqs": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/couchdb/Dockerfile.v2 -t budibase/couchdb:v3.3.3-sqs --push ./hosting/couchdb", "publish:docker:dependencies": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/dependencies/Dockerfile -t budibase/dependencies:latest -t budibase/dependencies:v3.2.1 --push ./hosting", "release:helm": "node scripts/releaseHelmChart", "env:multi:enable": "lerna run --stream env:multi:enable", diff --git a/packages/account-portal b/packages/account-portal index b600cca314..ff16525b73 160000 --- a/packages/account-portal +++ b/packages/account-portal @@ -1 +1 @@ -Subproject commit b600cca314a5cc9971e44d46047d1a0019b46b08 +Subproject commit ff16525b73c5751d344f5c161a682609c0a993f2 diff --git a/packages/backend-core/package.json b/packages/backend-core/package.json index f61059cc97..88b970884c 100644 --- a/packages/backend-core/package.json +++ b/packages/backend-core/package.json @@ -16,7 +16,7 @@ "prepack": "cp package.json dist", "build": "tsc -p tsconfig.build.json --paths null && node ./scripts/build.js", "build:dev": "yarn prebuild && tsc --build --watch --preserveWatchOutput", - "check:types": "tsc -p tsconfig.json --noEmit --paths null", + "check:types": "tsc -p tsconfig.json --noEmit --paths null --target es2020", "test": "bash scripts/test.sh", "test:watch": "jest --watchAll" }, @@ -79,7 +79,7 @@ "pouchdb-adapter-memory": "7.2.2", "testcontainers": "^10.7.2", "timekeeper": "2.2.0", - "typescript": "5.2.2" + 
"typescript": "5.5.2" }, "nx": { "targets": { diff --git a/packages/backend-core/src/sql/sql.ts b/packages/backend-core/src/sql/sql.ts index cdc5f3d3c8..615753efc3 100644 --- a/packages/backend-core/src/sql/sql.ts +++ b/packages/backend-core/src/sql/sql.ts @@ -184,7 +184,11 @@ class InternalBuilder { query: Knex.QueryBuilder, filters: SearchFilters | undefined, table: Table, - opts: { aliases?: Record; relationship?: boolean } + opts: { + aliases?: Record + relationship?: boolean + columnPrefix?: string + } ): Knex.QueryBuilder { if (!filters) { return query @@ -192,7 +196,10 @@ class InternalBuilder { filters = parseFilters(filters) // if all or specified in filters, then everything is an or const allOr = filters.allOr - const sqlStatements = new SqlStatements(this.client, table, { allOr }) + const sqlStatements = new SqlStatements(this.client, table, { + allOr, + columnPrefix: opts.columnPrefix, + }) const tableName = this.client === SqlClient.SQL_LITE ? table._id! : table.name @@ -397,9 +404,9 @@ class InternalBuilder { contains(filters.containsAny, true) } + const tableRef = opts?.aliases?.[table._id!] || table._id // when searching internal tables make sure long looking for rows - if (filters.documentType && !isExternalTable(table)) { - const tableRef = opts?.aliases?.[table._id!] || table._id + if (filters.documentType && !isExternalTable(table) && tableRef) { // has to be its own option, must always be AND onto the search query.andWhereLike( `${tableRef}._id`, @@ -663,6 +670,7 @@ class InternalBuilder { } // add filters to the query (where) query = this.addFilters(query, filters, json.meta.table, { + columnPrefix: json.meta.columnPrefix, aliases: tableAliases, }) @@ -698,6 +706,7 @@ class InternalBuilder { } return this.addFilters(query, filters, json.meta.table, { + columnPrefix: json.meta.columnPrefix, relationship: true, aliases: tableAliases, }) @@ -708,6 +717,7 @@ class InternalBuilder { let query = this.knexWithAlias(knex, endpoint, tableAliases) const parsedBody = parseBody(body) query = this.addFilters(query, filters, json.meta.table, { + columnPrefix: json.meta.columnPrefix, aliases: tableAliases, }) // mysql can't use returning @@ -722,6 +732,7 @@ class InternalBuilder { const { endpoint, filters, tableAliases } = json let query = this.knexWithAlias(knex, endpoint, tableAliases) query = this.addFilters(query, filters, json.meta.table, { + columnPrefix: json.meta.columnPrefix, aliases: tableAliases, }) // mysql can't use returning diff --git a/packages/backend-core/src/sql/sqlStatements.ts b/packages/backend-core/src/sql/sqlStatements.ts index a80defd8b8..311f7c7d49 100644 --- a/packages/backend-core/src/sql/sqlStatements.ts +++ b/packages/backend-core/src/sql/sqlStatements.ts @@ -5,19 +5,27 @@ export class SqlStatements { client: string table: Table allOr: boolean | undefined + columnPrefix: string | undefined + constructor( client: string, table: Table, - { allOr }: { allOr?: boolean } = {} + { allOr, columnPrefix }: { allOr?: boolean; columnPrefix?: string } = {} ) { this.client = client this.table = table this.allOr = allOr + this.columnPrefix = columnPrefix } getField(key: string): FieldSchema | undefined { const fieldName = key.split(".")[1] - return this.table.schema[fieldName] + let found = this.table.schema[fieldName] + if (!found && this.columnPrefix) { + const prefixRemovedFieldName = fieldName.replace(this.columnPrefix, "") + found = this.table.schema[prefixRemovedFieldName] + } + return found } between( diff --git a/packages/bbui/src/Drawer/Drawer.svelte 
b/packages/bbui/src/Drawer/Drawer.svelte index 89ee92726d..1f38389a63 100644 --- a/packages/bbui/src/Drawer/Drawer.svelte +++ b/packages/bbui/src/Drawer/Drawer.svelte @@ -223,7 +223,7 @@ height: 420px; background: var(--background); border: var(--border-light); - z-index: 100; + z-index: 1000; border-radius: 8px; overflow: hidden; box-sizing: border-box; diff --git a/packages/builder/src/components/automation/SetupPanel/AutomationBlockSetup.svelte b/packages/builder/src/components/automation/SetupPanel/AutomationBlockSetup.svelte index 57ca19ddb2..01fde43d12 100644 --- a/packages/builder/src/components/automation/SetupPanel/AutomationBlockSetup.svelte +++ b/packages/builder/src/components/automation/SetupPanel/AutomationBlockSetup.svelte @@ -43,7 +43,7 @@ EditorModes, } from "components/common/CodeEditor" import FilterBuilder from "components/design/settings/controls/FilterEditor/FilterBuilder.svelte" - import { QueryUtils, Utils } from "@budibase/frontend-core" + import { QueryUtils, Utils, search } from "@budibase/frontend-core" import { getSchemaForDatasourcePlus, getEnvironmentBindings, @@ -75,7 +75,11 @@ $: schema = getSchemaForDatasourcePlus(tableId, { searchableSchema: true, }).schema - $: schemaFields = Object.values(schema || {}) + $: schemaFields = search.getFields( + $tables.list, + Object.values(schema || {}), + { allowLinks: true } + ) $: queryLimit = tableId?.includes("datasource") ? "∞" : "1000" $: isTrigger = block?.type === "TRIGGER" $: isUpdateRow = stepId === ActionStepID.UPDATE_ROW diff --git a/packages/builder/src/components/backend/DataTable/modals/CreateEditColumn.svelte b/packages/builder/src/components/backend/DataTable/modals/CreateEditColumn.svelte index d79eedd194..e1ef6f1036 100644 --- a/packages/builder/src/components/backend/DataTable/modals/CreateEditColumn.svelte +++ b/packages/builder/src/components/backend/DataTable/modals/CreateEditColumn.svelte @@ -495,11 +495,7 @@ newError.name = `Column name cannot start with an underscore.` } else if (fieldInfo.name && !fieldInfo.name.match(ValidColumnNameRegex)) { newError.name = `Illegal character; must be alpha-numeric.` - } else if ( - prohibited.some( - name => fieldInfo?.name?.toLowerCase() === name.toLowerCase() - ) - ) { + } else if (prohibited.some(name => fieldInfo?.name === name)) { newError.name = `${prohibited.join( ", " )} are not allowed as column names - case insensitive.` diff --git a/packages/builder/src/components/design/settings/controls/ButtonActionEditor/actions/PromptUser.svelte b/packages/builder/src/components/design/settings/controls/ButtonActionEditor/actions/PromptUser.svelte index b808733d08..77fb579aa4 100644 --- a/packages/builder/src/components/design/settings/controls/ButtonActionEditor/actions/PromptUser.svelte +++ b/packages/builder/src/components/design/settings/controls/ButtonActionEditor/actions/PromptUser.svelte @@ -18,7 +18,7 @@
(parameters.customTitleText = e.detail)} {bindings} @@ -30,6 +30,22 @@ on:change={e => (parameters.confirmText = e.detail)} {bindings} /> + + + (parameters.confirmButtonText = e.detail)} + {bindings} + /> + + + (parameters.cancelButtonText = e.detail)} + {bindings} + />
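The schemaFields change in AutomationBlockSetup above swaps a plain `Object.values(schema)` for `search.getFields($tables.list, ..., { allowLinks: true })`, so columns from linked tables become searchable alongside the table's own fields. The sketch below is a rough, self-contained illustration of that expansion; the `Field`/`Table` shapes and the `expandLinkFields` name are assumptions for illustration, not the actual `@budibase/frontend-core` implementation (which also filters out banned search types).

```typescript
// Rough sketch only: shapes and helper name are assumptions, not the real
// @budibase/frontend-core API.
type Field = { name: string; type: string; tableId?: string }
type Table = {
  _id: string
  name: string
  sql?: boolean
  schema: Record<string, Field>
}

function expandLinkFields(tables: Table[], fields: Field[]): Field[] {
  const expanded: Field[] = []
  for (const field of fields) {
    expanded.push(field)
    if (field.type !== "link" || !field.tableId) {
      continue
    }
    // only SQL-backed tables can search across relationships
    const related = tables.find(table => table._id === field.tableId)
    if (!related?.sql) {
      continue
    }
    for (const linked of Object.values(related.schema)) {
      if (linked.type === "link") {
        continue
      }
      // expose related columns under the link field's name, e.g. "users.firstname",
      // matching the searchFields.js prefix change later in this diff
      expanded.push({ ...linked, name: `${field.name}.${linked.name}` })
    }
  }
  return expanded
}
```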
diff --git a/packages/builder/src/components/design/settings/controls/FilterEditor/FilterEditor.svelte b/packages/builder/src/components/design/settings/controls/FilterEditor/FilterEditor.svelte index e481bb4381..ed5e36cd65 100644 --- a/packages/builder/src/components/design/settings/controls/FilterEditor/FilterEditor.svelte +++ b/packages/builder/src/components/design/settings/controls/FilterEditor/FilterEditor.svelte @@ -9,7 +9,8 @@ import { createEventDispatcher } from "svelte" import { getDatasourceForProvider, getSchemaForDatasource } from "dataBinding" import FilterBuilder from "./FilterBuilder.svelte" - import { selectedScreen } from "stores/builder" + import { tables, selectedScreen } from "stores/builder" + import { search } from "@budibase/frontend-core" const dispatch = createEventDispatcher() @@ -23,7 +24,11 @@ $: tempValue = value $: datasource = getDatasourceForProvider($selectedScreen, componentInstance) $: dsSchema = getSchemaForDatasource($selectedScreen, datasource)?.schema - $: schemaFields = Object.values(schema || dsSchema || {}) + $: schemaFields = search.getFields( + $tables.list, + Object.values(schema || dsSchema || {}), + { allowLinks: true } + ) $: text = getText(value?.filter(filter => filter.field)) async function saveFilter() { diff --git a/packages/cli/package.json b/packages/cli/package.json index 88d5926ae3..1722d45730 100644 --- a/packages/cli/package.json +++ b/packages/cli/package.json @@ -11,7 +11,7 @@ "scripts": { "tsc": "node ../../scripts/build.js", "build": "yarn tsc", - "check:types": "tsc -p tsconfig.json --noEmit --paths null", + "check:types": "tsc -p tsconfig.json --noEmit --paths null --target es2020", "start": "ts-node ./src/index.ts" }, "dependencies": { @@ -40,6 +40,6 @@ "@types/node-fetch": "2.6.4", "@types/pouchdb": "^6.4.0", "ts-node": "10.8.1", - "typescript": "5.2.2" + "typescript": "5.5.2" } } diff --git a/packages/client/manifest.json b/packages/client/manifest.json index 00b503626f..7a9d1a5695 100644 --- a/packages/client/manifest.json +++ b/packages/client/manifest.json @@ -23,17 +23,21 @@ { "type": "bigint", "message": "stringAsNumber" }, { "type": "options", "message": "stringAsNumber" }, { "type": "formula", "message": "stringAsNumber" }, - { "type": "datetime", "message": "dateAsNumber"} + { "type": "datetime", "message": "dateAsNumber" } ], - "unsupported": [ - { "type": "json", "message": "jsonPrimitivesOnly" } - ] + "unsupported": [{ "type": "json", "message": "jsonPrimitivesOnly" }] }, "stringLike": { - "supported": ["string", "number", "bigint", "options", "longform", "boolean", "datetime"], - "unsupported": [ - { "type": "json", "message": "jsonPrimitivesOnly" } - ] + "supported": [ + "string", + "number", + "bigint", + "options", + "longform", + "boolean", + "datetime" + ], + "unsupported": [{ "type": "json", "message": "jsonPrimitivesOnly" }] }, "datetimeLike": { "supported": ["datetime"], @@ -43,11 +47,9 @@ { "type": "options", "message": "stringAsDate" }, { "type": "formula", "message": "stringAsDate" }, { "type": "bigint", "message": "stringAsDate" }, - { "type": "number", "message": "numberAsDate"} + { "type": "number", "message": "numberAsDate" } ], - "unsupported": [ - { "type": "json", "message": "jsonPrimitivesOnly" } - ] + "unsupported": [{ "type": "json", "message": "jsonPrimitivesOnly" }] } }, "layout": { diff --git a/packages/client/src/components/preview/SettingsBar.svelte b/packages/client/src/components/preview/SettingsBar.svelte index b69b8ce050..c5109c6bca 100644 --- 
a/packages/client/src/components/preview/SettingsBar.svelte +++ b/packages/client/src/components/preview/SettingsBar.svelte @@ -41,7 +41,7 @@ allSettings.push(setting) } }) - return allSettings.filter(setting => setting.showInBar) + return allSettings.filter(setting => setting.showInBar && !setting.hidden) } const updatePosition = () => { diff --git a/packages/frontend-core/src/components/FilterBuilder.svelte b/packages/frontend-core/src/components/FilterBuilder.svelte index 6d1e1fa502..5f58c9ea7f 100644 --- a/packages/frontend-core/src/components/FilterBuilder.svelte +++ b/packages/frontend-core/src/components/FilterBuilder.svelte @@ -16,7 +16,6 @@ import { QueryUtils, Constants } from "@budibase/frontend-core" import { getContext } from "svelte" import FilterUsers from "./FilterUsers.svelte" - import { getFields } from "../utils/searchFields" const { OperatorOptions, DEFAULT_BB_DATASOURCE_ID } = Constants @@ -62,9 +61,7 @@ ] const context = getContext("context") - $: fieldOptions = getFields(tables, schemaFields || [], { - allowLinks: true, - }).map(field => ({ + $: fieldOptions = (schemaFields || []).map(field => ({ label: field.displayName || field.name, value: field.name, })) diff --git a/packages/frontend-core/src/components/grid/layout/ButtonColumn.svelte b/packages/frontend-core/src/components/grid/layout/ButtonColumn.svelte index 20cfdb1ec5..ead2c67787 100644 --- a/packages/frontend-core/src/components/grid/layout/ButtonColumn.svelte +++ b/packages/frontend-core/src/components/grid/layout/ButtonColumn.svelte @@ -3,6 +3,7 @@ import { Button } from "@budibase/bbui" import GridCell from "../cells/GridCell.svelte" import GridScrollWrapper from "./GridScrollWrapper.svelte" + import { BlankRowID } from "../lib/constants" const { renderedRows, @@ -17,6 +18,7 @@ isDragging, buttonColumnWidth, showVScrollbar, + dispatch, } = getContext("grid") let container @@ -89,6 +91,17 @@ {/each} +
($hoveredRowId = BlankRowID)} + on:mouseleave={$isDragging ? null : () => ($hoveredRowId = null)} + > + dispatch("add-row-inline")} + /> +
@@ -129,8 +142,11 @@ align-items: center; gap: 4px; } + .blank :global(.cell:hover) { + cursor: pointer; + } - /* Add left cell border */ + /* Add left cell border to all cells */ .button-column :global(.cell) { border-left: var(--cell-border); } diff --git a/packages/frontend-core/src/components/grid/layout/Grid.svelte b/packages/frontend-core/src/components/grid/layout/Grid.svelte index 8a82209162..8ea9e2264d 100644 --- a/packages/frontend-core/src/components/grid/layout/Grid.svelte +++ b/packages/frontend-core/src/components/grid/layout/Grid.svelte @@ -26,7 +26,7 @@ MaxCellRenderOverflow, GutterWidth, DefaultRowHeight, - Padding, + VPadding, SmallRowHeight, ControlsHeight, ScrollBarSize, @@ -119,7 +119,7 @@ // Derive min height and make available in context const minHeight = derived(rowHeight, $height => { const heightForControls = showControls ? ControlsHeight : 0 - return Padding + SmallRowHeight + $height + heightForControls + return VPadding + SmallRowHeight + $height + heightForControls }) context = { ...context, minHeight } @@ -354,8 +354,13 @@ transition: none; } - /* Overrides */ - .grid.quiet :global(.grid-data-content .row > .cell:not(:last-child)) { + /* Overrides for quiet */ + .grid.quiet :global(.grid-data-content .row > .cell:not(:last-child)), + .grid.quiet :global(.sticky-column .row > .cell), + .grid.quiet :global(.new-row .row > .cell:not(:last-child)) { border-right: none; } + .grid.quiet :global(.sticky-column:before) { + display: none; + } diff --git a/packages/frontend-core/src/components/grid/layout/GridBody.svelte b/packages/frontend-core/src/components/grid/layout/GridBody.svelte index 8be56674be..cf93f3004e 100644 --- a/packages/frontend-core/src/components/grid/layout/GridBody.svelte +++ b/packages/frontend-core/src/components/grid/layout/GridBody.svelte @@ -2,6 +2,7 @@ import { getContext, onMount } from "svelte" import GridScrollWrapper from "./GridScrollWrapper.svelte" import GridRow from "./GridRow.svelte" + import GridCell from "../cells/GridCell.svelte" import { BlankRowID } from "../lib/constants" import ButtonColumn from "./ButtonColumn.svelte" @@ -46,7 +47,6 @@ -
{#each $renderedRows as row, idx} @@ -54,13 +54,16 @@ {/each} {#if $config.canAddRows}
($hoveredRowId = BlankRowID)} on:mouseleave={$isDragging ? null : () => ($hoveredRowId = null)} - on:click={() => dispatch("add-row-inline")} - /> + > + dispatch("add-row-inline")} + /> +
{/if}
{#if $props.buttons?.length} @@ -76,15 +79,13 @@ overflow: hidden; flex: 1 1 auto; } - .blank { - height: var(--row-height); - background: var(--cell-background); - border-bottom: var(--cell-border); - border-right: var(--cell-border); - position: absolute; + .row { + display: flex; + flex-direction: row; + justify-content: flex-start; + align-items: stretch; } - .blank.highlighted { - background: var(--cell-background-hover); + .blank :global(.cell:hover) { cursor: pointer; } diff --git a/packages/frontend-core/src/components/grid/layout/NewRow.svelte b/packages/frontend-core/src/components/grid/layout/NewRow.svelte index 68ace8a5b2..0b27a5d135 100644 --- a/packages/frontend-core/src/components/grid/layout/NewRow.svelte +++ b/packages/frontend-core/src/components/grid/layout/NewRow.svelte @@ -31,6 +31,7 @@ filter, inlineFilters, columnRenderMap, + scrollTop, } = getContext("grid") let visible = false @@ -43,6 +44,21 @@ $: $datasource, (visible = false) $: selectedRowCount = Object.values($selectedRows).length $: hasNoRows = !$rows.length + $: renderedRowCount = $renderedRows.length + $: offset = getOffset($hasNextPage, renderedRowCount, $rowHeight, $scrollTop) + + const getOffset = (hasNextPage, rowCount, rowHeight, scrollTop) => { + // If we have a next page of data then we aren't truly at the bottom, so we + // render the add row component at the top + if (hasNextPage) { + return 0 + } + offset = rowCount * rowHeight - (scrollTop % rowHeight) + if (rowCount !== 0) { + offset -= 1 + } + return offset + } const addRow = async () => { // Blur the active cell and tick to let final value updates propagate @@ -85,23 +101,13 @@ return } - // If we have a next page of data then we aren't truly at the bottom, so we - // render the add row component at the top - if ($hasNextPage) { - offset = 0 - } - // If we don't have a next page then we're at the bottom and can scroll to // the max available offset - else { + if (!$hasNextPage) { scroll.update(state => ({ ...state, top: $maxScrollTop, })) - offset = $renderedRows.length * $rowHeight - ($maxScrollTop % $rowHeight) - if ($renderedRows.length !== 0) { - offset -= 1 - } } // Update state and select initial cell @@ -171,39 +177,41 @@ {#if visible}
0} style="--offset:{offset}px; --sticky-width:{width}px;" >
- - - {#if isAdding} -
- {/if} - - {#if $stickyColumn} - {@const cellId = getCellID(NewRowID, $stickyColumn.name)} - - {#if $stickyColumn?.schema?.autocolumn} -
Can't edit auto column
- {/if} +
+ + {#if isAdding}
{/if} - - {/if} + + {#if $stickyColumn} + {@const cellId = getCellID(NewRowID, $stickyColumn.name)} + + {#if $stickyColumn?.schema?.autocolumn} +
Can't edit auto column
+ {/if} + {#if isAdding} +
+ {/if} + + {/if} +
@@ -270,7 +278,7 @@ margin-left: -6px; } - .container { + .new-row { position: absolute; top: var(--default-row-height); left: 0; @@ -280,10 +288,10 @@ flex-direction: row; align-items: stretch; } - .container :global(.cell) { + .new-row :global(.cell) { --cell-background: var(--spectrum-global-color-gray-75) !important; } - .container.floating :global(.cell) { + .new-row.floating :global(.cell) { height: calc(var(--row-height) + 1px); border-top: var(--cell-border); } @@ -312,8 +320,10 @@ pointer-events: all; z-index: 3; position: absolute; - top: calc(var(--row-height) + var(--offset) + 24px); - left: 18px; + top: calc( + var(--row-height) + var(--offset) + var(--default-row-height) / 2 + ); + left: calc(var(--default-row-height) / 2); } .button-with-keys { display: flex; diff --git a/packages/frontend-core/src/components/grid/layout/StickyColumn.svelte b/packages/frontend-core/src/components/grid/layout/StickyColumn.svelte index b57c89ee4f..85c1eb2897 100644 --- a/packages/frontend-core/src/components/grid/layout/StickyColumn.svelte +++ b/packages/frontend-core/src/components/grid/layout/StickyColumn.svelte @@ -66,62 +66,58 @@ -
- - {#each $renderedRows as row, idx} - {@const rowSelected = !!$selectedRows[row._id]} - {@const rowHovered = $hoveredRowId === row._id} - {@const rowFocused = $focusedRow?._id === row._id} - {@const cellId = getCellID(row._id, $stickyColumn?.name)} -
($hoveredRowId = row._id)} - on:mouseleave={$isDragging ? null : () => ($hoveredRowId = null)} - on:click={() => dispatch("rowclick", rows.actions.cleanRow(row))} - > - - {#if $stickyColumn} - - {/if} -
- {/each} - {#if $config.canAddRows} -
($hoveredRowId = BlankRowID)} - on:mouseleave={$isDragging ? null : () => ($hoveredRowId = null)} - on:click={() => dispatch("add-row-inline")} - > - - - - {#if $stickyColumn} - - - - {/if} -
- {/if} -
-
+ + {#each $renderedRows as row, idx} + {@const rowSelected = !!$selectedRows[row._id]} + {@const rowHovered = $hoveredRowId === row._id} + {@const rowFocused = $focusedRow?._id === row._id} + {@const cellId = getCellID(row._id, $stickyColumn?.name)} +
($hoveredRowId = row._id)} + on:mouseleave={$isDragging ? null : () => ($hoveredRowId = null)} + on:click={() => dispatch("rowclick", rows.actions.cleanRow(row))} + > + + {#if $stickyColumn} + + {/if} +
+ {/each} + {#if $config.canAddRows} +
($hoveredRowId = BlankRowID)} + on:mouseleave={$isDragging ? null : () => ($hoveredRowId = null)} + on:click={() => dispatch("add-row-inline")} + > + + + + {#if $stickyColumn} + + + + {/if} +
+ {/if} +
diff --git a/packages/frontend-core/src/components/grid/lib/constants.js b/packages/frontend-core/src/components/grid/lib/constants.js index 4b5d04894a..6ea7a98178 100644 --- a/packages/frontend-core/src/components/grid/lib/constants.js +++ b/packages/frontend-core/src/components/grid/lib/constants.js @@ -1,12 +1,13 @@ -export const Padding = 100 -export const ScrollBarSize = 8 -export const GutterWidth = 72 -export const DefaultColumnWidth = 200 -export const MinColumnWidth = 80 export const SmallRowHeight = 36 export const MediumRowHeight = 64 export const LargeRowHeight = 92 export const DefaultRowHeight = SmallRowHeight +export const VPadding = SmallRowHeight * 2 +export const HPadding = 40 +export const ScrollBarSize = 8 +export const GutterWidth = 72 +export const DefaultColumnWidth = 200 +export const MinColumnWidth = 80 export const NewRowID = "new" export const BlankRowID = "blank" export const RowPageSize = 100 diff --git a/packages/frontend-core/src/components/grid/stores/scroll.js b/packages/frontend-core/src/components/grid/stores/scroll.js index e7114cd00c..814d4cdc8c 100644 --- a/packages/frontend-core/src/components/grid/stores/scroll.js +++ b/packages/frontend-core/src/components/grid/stores/scroll.js @@ -1,6 +1,12 @@ import { writable, derived, get } from "svelte/store" import { tick } from "svelte" -import { Padding, GutterWidth, FocusedCellMinOffset } from "../lib/constants" +import { + GutterWidth, + FocusedCellMinOffset, + ScrollBarSize, + HPadding, + VPadding, +} from "../lib/constants" import { parseCellID } from "../lib/utils" export const createStores = () => { @@ -34,28 +40,15 @@ export const deriveStores = context => { // Memoize store primitives const stickyColumnWidth = derived(stickyColumn, $col => $col?.width || 0, 0) - // Derive vertical limits - const contentHeight = derived( - [rows, rowHeight], - ([$rows, $rowHeight]) => ($rows.length + 1) * $rowHeight + Padding, - 0 - ) - const maxScrollTop = derived( - [height, contentHeight], - ([$height, $contentHeight]) => Math.max($contentHeight - $height, 0), - 0 - ) - // Derive horizontal limits const contentWidth = derived( [visibleColumns, stickyColumnWidth, buttonColumnWidth], ([$visibleColumns, $stickyColumnWidth, $buttonColumnWidth]) => { - const space = Math.max(Padding, $buttonColumnWidth - 1) - let width = GutterWidth + space + $stickyColumnWidth + let width = GutterWidth + $buttonColumnWidth + $stickyColumnWidth $visibleColumns.forEach(col => { width += col.width }) - return width + return width + HPadding }, 0 ) @@ -71,14 +64,6 @@ export const deriveStores = context => { }, 0 ) - - // Derive whether to show scrollbars or not - const showVScrollbar = derived( - [contentHeight, height], - ([$contentHeight, $height]) => { - return $contentHeight > $height - } - ) const showHScrollbar = derived( [contentWidth, screenWidth], ([$contentWidth, $screenWidth]) => { @@ -86,6 +71,30 @@ export const deriveStores = context => { } ) + // Derive vertical limits + const contentHeight = derived( + [rows, rowHeight, showHScrollbar], + ([$rows, $rowHeight, $showHScrollbar]) => { + let height = ($rows.length + 1) * $rowHeight + VPadding + if ($showHScrollbar) { + height += ScrollBarSize * 2 + } + return height + }, + 0 + ) + const maxScrollTop = derived( + [height, contentHeight], + ([$height, $contentHeight]) => Math.max($contentHeight - $height, 0), + 0 + ) + const showVScrollbar = derived( + [contentHeight, height], + ([$contentHeight, $height]) => { + return $contentHeight > $height + } + ) + return { contentHeight, 
contentWidth, diff --git a/packages/frontend-core/src/utils/searchFields.js b/packages/frontend-core/src/utils/searchFields.js index dec6e93480..294f2c0f38 100644 --- a/packages/frontend-core/src/utils/searchFields.js +++ b/packages/frontend-core/src/utils/searchFields.js @@ -2,7 +2,6 @@ import { BannedSearchTypes } from "../constants" export function getTableFields(tables, linkField) { const table = tables.find(table => table._id === linkField.tableId) - // TODO: mdrury - add support for this with SQS at some point if (!table || !table.sql) { return [] } @@ -11,7 +10,7 @@ export function getTableFields(tables, linkField) { }) return linkFields.map(field => ({ ...field, - name: `${table.name}.${field.name}`, + name: `${linkField.name}.${field.name}`, })) } diff --git a/packages/pro b/packages/pro index 6c8d0174ca..e8f2c5a147 160000 --- a/packages/pro +++ b/packages/pro @@ -1 +1 @@ -Subproject commit 6c8d0174ca58c578a37022965ddb923fdbf8e32a +Subproject commit e8f2c5a14780e1f61ec3896821ba5f93d486eb72 diff --git a/packages/server/package.json b/packages/server/package.json index e146bd081c..94bbb6fc6b 100644 --- a/packages/server/package.json +++ b/packages/server/package.json @@ -12,7 +12,7 @@ "prebuild": "rimraf dist/", "build": "node ./scripts/build.js", "postbuild": "copyfiles -f ../client/dist/budibase-client.js ../client/manifest.json client && copyfiles -f ../../yarn.lock ./dist/", - "check:types": "tsc -p tsconfig.json --noEmit --paths null", + "check:types": "tsc -p tsconfig.json --noEmit --paths null --target es2020", "build:isolated-vm-lib:snippets": "esbuild --minify --bundle src/jsRunner/bundles/snippets.ts --outfile=src/jsRunner/bundles/snippets.ivm.bundle.js --platform=node --format=iife --global-name=snippets", "build:isolated-vm-lib:string-templates": "esbuild --minify --bundle src/jsRunner/bundles/index-helpers.ts --outfile=src/jsRunner/bundles/index-helpers.ivm.bundle.js --platform=node --format=iife --external:handlebars --global-name=helpers", "build:isolated-vm-lib:bson": "esbuild --minify --bundle src/jsRunner/bundles/bsonPackage.ts --outfile=src/jsRunner/bundles/bson.ivm.bundle.js --platform=node --format=iife --global-name=bson", @@ -99,7 +99,7 @@ "mysql2": "3.9.8", "node-fetch": "2.6.7", "object-sizeof": "2.6.1", - "openai": "^3.2.1", + "openai": "^4.52.1", "openapi-types": "9.3.1", "pg": "8.10.0", "pouchdb": "7.3.0", @@ -152,7 +152,7 @@ "timekeeper": "2.2.0", "ts-node": "10.8.1", "tsconfig-paths": "4.0.0", - "typescript": "5.2.2", + "typescript": "5.5.2", "update-dotenv": "1.1.1", "yargs": "13.2.4" }, diff --git a/packages/server/src/api/controllers/row/views.ts b/packages/server/src/api/controllers/row/views.ts index 63ce12f0ab..12e76155bc 100644 --- a/packages/server/src/api/controllers/row/views.ts +++ b/packages/server/src/api/controllers/row/views.ts @@ -25,7 +25,9 @@ export async function searchView( ctx.throw(400, `This method only supports viewsV2`) } - const viewFields = Object.keys(view.schema || {}) + const viewFields = Object.entries(view.schema || {}) + .filter(([_, value]) => value.visible) + .map(([key]) => key) const { body } = ctx.request // Enrich saved query with ephemeral query params. 
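The searchView change above narrows the fields handed to the search layer to the columns a view actually marks as visible, which pairs with the viewsV2 change just below (hidden columns now stay in the saved schema instead of being stripped). A minimal sketch of that filtering, assuming a simplified view-schema shape rather than the full field metadata type:

```typescript
// Simplified view schema shape for illustration; the real field metadata
// carries more properties (order, width, icon, readonly, ...).
type ViewFieldSchema = { visible?: boolean; readonly?: boolean }
type ViewSchema = Record<string, ViewFieldSchema>

function visibleViewFields(schema: ViewSchema = {}): string[] {
  // only columns explicitly marked visible are exposed to row search
  return Object.entries(schema)
    .filter(([, field]) => field.visible)
    .map(([name]) => name)
}

// e.g. visibleViewFields({ id: { visible: true }, Category: { visible: false } })
// => ["id"]
```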
diff --git a/packages/server/src/api/controllers/view/viewsV2.ts b/packages/server/src/api/controllers/view/viewsV2.ts index 76807b796a..4208772fa6 100644 --- a/packages/server/src/api/controllers/view/viewsV2.ts +++ b/packages/server/src/api/controllers/view/viewsV2.ts @@ -33,11 +33,6 @@ async function parseSchema(view: CreateViewRequest) { p[fieldName] = fieldSchema return p }, {} as Record>) - for (let [key, column] of Object.entries(finalViewSchema)) { - if (!column.visible && !column.readonly) { - delete finalViewSchema[key] - } - } return finalViewSchema } diff --git a/packages/server/src/api/routes/tests/search.spec.ts b/packages/server/src/api/routes/tests/search.spec.ts index 589f129f31..5cd28f4506 100644 --- a/packages/server/src/api/routes/tests/search.spec.ts +++ b/packages/server/src/api/routes/tests/search.spec.ts @@ -9,20 +9,20 @@ import { db as dbCore, utils } from "@budibase/backend-core" import * as setup from "./utilities" import { AutoFieldSubType, + BBReferenceFieldSubType, Datasource, EmptyFilterOption, - BBReferenceFieldSubType, FieldType, + RelationshipType, + Row, RowSearchParams, SearchFilters, + SearchResponse, SortOrder, SortType, Table, TableSchema, User, - Row, - RelationshipType, - SearchResponse, } from "@budibase/types" import _ from "lodash" import tk from "timekeeper" @@ -1938,6 +1938,17 @@ describe.each([ ]) }) + it("successfully finds a row searching with a string", async () => { + await expectQuery({ + // @ts-expect-error this test specifically goes against the type to + // test that we coerce the string to an array. + contains: { "1:users": user1._id }, + }).toContainExactly([ + { users: [{ _id: user1._id }] }, + { users: [{ _id: user1._id }, { _id: user2._id }] }, + ]) + }) + it("fails to find nonexistent row", async () => { await expectQuery({ contains: { users: ["us_none"] } }).toFindNothing() }) @@ -2073,6 +2084,28 @@ describe.each([ }) }) + isInternal && + describe("no column error backwards compat", () => { + beforeAll(async () => { + table = await createTable({ + name: { + name: "name", + type: FieldType.STRING, + }, + }) + }) + + it("shouldn't error when column doesn't exist", async () => { + await expectSearch({ + query: { + string: { + "1:something": "a", + }, + }, + }).toMatch({ rows: [] }) + }) + }) + // lucene can't count the total rows !isLucene && describe("row counting", () => { @@ -2108,4 +2141,29 @@ describe.each([ }).toNotHaveProperty(["totalRows"]) }) }) + + describe.each(["data_name_test", "name_data_test", "name_test_data_"])( + "special (%s) case", + column => { + beforeAll(async () => { + table = await createTable({ + [column]: { + name: column, + type: FieldType.STRING, + }, + }) + await createRows([{ [column]: "a" }, { [column]: "b" }]) + }) + + it("should be able to query a column with data_ in it", async () => { + await expectSearch({ + query: { + equal: { + [`1:${column}`]: "a", + }, + }, + }).toContainExactly([{ [column]: "a" }]) + }) + } + ) }) diff --git a/packages/server/src/api/routes/tests/table.spec.ts b/packages/server/src/api/routes/tests/table.spec.ts index e75e5e23e7..8102966ad1 100644 --- a/packages/server/src/api/routes/tests/table.spec.ts +++ b/packages/server/src/api/routes/tests/table.spec.ts @@ -285,12 +285,9 @@ describe.each([ type: FieldType.STRING, name: "Type", } + // allow the "Type" column - internal columns aren't case sensitive await config.api.table.save(saveTableRequest, { - status: 400, - body: { - message: - 'Column(s) "type" are duplicated - check for other columns with these name (case 
in-sensitive)', - }, + status: 200, }) saveTableRequest.schema.foo = { type: FieldType.STRING, name: "foo" } saveTableRequest.schema.FOO = { type: FieldType.STRING, name: "FOO" } @@ -299,7 +296,7 @@ describe.each([ status: 400, body: { message: - 'Column(s) "type, foo" are duplicated - check for other columns with these name (case in-sensitive)', + 'Column(s) "foo" are duplicated - check for other columns with these name (case in-sensitive)', }, }) }) diff --git a/packages/server/src/api/routes/tests/viewV2.spec.ts b/packages/server/src/api/routes/tests/viewV2.spec.ts index 99ff4f8db7..43a6d39172 100644 --- a/packages/server/src/api/routes/tests/viewV2.spec.ts +++ b/packages/server/src/api/routes/tests/viewV2.spec.ts @@ -218,6 +218,10 @@ describe.each([ order: 1, width: 100, }, + Category: { + visible: false, + icon: "ic", + }, }, id: createdView.id, version: 2, @@ -269,9 +273,8 @@ describe.each([ ...newView, schema: { id: { visible: true }, - Price: { - visible: true, - }, + Price: { visible: true }, + Category: { visible: false }, }, id: expect.any(String), version: 2, @@ -759,6 +762,7 @@ describe.each([ order: 1, width: 100, }, + Category: { visible: false, icon: "ic" }, }, id: view.id, version: 2, @@ -873,30 +877,23 @@ describe.each([ await db.getDB(config.appId!).put(tableToUpdate) view = await config.api.viewV2.get(view.id) - await config.api.viewV2.update({ - ...view, - schema: { - ...view.schema, - Price: { - visible: false, + await config.api.viewV2.update( + { + ...view, + schema: { + ...view.schema, + Price: { + visible: false, + }, }, }, - }) - - expect(await config.api.viewV2.get(view.id)).toEqual( - expect.objectContaining({ - schema: { - id: expect.objectContaining({ - visible: false, - }), - Price: expect.objectContaining({ - visible: false, - }), - Category: expect.objectContaining({ - visible: true, - }), + { + status: 400, + body: { + message: 'You can\'t hide "id" because it is a required field.', + status: 400, }, - }) + } ) }) }) @@ -938,7 +935,6 @@ describe.each([ Category: { visible: true }, }, }) - expect(res.schema?.Price).toBeUndefined() const view = await config.api.viewV2.get(res.id) const updatedTable = await config.api.table.get(table._id!) 
@@ -1205,6 +1201,7 @@ describe.each([ ], schema: { id: { visible: true }, + one: { visible: false }, two: { visible: true }, }, }) diff --git a/packages/server/src/appMigrations/migrations/tests/20240604153647_initial_sqs.spec.ts b/packages/server/src/appMigrations/migrations/tests/20240604153647_initial_sqs.spec.ts index 86e50a5812..572e694855 100644 --- a/packages/server/src/appMigrations/migrations/tests/20240604153647_initial_sqs.spec.ts +++ b/packages/server/src/appMigrations/migrations/tests/20240604153647_initial_sqs.spec.ts @@ -15,6 +15,7 @@ import { import { processMigrations } from "../../migrationsProcessor" import migration from "../20240604153647_initial_sqs" import { AppMigration } from "src/appMigrations" +import sdk from "../../../sdk" const MIGRATIONS: AppMigration[] = [ { @@ -27,6 +28,8 @@ const MIGRATIONS: AppMigration[] = [ const config = setup.getConfig() let tableId: string +const prefix = sdk.tables.sqs.mapToUserColumn + function oldLinkDocInfo() { const tableId1 = `${DocumentType.TABLE}_a`, tableId2 = `${DocumentType.TABLE}_b` @@ -102,8 +105,14 @@ describe("SQS migration", () => { expect(designDoc.sql.tables).toBeDefined() const mainTableDef = designDoc.sql.tables[tableId] expect(mainTableDef).toBeDefined() - expect(mainTableDef.fields.name).toEqual(SQLiteType.TEXT) - expect(mainTableDef.fields.description).toEqual(SQLiteType.TEXT) + expect(mainTableDef.fields[prefix("name")]).toEqual({ + field: "name", + type: SQLiteType.TEXT, + }) + expect(mainTableDef.fields[prefix("description")]).toEqual({ + field: "description", + type: SQLiteType.TEXT, + }) const { tableId1, tableId2, rowId1, rowId2 } = oldLinkDocInfo() const linkDoc = await db.get(oldLinkDocID()) diff --git a/packages/server/src/automations/loopUtils.ts b/packages/server/src/automations/loopUtils.ts new file mode 100644 index 0000000000..5ee2559050 --- /dev/null +++ b/packages/server/src/automations/loopUtils.ts @@ -0,0 +1,36 @@ +import * as automationUtils from "./automationUtils" + +type ObjValue = { + [key: string]: string | ObjValue +} + +export function replaceFakeBindings( + originalStepInput: Record, + loopStepNumber: number +) { + for (const [key, value] of Object.entries(originalStepInput)) { + originalStepInput[key] = replaceBindingsRecursive(value, loopStepNumber) + } + return originalStepInput +} + +function replaceBindingsRecursive( + value: string | ObjValue, + loopStepNumber: number +) { + if (typeof value === "object") { + for (const [innerKey, innerValue] of Object.entries(value)) { + if (typeof innerValue === "string") { + value[innerKey] = automationUtils.substituteLoopStep( + innerValue, + `steps.${loopStepNumber}` + ) + } else if (typeof innerValue === "object") { + value[innerKey] = replaceBindingsRecursive(innerValue, loopStepNumber) + } + } + } else if (typeof value === "string") { + value = automationUtils.substituteLoopStep(value, `steps.${loopStepNumber}`) + } + return value +} diff --git a/packages/server/src/automations/steps/filter.ts b/packages/server/src/automations/steps/filter.ts index 6867809500..624619bb95 100644 --- a/packages/server/src/automations/steps/filter.ts +++ b/packages/server/src/automations/steps/filter.ts @@ -73,7 +73,12 @@ export async function run({ inputs }: AutomationStepInput) { try { let { field, condition, value } = inputs // coerce types so that we can use them - if (!isNaN(value) && !isNaN(field)) { + if ( + !isNaN(value) && + !isNaN(field) && + typeof field !== "boolean" && + typeof value !== "boolean" + ) { value = parseFloat(value) field = 
parseFloat(field) } else if (!isNaN(Date.parse(value)) && !isNaN(Date.parse(field))) { diff --git a/packages/server/src/automations/steps/openai.ts b/packages/server/src/automations/steps/openai.ts index bc926de7b7..45ef5ef703 100644 --- a/packages/server/src/automations/steps/openai.ts +++ b/packages/server/src/automations/steps/openai.ts @@ -1,4 +1,5 @@ -import { Configuration, OpenAIApi } from "openai" +import { OpenAI } from "openai" + import { AutomationActionStepId, AutomationStepSchema, @@ -75,13 +76,11 @@ export async function run({ inputs }: AutomationStepInput) { } try { - const configuration = new Configuration({ + const openai = new OpenAI({ apiKey: environment.OPENAI_API_KEY, }) - const openai = new OpenAIApi(configuration) - - const completion = await openai.createChatCompletion({ + const completion = await openai.chat.completions.create({ model: inputs.model, messages: [ { @@ -90,8 +89,7 @@ export async function run({ inputs }: AutomationStepInput) { }, ], }) - - const response = completion?.data?.choices[0]?.message?.content + const response = completion?.choices[0]?.message?.content return { response, diff --git a/packages/server/src/automations/tests/openai.spec.ts b/packages/server/src/automations/tests/openai.spec.ts index bd73827cce..618c2d7754 100644 --- a/packages/server/src/automations/tests/openai.spec.ts +++ b/packages/server/src/automations/tests/openai.spec.ts @@ -1,15 +1,13 @@ const setup = require("./utilities") import environment from "../../environment" -import openai from "openai" +import { OpenAI } from "openai" -jest.mock( - "openai", - jest.fn(() => ({ - Configuration: jest.fn(), - OpenAIApi: jest.fn(() => ({ - createChatCompletion: jest.fn(() => ({ - data: { +jest.mock("openai", () => ({ + OpenAI: jest.fn().mockImplementation(() => ({ + chat: { + completions: { + create: jest.fn(() => ({ choices: [ { message: { @@ -17,15 +15,13 @@ jest.mock( }, }, ], - }, - })), - })), - })) -) + })), + }, + }, + })), +})) -const mockedOpenAIApi = openai.OpenAIApi as jest.MockedClass< - typeof openai.OpenAIApi -> +const mockedOpenAI = OpenAI as jest.MockedClass const OPENAI_PROMPT = "What is the meaning of life?" 
@@ -73,14 +69,18 @@ describe("test the openai action", () => { }) it("should present the correct error message when an error is thrown from the createChatCompletion call", async () => { - mockedOpenAIApi.mockImplementation( + mockedOpenAI.mockImplementation( () => ({ - createChatCompletion: jest.fn(() => { - throw new Error( - "An error occurred while calling createChatCompletion" - ) - }), + chat: { + completions: { + create: jest.fn(() => { + throw new Error( + "An error occurred while calling createChatCompletion" + ) + }), + }, + }, } as any) ) diff --git a/packages/server/src/jsRunner/tests/jsRunner.spec.ts b/packages/server/src/jsRunner/tests/jsRunner.spec.ts index 7448555aac..9d2bd7104d 100644 --- a/packages/server/src/jsRunner/tests/jsRunner.spec.ts +++ b/packages/server/src/jsRunner/tests/jsRunner.spec.ts @@ -91,8 +91,13 @@ describe("jsRunner (using isolated-vm)", () => { }) it("handle test case 2", async () => { + const todayDate = new Date() + // add a year and a month + todayDate.setMonth(new Date().getMonth() + 1) + todayDate.setFullYear(todayDate.getFullYear() + 1) const context = { "Purchase Date": DATE, + today: todayDate.toISOString(), } const result = await processJS( ` @@ -100,7 +105,7 @@ describe("jsRunner (using isolated-vm)", () => { let purchaseyear = purchase.getFullYear(); let purchasemonth = purchase.getMonth(); - var today = new Date (); + var today = new Date($("today")); let todayyear = today.getFullYear(); let todaymonth = today.getMonth(); @@ -113,7 +118,7 @@ describe("jsRunner (using isolated-vm)", () => { context ) expect(result).toBeDefined() - expect(result).toBe(3) + expect(result).toBe(1) }) it("should handle test case 3", async () => { diff --git a/packages/server/src/sdk/app/rows/search.ts b/packages/server/src/sdk/app/rows/search.ts index 1dc0e37a0c..286a88054c 100644 --- a/packages/server/src/sdk/app/rows/search.ts +++ b/packages/server/src/sdk/app/rows/search.ts @@ -2,6 +2,7 @@ import { EmptyFilterOption, Row, RowSearchParams, + SearchFilterOperator, SearchFilters, SearchResponse, SortOrder, @@ -65,11 +66,37 @@ export function removeEmptyFilters(filters: SearchFilters) { return filters } +// The frontend can send single values for array fields sometimes, so to handle +// this we convert them to arrays at the controller level so that nothing below +// this has to worry about the non-array values. 
+function fixupFilterArrays(filters: SearchFilters) { + const arrayFields = [ + SearchFilterOperator.ONE_OF, + SearchFilterOperator.CONTAINS, + SearchFilterOperator.NOT_CONTAINS, + SearchFilterOperator.CONTAINS_ANY, + ] + for (const searchField of arrayFields) { + const field = filters[searchField] + if (field == null) { + continue + } + + for (const key of Object.keys(field)) { + if (!Array.isArray(field[key])) { + field[key] = [field[key]] + } + } + } + return filters +} + export async function search( options: RowSearchParams ): Promise<SearchResponse<Row>> { const isExternalTable = isExternalTableID(options.tableId) options.query = removeEmptyFilters(options.query || {}) + options.query = fixupFilterArrays(options.query) if ( !dataFilters.hasFilters(options.query) && options.query.onEmptyFilter === EmptyFilterOption.RETURN_NONE diff --git a/packages/server/src/sdk/app/rows/search/sqs.ts b/packages/server/src/sdk/app/rows/search/sqs.ts index 174ecc0e38..e3aedf9de8 100644 --- a/packages/server/src/sdk/app/rows/search/sqs.ts +++ b/packages/server/src/sdk/app/rows/search/sqs.ts @@ -18,6 +18,7 @@ import { buildInternalRelationships, sqlOutputProcessing, } from "../../../../api/controllers/row/utils" +import { mapToUserColumn, USER_COLUMN_PREFIX } from "../../tables/internal/sqs" import sdk from "../../../index" import { context, @@ -35,8 +36,13 @@ import { getRelationshipColumns, getTableIDList, } from "./filters" +import { dataFilters } from "@budibase/shared-core" const builder = new sql.Sql(SqlClient.SQL_LITE) +const MISSING_COLUMN_REGEX = new RegExp(`no such column: .+`) +const USER_COLUMN_PREFIX_REGEX = new RegExp( + `no such column: .+${USER_COLUMN_PREFIX}` +) function buildInternalFieldList( table: Table, @@ -59,7 +65,7 @@ function buildInternalFieldList( buildInternalFieldList(relatedTable, tables, { relationships: false }) ) } else { - fieldList.push(`${table._id}.${col.name}`) + fieldList.push(`${table._id}.${mapToUserColumn(col.name)}`) } } return fieldList @@ -90,6 +96,34 @@ function cleanupFilters( ) ) + // generate a map of all possible column names (these can be duplicated across tables + // the map of them will always be the same + const userColumnMap: Record<string, string> = {} + allTables.forEach(table => + Object.keys(table.schema).forEach( + key => (userColumnMap[key] = mapToUserColumn(key)) + ) + ) + + // update the keys of filters to manage user columns + const keyInAnyTable = (key: string): boolean => + allTables.some(table => table.schema[key]) + + const splitter = new dataFilters.ColumnSplitter(allTables) + for (const filter of Object.values(filters)) { + for (const key of Object.keys(filter)) { + const { numberPrefix, relationshipPrefix, column } = splitter.run(key) + if (keyInAnyTable(column)) { + filter[ + `${numberPrefix || ""}${relationshipPrefix || ""}${mapToUserColumn( + column + )}` + ] = filter[key] + delete filter[key] + } + } + } + return filters } @@ -106,6 +140,25 @@ function buildTableMap(tables: Table[]) { return tableMap } +function reverseUserColumnMapping(rows: Row[]) { + const prefixLength = USER_COLUMN_PREFIX.length + return rows.map(row => { + const finalRow: Row = {} + for (let key of Object.keys(row)) { + // it should be the first prefix + const index = key.indexOf(USER_COLUMN_PREFIX) + if (index !== -1) { + // cut out the prefix + const newKey = key.slice(0, index) + key.slice(index + prefixLength) + finalRow[newKey] = row[key] + } else { + finalRow[key] = row[key] + } + } + return finalRow + }) +} + function runSqlQuery(json: QueryJson, tables: Table[]): Promise<Row[]> function 
runSqlQuery( json: QueryJson, @@ -147,9 +200,10 @@ async function runSqlQuery( const response = await alias.queryWithAliasing(json, processSQLQuery) if (opts?.countTotalRows) { return processRowCountResponse(response) - } else { - return response + } else if (Array.isArray(response)) { + return reverseUserColumnMapping(response) } + return response } export async function search( @@ -185,6 +239,7 @@ export async function search( meta: { table, tables: allTablesMap, + columnPrefix: USER_COLUMN_PREFIX, }, resource: { fields: buildInternalFieldList(table, allTables), @@ -197,7 +252,7 @@ export async function search( const sortType = sortField.type === FieldType.NUMBER ? SortType.NUMBER : SortType.STRING request.sort = { - [sortField.name]: { + [mapToUserColumn(sortField.name)]: { direction: params.sortOrder || SortOrder.ASCENDING, type: sortType as SortType, }, @@ -278,10 +333,17 @@ export async function search( return response } catch (err: any) { const msg = typeof err === "string" ? err : err.message - if (err.status === 404 && msg?.includes(SQLITE_DESIGN_DOC_ID)) { + const syncAndRepeat = + (err.status === 400 && msg?.match(USER_COLUMN_PREFIX_REGEX)) || + (err.status === 404 && msg?.includes(SQLITE_DESIGN_DOC_ID)) + if (syncAndRepeat) { await sdk.tables.sqs.syncDefinition() return search(options, table) } + // previously the internal table didn't error when a column didn't exist in search + if (err.status === 400 && msg?.match(MISSING_COLUMN_REGEX)) { + return { rows: [] } + } throw new Error(`Unable to search by SQL - ${msg}`, { cause: err }) } } diff --git a/packages/server/src/sdk/app/tables/internal/sqs.ts b/packages/server/src/sdk/app/tables/internal/sqs.ts index 4819b9d8d5..f892a9c6c8 100644 --- a/packages/server/src/sdk/app/tables/internal/sqs.ts +++ b/packages/server/src/sdk/app/tables/internal/sqs.ts @@ -62,10 +62,18 @@ function buildRelationshipDefinitions( } } +export const USER_COLUMN_PREFIX = "data_" + +// utility function to denote that columns in SQLite are mapped to avoid overlap issues +// the overlaps can occur due to case insensitivity and some of the columns which Budibase requires +export function mapToUserColumn(key: string) { + return `${USER_COLUMN_PREFIX}${key}` +} + // this can generate relationship tables as part of the mapping function mapTable(table: Table): SQLiteTables { const tables: SQLiteTables = {} - const fields: Record<string, SQLiteType> = {} + const fields: Record<string, { field: string; type: SQLiteType }> = {} for (let [key, column] of Object.entries(table.schema)) { // relationships should be handled differently if (column.type === FieldType.LINK) { @@ -78,7 +86,10 @@ function mapTable(table: Table): SQLiteTables { if (!FieldTypeMap[column.type]) { throw new Error(`Unable to map type "${column.type}" to SQLite type`) } - fields[key] = FieldTypeMap[column.type] + fields[mapToUserColumn(key)] = { + field: key, + type: FieldTypeMap[column.type], + } } // there are some extra columns to map - add these in const constantMap: Record<string, SQLiteType> = {} diff --git a/packages/server/src/sdk/app/views/index.ts b/packages/server/src/sdk/app/views/index.ts index b6ac7b6f6b..fce57a390d 100644 --- a/packages/server/src/sdk/app/views/index.ts +++ b/packages/server/src/sdk/app/views/index.ts @@ -160,14 +160,10 @@ export function enrichSchema( for (const key of Object.keys(schema)) { // if nothing specified in view, then it is not visible const ui = view.schema?.[key] || { visible: false } - if (ui.visible === false) { - schema[key].visible = false - } else { - schema[key] = { - ...schema[key], - ...ui, - order: anyViewOrder ? ui?.order ?? 
undefined : schema[key].order, - } + schema[key] = { + ...schema[key], + ...ui, + order: anyViewOrder ? ui?.order ?? undefined : schema[key].order, } } diff --git a/packages/server/src/threads/automation.ts b/packages/server/src/threads/automation.ts index 469d0845c9..a7cf71de4b 100644 --- a/packages/server/src/threads/automation.ts +++ b/packages/server/src/threads/automation.ts @@ -7,6 +7,8 @@ import { } from "../automations/utils" import * as actions from "../automations/actions" import * as automationUtils from "../automations/automationUtils" +import { replaceFakeBindings } from "../automations/loopUtils" + import { default as AutomationEmitter } from "../events/AutomationEmitter" import { generateAutomationMetadataID, isProdAppID } from "../db/utils" import { definitions as triggerDefs } from "../automations/triggerInfo" @@ -214,15 +216,15 @@ class Orchestrator { } updateContextAndOutput( - loopStepNumber: number | undefined, + currentLoopStepIndex: number | undefined, step: AutomationStep, output: any, result: { success: boolean; status: string } ) { - if (!loopStepNumber) { + if (!currentLoopStepIndex) { throw new Error("No loop step number provided.") } - this.executionOutput.steps.splice(loopStepNumber, 0, { + this.executionOutput.steps.splice(currentLoopStepIndex, 0, { id: step.id, stepId: step.stepId, outputs: { @@ -232,7 +234,7 @@ class Orchestrator { }, inputs: step.inputs, }) - this._context.steps.splice(loopStepNumber, 0, { + this._context.steps.splice(currentLoopStepIndex, 0, { ...output, success: result.success, status: result.status, @@ -256,7 +258,7 @@ class Orchestrator { let loopStep: LoopStep | undefined = undefined let stepCount = 0 - let loopStepNumber: any = undefined + let currentLoopStepIndex: number = 0 let loopSteps: LoopStep[] | undefined = [] let metadata let timeoutFlag = false @@ -290,7 +292,7 @@ class Orchestrator { }, }) - let input: any, + let input: LoopInput | undefined, iterations = 1, iterationCount = 0 @@ -309,19 +311,19 @@ class Orchestrator { stepCount++ if (step.stepId === LOOP_STEP_ID) { loopStep = step as LoopStep - loopStepNumber = stepCount + currentLoopStepIndex = stepCount continue } if (loopStep) { input = await processObject(loopStep.inputs, this._context) - iterations = getLoopIterations(loopStep as LoopStep) + iterations = getLoopIterations(loopStep) stepSpan?.addTags({ step: { iterations } }) } - for (let index = 0; index < iterations; index++) { + + for (let stepIndex = 0; stepIndex < iterations; stepIndex++) { let originalStepInput = cloneDeep(step.inputs) - // Handle if the user has set a max iteration count or if it reaches the max limit set by us - if (loopStep && input.binding) { + if (loopStep && input?.binding) { let tempOutput = { items: loopSteps, iterations: iterationCount, @@ -332,7 +334,7 @@ class Orchestrator { ) } catch (err) { this.updateContextAndOutput( - loopStepNumber, + currentLoopStepIndex, step, tempOutput, { @@ -353,55 +355,22 @@ class Orchestrator { } else if (Array.isArray(loopStep.inputs.binding)) { item = loopStep.inputs.binding } - this._context.steps[loopStepNumber] = { - currentItem: item[index], + this._context.steps[currentLoopStepIndex] = { + currentItem: item[stepIndex], } - // The "Loop" binding in the front end is "fake", so replace it here so the context can understand it - // Pretty hacky because we need to account for the row object - for (let [key, value] of Object.entries(originalStepInput)) { - if (typeof value === "object") { - for (let [innerKey, innerValue] of Object.entries( - 
originalStepInput[key] - )) { - if (typeof innerValue === "string") { - originalStepInput[key][innerKey] = - automationUtils.substituteLoopStep( - innerValue, - `steps.${loopStepNumber}` - ) - } else if (typeof value === "object") { - for (let [innerObject, innerValue] of Object.entries( - originalStepInput[key][innerKey] - )) { - if (typeof innerValue === "string") { - originalStepInput[key][innerKey][innerObject] = - automationUtils.substituteLoopStep( - innerValue, - `steps.${loopStepNumber}` - ) - } - } - } - } - } else { - if (typeof value === "string") { - originalStepInput[key] = - automationUtils.substituteLoopStep( - value, - `steps.${loopStepNumber}` - ) - } - } - } + originalStepInput = replaceFakeBindings( + originalStepInput, + currentLoopStepIndex + ) if ( - index === env.AUTOMATION_MAX_ITERATIONS || + stepIndex === env.AUTOMATION_MAX_ITERATIONS || (loopStep.inputs.iterations && - index === parseInt(loopStep.inputs.iterations)) + stepIndex === parseInt(loopStep.inputs.iterations)) ) { this.updateContextAndOutput( - loopStepNumber, + currentLoopStepIndex, step, tempOutput, { @@ -416,7 +385,7 @@ class Orchestrator { let isFailure = false const currentItem = - this._context.steps[loopStepNumber]?.currentItem + this._context.steps[currentLoopStepIndex]?.currentItem if (currentItem && typeof currentItem === "object") { isFailure = Object.keys(currentItem).some(value => { return currentItem[value] === loopStep?.inputs.failure @@ -428,7 +397,7 @@ class Orchestrator { if (isFailure) { this.updateContextAndOutput( - loopStepNumber, + currentLoopStepIndex, step, tempOutput, { @@ -453,7 +422,6 @@ class Orchestrator { continue } - // If it's a loop step, we need to manually add the bindings to the context let stepFn = await this.getStepFunctionality(step.stepId) let inputs = await processObject(originalStepInput, this._context) inputs = automationUtils.cleanInputValues( @@ -502,9 +470,9 @@ class Orchestrator { if (loopStep) { iterationCount++ - if (index === iterations - 1) { + if (stepIndex === iterations - 1) { loopStep = undefined - this._context.steps.splice(loopStepNumber, 1) + this._context.steps.splice(currentLoopStepIndex, 1) break } } @@ -515,7 +483,7 @@ class Orchestrator { if (loopStep && iterations === 0) { loopStep = undefined - this.executionOutput.steps.splice(loopStepNumber + 1, 0, { + this.executionOutput.steps.splice(currentLoopStepIndex + 1, 0, { id: step.id, stepId: step.stepId, outputs: { @@ -525,14 +493,14 @@ class Orchestrator { inputs: {}, }) - this._context.steps.splice(loopStepNumber, 1) + this._context.steps.splice(currentLoopStepIndex, 1) iterations = 1 } // Delete the step after the loop step as it's irrelevant, since information is included // in the loop step if (wasLoopStep && !loopStep) { - this._context.steps.splice(loopStepNumber + 1, 1) + this._context.steps.splice(currentLoopStepIndex + 1, 1) wasLoopStep = false } if (loopSteps && loopSteps.length) { @@ -541,13 +509,13 @@ class Orchestrator { items: loopSteps, iterations: iterationCount, } - this.executionOutput.steps.splice(loopStepNumber + 1, 0, { + this.executionOutput.steps.splice(currentLoopStepIndex + 1, 0, { id: step.id, stepId: step.stepId, outputs: tempOutput, inputs: step.inputs, }) - this._context.steps[loopStepNumber] = tempOutput + this._context.steps[currentLoopStepIndex] = tempOutput wasLoopStep = true loopSteps = [] diff --git a/packages/server/src/utilities/schema.ts b/packages/server/src/utilities/schema.ts index e473675633..4bd4e8f583 100644 --- 
a/packages/server/src/utilities/schema.ts +++ b/packages/server/src/utilities/schema.ts @@ -79,7 +79,6 @@ export function validate(rows: Rows, schema: TableSchema): ValidationResults { } else if ( // If there's no data for this field don't bother with further checks // If the field is already marked as invalid there's no need for further checks - results.schemaValidation[columnName] === false || columnData == null || isAutoColumn ) { diff --git a/packages/shared-core/package.json b/packages/shared-core/package.json index 3049afdb95..da74d090b6 100644 --- a/packages/shared-core/package.json +++ b/packages/shared-core/package.json @@ -11,7 +11,7 @@ "build": "node ../../scripts/build.js && tsc -p tsconfig.build.json --emitDeclarationOnly --paths null", "build:dev": "yarn prebuild && tsc --build --watch --preserveWatchOutput", "dev": "tsc -p tsconfig.json --watch --preserveWatchOutput", - "check:types": "tsc -p tsconfig.json --noEmit --paths null", + "check:types": "tsc -p tsconfig.json --noEmit --paths null --target es2020", "test": "jest", "test:watch": "yarn test --watchAll" }, @@ -21,7 +21,7 @@ }, "devDependencies": { "rimraf": "3.0.2", - "typescript": "5.2.2" + "typescript": "5.5.2" }, "nx": { "targets": { diff --git a/packages/shared-core/src/constants/index.ts b/packages/shared-core/src/constants/index.ts index c9d1a8fc8f..0713b5d2f8 100644 --- a/packages/shared-core/src/constants/index.ts +++ b/packages/shared-core/src/constants/index.ts @@ -164,14 +164,17 @@ export const InvalidFileExtensions = [ export enum BpmCorrelationKey { ONBOARDING = "budibase:onboarding:correlationkey", + VERIFY_SSO_LOGIN = "budibase:verify_sso_login:correlationkey", } export enum BpmInstanceKey { ONBOARDING = "budibase:onboarding:instancekey", + VERIFY_SSO_LOGIN = "budibase:verify_sso_login:instancekey", } export enum BpmStatusKey { ONBOARDING = "budibase:onboarding:status", + VERIFY_SSO_LOGIN = "budibase:verify_sso_login:status", } export enum BpmStatusValue { diff --git a/packages/shared-core/src/filters.ts b/packages/shared-core/src/filters.ts index bd75406e26..3c6901e195 100644 --- a/packages/shared-core/src/filters.ts +++ b/packages/shared-core/src/filters.ts @@ -13,6 +13,7 @@ import { RowSearchParams, EmptyFilterOption, SearchResponse, + Table, } from "@budibase/types" import dayjs from "dayjs" import { OperatorOptions, SqlNumberTypeRangeMap } from "./constants" @@ -131,13 +132,72 @@ const cleanupQuery = (query: SearchFilters) => { * Removes a numeric prefix on field names designed to give fields uniqueness */ export const removeKeyNumbering = (key: string): string => { + return getKeyNumbering(key).key +} + +/** + * Gets the part of the keys, returning the numeric prefix and the field name + */ +export const getKeyNumbering = ( + key: string +): { prefix?: string; key: string } => { if (typeof key === "string" && key.match(/\d[0-9]*:/g) != null) { const parts = key.split(":") // remove the number - parts.shift() - return parts.join(":") + const number = parts.shift() + return { prefix: `${number}:`, key: parts.join(":") } } else { - return key + return { key } + } +} + +/** + * Generates a splitter which can be used to split columns from a context into + * their components (number prefix, relationship column/table, column name) + */ +export class ColumnSplitter { + tableNames: string[] + tableIds: string[] + relationshipColumnNames: string[] + relationships: string[] + + constructor(tables: Table[]) { + this.tableNames = tables.map(table => table.name) + this.tableIds = tables.map(table => table._id!) 
+ this.relationshipColumnNames = tables.flatMap(table => + Object.keys(table.schema).filter( + columnName => table.schema[columnName].type === FieldType.LINK + ) + ) + this.relationships = this.tableNames + .concat(this.tableIds) + .concat(this.relationshipColumnNames) + // sort by length - makes sure there's no mis-matches due to similarities (sub column names) + .sort((a, b) => b.length - a.length) + } + + run(key: string): { + numberPrefix?: string + relationshipPrefix?: string + column: string + } { + let { prefix, key: splitKey } = getKeyNumbering(key) + let relationship: string | undefined + for (let possibleRelationship of this.relationships) { + const withDot = `${possibleRelationship}.` + if (splitKey.startsWith(withDot)) { + const finalKeyParts = splitKey.split(withDot) + finalKeyParts.shift() + relationship = withDot + splitKey = finalKeyParts.join(".") + break + } + } + return { + numberPrefix: prefix, + relationshipPrefix: relationship, + column: splitKey, + } } } @@ -250,12 +310,16 @@ export const buildQuery = (filter: SearchFilter[]) => { query.equal = query.equal || {} query.equal[field] = true } else { - query[queryOperator] = query[queryOperator] || {} - query[queryOperator]![field] = value + query[queryOperator] = { + ...query[queryOperator], + [field]: value, + } } } else { - query[queryOperator] = query[queryOperator] || {} - query[queryOperator]![field] = value + query[queryOperator] = { + ...query[queryOperator], + [field]: value, + } } } }) diff --git a/packages/shared-core/src/table.ts b/packages/shared-core/src/table.ts index 4b578a2aef..8fd7909b18 100644 --- a/packages/shared-core/src/table.ts +++ b/packages/shared-core/src/table.ts @@ -54,20 +54,25 @@ export function canBeSortColumn(type: FieldType): boolean { } export function findDuplicateInternalColumns(table: Table): string[] { + // maintains the case of keys + const casedKeys = Object.keys(table.schema) // get the column names - const columnNames = Object.keys(table.schema) - .concat(CONSTANT_INTERNAL_ROW_COLS) - .map(colName => colName.toLowerCase()) + const uncasedKeys = casedKeys.map(colName => colName.toLowerCase()) // there are duplicates - const set = new Set(columnNames) + const set = new Set(uncasedKeys) let duplicates: string[] = [] - if (set.size !== columnNames.length) { + if (set.size !== uncasedKeys.length) { for (let key of set.keys()) { - const count = columnNames.filter(name => name === key).length + const count = uncasedKeys.filter(name => name === key).length if (count > 1) { duplicates.push(key) } } } + for (let internalColumn of CONSTANT_INTERNAL_ROW_COLS) { + if (casedKeys.find(key => key === internalColumn)) { + duplicates.push(internalColumn) + } + } return duplicates } diff --git a/packages/string-templates/package.json b/packages/string-templates/package.json index 8cf8d92692..238182a5a4 100644 --- a/packages/string-templates/package.json +++ b/packages/string-templates/package.json @@ -21,7 +21,7 @@ "scripts": { "build": "tsc --emitDeclarationOnly && rollup -c", "dev": "rollup -cw", - "check:types": "tsc -p tsconfig.json --noEmit --paths null", + "check:types": "tsc -p tsconfig.json --noEmit --paths null --target es2020", "test": "jest", "manifest": "ts-node ./scripts/gen-collection-info.ts" }, @@ -45,6 +45,6 @@ "rollup-plugin-node-resolve": "^5.2.0", "rollup-plugin-terser": "^7.0.2", "ts-jest": "29.1.1", - "typescript": "5.2.2" + "typescript": "5.5.2" } } diff --git a/packages/string-templates/src/processors/preprocessor.ts 
b/packages/string-templates/src/processors/preprocessor.ts index 010c259e12..5e96336e32 100644 --- a/packages/string-templates/src/processors/preprocessor.ts +++ b/packages/string-templates/src/processors/preprocessor.ts @@ -7,6 +7,7 @@ export const PreprocessorNames = { SWAP_TO_DOT: "swap-to-dot-notation", FIX_FUNCTIONS: "fix-functions", FINALISE: "finalise", + NORMALIZE_SPACES: "normalize-spaces", } class Preprocessor { @@ -50,6 +51,9 @@ export const processors = [ return statement }), + new Preprocessor(PreprocessorNames.NORMALIZE_SPACES, (statement: string) => { + return statement.replace(/{{(\s{2,})/g, "{{ ") + }), new Preprocessor( PreprocessorNames.FINALISE, (statement: string, opts: { noHelpers: any }) => { diff --git a/packages/string-templates/test/basic.spec.ts b/packages/string-templates/test/basic.spec.ts index ddea54c2bf..24a19131f4 100644 --- a/packages/string-templates/test/basic.spec.ts +++ b/packages/string-templates/test/basic.spec.ts @@ -320,3 +320,21 @@ describe("should leave HBS blocks if not found using option", () => { expect(output).toBe("{{ a }}, 1") }) }) + +describe("check multiple space behaviour", () => { + it("should remove whitespace and use the helper correctly", async () => { + const output = await processString("{{ add num1 num2 }}", { + num1: 1, + num2: 2, + }) + expect(output).toEqual("3") + }) + + it("should ensure that whitespace within a string is respected", async () => { + const output = await processString("{{ trimRight 'test string ' }}", { + num1: 1, + num2: 2, + }) + expect(output).toEqual("test string") + }) +}) diff --git a/packages/types/package.json b/packages/types/package.json index f4c7b13344..c44fff971e 100644 --- a/packages/types/package.json +++ b/packages/types/package.json @@ -11,7 +11,7 @@ "build": "node ../../scripts/build.js && tsc -p tsconfig.build.json --emitDeclarationOnly", "build:dev": "yarn prebuild && tsc --build --watch --preserveWatchOutput", "dev": "tsc -p tsconfig.json --watch --preserveWatchOutput", - "check:types": "tsc -p tsconfig.json --noEmit --paths null" + "check:types": "tsc -p tsconfig.json --noEmit --paths null --target es2020" }, "jest": {}, "devDependencies": { @@ -20,7 +20,7 @@ "@types/pouchdb": "6.4.0", "@types/redlock": "4.0.7", "rimraf": "3.0.2", - "typescript": "5.2.2" + "typescript": "5.5.2" }, "dependencies": { "scim-patch": "^0.8.1" diff --git a/packages/types/src/documents/app/automation.ts b/packages/types/src/documents/app/automation.ts index 5954a47151..6ea62ffffb 100644 --- a/packages/types/src/documents/app/automation.ts +++ b/packages/types/src/documents/app/automation.ts @@ -144,7 +144,7 @@ interface BaseIOStructure { required?: string[] } -interface InputOutputBlock { +export interface InputOutputBlock { properties: { [key: string]: BaseIOStructure } diff --git a/packages/types/src/sdk/search.ts b/packages/types/src/sdk/search.ts index e5cbccf5c1..c40f1c3b84 100644 --- a/packages/types/src/sdk/search.ts +++ b/packages/types/src/sdk/search.ts @@ -122,6 +122,8 @@ export interface QueryJson { table: Table tables?: Record<string, Table> renamed?: RenameColumn + // can specify something that columns could be prefixed with + columnPrefix?: string } extra?: { idFilter?: SearchFilters diff --git a/packages/worker/package.json b/packages/worker/package.json index 95410668da..02f1181e91 100644 --- a/packages/worker/package.json +++ b/packages/worker/package.json @@ -15,7 +15,7 @@ "prebuild": "rimraf dist/", "build": "node ../../scripts/build.js", "postbuild": "copyfiles -f ../../yarn.lock ./dist/", - "check:types": 
"tsc -p tsconfig.json --noEmit --paths null", + "check:types": "tsc -p tsconfig.json --noEmit --paths null --target es2020", "build:dev": "yarn prebuild && tsc --build --watch --preserveWatchOutput", "run:docker": "node dist/index.js", "debug": "yarn build && node --expose-gc --inspect=9223 dist/index.js", @@ -91,7 +91,7 @@ "rimraf": "3.0.2", "supertest": "6.3.3", "timekeeper": "2.2.0", - "typescript": "5.2.2", + "typescript": "5.5.2", "update-dotenv": "1.1.1" }, "nx": { diff --git a/packages/worker/src/api/controllers/system/environment.ts b/packages/worker/src/api/controllers/system/environment.ts index 203d3d41ff..4deca5df6e 100644 --- a/packages/worker/src/api/controllers/system/environment.ts +++ b/packages/worker/src/api/controllers/system/environment.ts @@ -3,12 +3,6 @@ import env from "../../../environment" import { env as coreEnv } from "@budibase/backend-core" import nodeFetch from "node-fetch" -// When we come to move to SQS fully and move away from Clouseau, we will need -// to flip this to true (or remove it entirely). This will then be used to -// determine if we should show the maintenance page that links to the SQS -// migration docs. -const sqsRequired = false - let sqsAvailable: boolean async function isSqsAvailable() { // We cache this value for the duration of the Node process because we don't @@ -30,7 +24,7 @@ async function isSqsAvailable() { } async function isSqsMissing() { - return sqsRequired && !(await isSqsAvailable()) + return env.SQS_SEARCH_ENABLE && !(await isSqsAvailable()) } export const fetch = async (ctx: Ctx) => { diff --git a/scripts/deploy-camunda.sh b/scripts/deploy-camunda.sh index 7059b6f072..90400a0449 100755 --- a/scripts/deploy-camunda.sh +++ b/scripts/deploy-camunda.sh @@ -23,6 +23,7 @@ echo "deploy processes..." zbctl deploy resource offboarding.bpmn --insecure zbctl deploy resource onboarding.bpmn --insecure zbctl deploy resource free_trial.bpmn --insecure +zbctl deploy resource verify_sso_login.bpmn --insecure cd ../../../../../budibase/packages/account-portal/packages/server diff --git a/scripts/run-affected.js b/scripts/run-affected.js new file mode 100755 index 0000000000..97f79bb463 --- /dev/null +++ b/scripts/run-affected.js @@ -0,0 +1,34 @@ +/*** + * Running lerna with since and scope is not working as expected. + * For example, running the command `yarn test --scope=@budibase/worker --since=master`, with changes only on `@budibase/backend-core` will not work as expected, as it does not analyse the dependencies properly. The actual `@budibase/worker` task will not be triggered. + * + * This script is using `lerna ls` to detect all the affected projects from a given commit, and if the scoped package is affected, the actual command will be executed. + * + * The current version of the script only supports a single project in the scope. + */ + +const { execSync } = require("child_process") + +const argv = require("yargs").demandOption(["task", "since", "scope"]).argv + +const { task, since, scope } = argv + +const affectedPackages = execSync( + `yarn --silent nx show projects --affected -t ${task} --base=${since} --json`, + { + encoding: "utf-8", + } +) + +const packages = JSON.parse(affectedPackages) + +const isAffected = packages.includes(scope) + +if (isAffected) { + console.log(`${scope} is affected. Running task "${task}"`) + execSync(`yarn ${task} --scope=${scope}`, { + stdio: "inherit", + }) +} else { + console.log(`${scope} is not affected. 
Skipping task "${task}"`) +} diff --git a/yarn.lock b/yarn.lock index 9914c334df..542847053d 100644 --- a/yarn.lock +++ b/yarn.lock @@ -3514,32 +3514,84 @@ path-to-regexp "1.x" urijs "^1.19.2" -"@lerna/child-process@7.1.1": - version "7.1.1" - resolved "https://registry.yarnpkg.com/@lerna/child-process/-/child-process-7.1.1.tgz#60eddd6dc4b6ba0fd51851c78b6dbdc4e1614220" - integrity sha512-mR8PaTkckYPLmEBG2VsVsJq2UuzEvjXevOB1rKLKUZ/dPCGcottVhbiEzTxickc+s7Y/1dTTLn/1BKj3B1a5BA== +"@lerna/child-process@7.4.2": + version "7.4.2" + resolved "https://registry.yarnpkg.com/@lerna/child-process/-/child-process-7.4.2.tgz#a2fd013ac2150dc288270d3e0d0b850c06bec511" + integrity sha512-je+kkrfcvPcwL5Tg8JRENRqlbzjdlZXyaR88UcnCdNW0AJ1jX9IfHRys1X7AwSroU2ug8ESNC+suoBw1vX833Q== dependencies: chalk "^4.1.0" execa "^5.0.0" strong-log-transformer "^2.1.0" -"@lerna/create@7.1.1": - version "7.1.1" - resolved "https://registry.yarnpkg.com/@lerna/create/-/create-7.1.1.tgz#2af94afb01971c1b594c06347b6998607aefe5c4" - integrity sha512-1PY2OgwGxp7b91JzLKEhONVl69mCt1IyYEc6pzKy3Sv+UOdeK2QFq1SX/85hNOR3iitiyZ75bNWUTcBly1ZlZg== +"@lerna/create@7.4.2": + version "7.4.2" + resolved "https://registry.yarnpkg.com/@lerna/create/-/create-7.4.2.tgz#f845fad1480e46555af98bd39af29571605dddc9" + integrity sha512-1wplFbQ52K8E/unnqB0Tq39Z4e+NEoNrpovEnl6GpsTUrC6WDp8+w0Le2uCBV0hXyemxChduCkLz4/y1H1wTeg== dependencies: - "@lerna/child-process" "7.1.1" + "@lerna/child-process" "7.4.2" + "@npmcli/run-script" "6.0.2" + "@nx/devkit" ">=16.5.1 < 17" + "@octokit/plugin-enterprise-rest" "6.0.1" + "@octokit/rest" "19.0.11" + byte-size "8.1.1" + chalk "4.1.0" + clone-deep "4.0.1" + cmd-shim "6.0.1" + columnify "1.6.0" + conventional-changelog-core "5.0.1" + conventional-recommended-bump "7.0.1" + cosmiconfig "^8.2.0" dedent "0.7.0" + execa "5.0.0" fs-extra "^11.1.1" + get-stream "6.0.0" + git-url-parse "13.1.0" + glob-parent "5.1.2" + globby "11.1.0" + graceful-fs "4.2.11" + has-unicode "2.0.1" + ini "^1.3.8" init-package-json "5.0.0" + inquirer "^8.2.4" + is-ci "3.0.1" + is-stream "2.0.0" + js-yaml "4.1.0" + libnpmpublish "7.3.0" + load-json-file "6.2.0" + lodash "^4.17.21" + make-dir "4.0.0" + minimatch "3.0.5" + multimatch "5.0.0" + node-fetch "2.6.7" npm-package-arg "8.1.1" + npm-packlist "5.1.1" + npm-registry-fetch "^14.0.5" + npmlog "^6.0.2" + nx ">=16.5.1 < 17" + p-map "4.0.0" + p-map-series "2.1.0" + p-queue "6.6.2" p-reduce "^2.1.0" pacote "^15.2.0" pify "5.0.0" + read-cmd-shim "4.0.0" + read-package-json "6.0.4" + resolve-from "5.0.0" + rimraf "^4.4.1" semver "^7.3.4" + signal-exit "3.0.7" slash "^3.0.0" + ssri "^9.0.1" + strong-log-transformer "2.1.0" + tar "6.1.11" + temp-dir "1.0.0" + upath "2.0.1" + uuid "^9.0.0" validate-npm-package-license "^3.0.4" validate-npm-package-name "5.0.0" + write-file-atomic "5.0.1" + write-pkg "4.0.0" + yargs "16.2.0" yargs-parser "20.2.4" "@lezer/common@^1.0.0": @@ -3717,12 +3769,12 @@ read-package-json-fast "^3.0.0" which "^3.0.0" -"@nrwl/devkit@16.4.3": - version "16.4.3" - resolved "https://registry.yarnpkg.com/@nrwl/devkit/-/devkit-16.4.3.tgz#036be0d478ef7156e55c1cfb4d13da080983503d" - integrity sha512-sDGv3RX5DHBMFFiHdd91e4YFXb87X5jsKkEg5Y2jmFtz/OilBKA9yoRhZ8t+iLBOmbgUngC5ZYPHc+Ykd2U3nA== +"@nrwl/devkit@16.10.0": + version "16.10.0" + resolved "https://registry.yarnpkg.com/@nrwl/devkit/-/devkit-16.10.0.tgz#ac8c5b4db00f12c4b817c937be2f7c4eb8f2593c" + integrity sha512-fRloARtsDQoQgQ7HKEy0RJiusg/HSygnmg4gX/0n/Z+SUS+4KoZzvHjXc6T5ZdEiSjvLypJ+HBM8dQzIcVACPQ== dependencies: - "@nx/devkit" "16.4.3" + 
"@nx/devkit" "16.10.0" "@nrwl/nx-cloud@16.0.5": version "16.0.5" @@ -3731,74 +3783,76 @@ dependencies: nx-cloud "16.0.5" -"@nrwl/tao@16.4.3": - version "16.4.3" - resolved "https://registry.yarnpkg.com/@nrwl/tao/-/tao-16.4.3.tgz#8a72e8c1c903d8d7e1d9a298c28f03a000a925d8" - integrity sha512-h+/UdXtdVuH9K2+Rx1HK5AHXGtgXNIqdLIH1KRL+74fiQ+JNO2Xuz9wqiD+rZ5tmtL/4hZpePCMkTz2FusKvbA== +"@nrwl/tao@16.10.0": + version "16.10.0" + resolved "https://registry.yarnpkg.com/@nrwl/tao/-/tao-16.10.0.tgz#94642a0380709b8e387e1e33705a5a9624933375" + integrity sha512-QNAanpINbr+Pod6e1xNgFbzK1x5wmZl+jMocgiEFXZ67KHvmbD6MAQQr0MMz+GPhIu7EE4QCTLTyCEMlAG+K5Q== dependencies: - nx "16.4.3" + nx "16.10.0" + tslib "^2.3.0" -"@nx/devkit@16.4.3", "@nx/devkit@>=16.1.3 < 17": - version "16.4.3" - resolved "https://registry.yarnpkg.com/@nx/devkit/-/devkit-16.4.3.tgz#a5e691f1fd49b5b0d8bb0a4347b3501b11e33056" - integrity sha512-5LHtia3Ioy4uwWDIpnCbslFwxNdRJ2cWWmzq4oDINZnUMzNsjatVowKkOUBeG4Xh++6Dvui/VSdKZ6J0MUoQzw== +"@nx/devkit@16.10.0", "@nx/devkit@>=16.5.1 < 17": + version "16.10.0" + resolved "https://registry.yarnpkg.com/@nx/devkit/-/devkit-16.10.0.tgz#7e466be2dee2dcb1ccaf286786ca2a0a639aa007" + integrity sha512-IvKQqRJFDDiaj33SPfGd3ckNHhHi6ceEoqCbAP4UuMXOPPVOX6H0KVk+9tknkPb48B7jWIw6/AgOeWkBxPRO5w== dependencies: - "@nrwl/devkit" "16.4.3" + "@nrwl/devkit" "16.10.0" ejs "^3.1.7" + enquirer "~2.3.6" ignore "^5.0.4" semver "7.5.3" tmp "~0.2.1" tslib "^2.3.0" -"@nx/nx-darwin-arm64@16.4.3": - version "16.4.3" - resolved "https://registry.yarnpkg.com/@nx/nx-darwin-arm64/-/nx-darwin-arm64-16.4.3.tgz#08e63921c4e4dfc9eb9da612140c62ca8c190059" - integrity sha512-iVr3KTHXqGWx34mLxKjdDT1m6px9NME7zqSoKZW9DQuxDt3G7NN4PkK6+n2YqVNNSOmYml/Oo5iVtQ2TUCJDFA== +"@nx/nx-darwin-arm64@16.10.0": + version "16.10.0" + resolved "https://registry.yarnpkg.com/@nx/nx-darwin-arm64/-/nx-darwin-arm64-16.10.0.tgz#0c73010cac7a502549483b12bad347da9014e6f1" + integrity sha512-YF+MIpeuwFkyvM5OwgY/rTNRpgVAI/YiR0yTYCZR+X3AAvP775IVlusNgQ3oedTBRUzyRnI4Tknj1WniENFsvQ== -"@nx/nx-darwin-x64@16.4.3": - version "16.4.3" - resolved "https://registry.yarnpkg.com/@nx/nx-darwin-x64/-/nx-darwin-x64-16.4.3.tgz#e1e591f38bd103cf110487bd8c35daf17f8636c7" - integrity sha512-Km1N7Rek4VZW9rFMpV/gwmW0YHCoeV/5/tbYOYjSPJY6n2GB/vVoqE1DTf69muIk32436aK+qYRpd98bXC8GKg== +"@nx/nx-darwin-x64@16.10.0": + version "16.10.0" + resolved "https://registry.yarnpkg.com/@nx/nx-darwin-x64/-/nx-darwin-x64-16.10.0.tgz#2ccf270418d552fd0a8e0d6089aee4944315adaa" + integrity sha512-ypi6YxwXgb0kg2ixKXE3pwf5myVNUgWf1CsV5OzVccCM8NzheMO51KDXTDmEpXdzUsfT0AkO1sk5GZeCjhVONg== -"@nx/nx-freebsd-x64@16.4.3": - version "16.4.3" - resolved "https://registry.yarnpkg.com/@nx/nx-freebsd-x64/-/nx-freebsd-x64-16.4.3.tgz#dc7fd8dbb87d7eb613b3f7302b0e3cba233277fd" - integrity sha512-i6gc7oiDekYY2DS20COoeIrUqSQt0A3V+xUbrMGTInbHMux8QlfY5LGPRHGzqRlvnmUbrpgN0TdwBB9KOgaWmw== +"@nx/nx-freebsd-x64@16.10.0": + version "16.10.0" + resolved "https://registry.yarnpkg.com/@nx/nx-freebsd-x64/-/nx-freebsd-x64-16.10.0.tgz#c3ee6914256e69493fed9355b0d6661d0e86da44" + integrity sha512-UeEYFDmdbbDkTQamqvtU8ibgu5jQLgFF1ruNb/U4Ywvwutw2d4ruOMl2e0u9hiNja9NFFAnDbvzrDcMo7jYqYw== -"@nx/nx-linux-arm-gnueabihf@16.4.3": - version "16.4.3" - resolved "https://registry.yarnpkg.com/@nx/nx-linux-arm-gnueabihf/-/nx-linux-arm-gnueabihf-16.4.3.tgz#5a2aa53297eff9b3d0cef5b0280d67400e61e80d" - integrity sha512-hozcDrzbv3X0oWYYbJfSybVmKviko78wjjxvdwYS2H9eqNN6sNBZ5+LL+duUazCeGGHj1fRipvb9E3rJxiKWEw== +"@nx/nx-linux-arm-gnueabihf@16.10.0": + version "16.10.0" + 
resolved "https://registry.yarnpkg.com/@nx/nx-linux-arm-gnueabihf/-/nx-linux-arm-gnueabihf-16.10.0.tgz#a961eccbb38acb2da7fc125b29d1fead0b39152f" + integrity sha512-WV3XUC2DB6/+bz1sx+d1Ai9q2Cdr+kTZRN50SOkfmZUQyEBaF6DRYpx/a4ahhxH3ktpNfyY8Maa9OEYxGCBkQA== -"@nx/nx-linux-arm64-gnu@16.4.3": - version "16.4.3" - resolved "https://registry.yarnpkg.com/@nx/nx-linux-arm64-gnu/-/nx-linux-arm64-gnu-16.4.3.tgz#2129426f8258e193f9997adafcda570e23d94435" - integrity sha512-LrlSKCZtFl8TiIFuLjkSNN/yzQ8phZ6+0jgsuumrIE8t02y+WLcZ4dSGlCo4nwVX/MDCtTbc9LPI+rIoBvO/pQ== +"@nx/nx-linux-arm64-gnu@16.10.0": + version "16.10.0" + resolved "https://registry.yarnpkg.com/@nx/nx-linux-arm64-gnu/-/nx-linux-arm64-gnu-16.10.0.tgz#795f20072549d03822b5c4639ef438e473dbb541" + integrity sha512-aWIkOUw995V3ItfpAi5FuxQ+1e9EWLS1cjWM1jmeuo+5WtaKToJn5itgQOkvSlPz+HSLgM3VfXMvOFALNk125g== -"@nx/nx-linux-arm64-musl@16.4.3": - version "16.4.3" - resolved "https://registry.yarnpkg.com/@nx/nx-linux-arm64-musl/-/nx-linux-arm64-musl-16.4.3.tgz#0285f71f94b5a2eb40f15033457f937e0362770d" - integrity sha512-3ahS0k330T339FdVBQhr3EGrghAaezqdVpbOwG2pyiZRwvLVgnDkPF/d4EkGd3ZAsOLazcPkPH/fKxPPf8HP2g== +"@nx/nx-linux-arm64-musl@16.10.0": + version "16.10.0" + resolved "https://registry.yarnpkg.com/@nx/nx-linux-arm64-musl/-/nx-linux-arm64-musl-16.10.0.tgz#f2428ee6dbe2b2c326e8973f76c97666def33607" + integrity sha512-uO6Gg+irqpVcCKMcEPIQcTFZ+tDI02AZkqkP7koQAjniLEappd8DnUBSQdcn53T086pHpdc264X/ZEpXFfrKWQ== -"@nx/nx-linux-x64-gnu@16.4.3": - version "16.4.3" - resolved "https://registry.yarnpkg.com/@nx/nx-linux-x64-gnu/-/nx-linux-x64-gnu-16.4.3.tgz#defea39bbd5f494c28369bd403f909d5ec905ac0" - integrity sha512-Nbo+FLBYZRhJUB367Eg9f0mH7Q+X67H+QAF+wU2oK3StSGQNQbLnr7Q0yfmX912WdYDe7gWhEpqWTLZ7rv65mg== +"@nx/nx-linux-x64-gnu@16.10.0": + version "16.10.0" + resolved "https://registry.yarnpkg.com/@nx/nx-linux-x64-gnu/-/nx-linux-x64-gnu-16.10.0.tgz#d36c2bcf94d49eaa24e3880ddaf6f1f617de539b" + integrity sha512-134PW/u/arNFAQKpqMJniC7irbChMPz+W+qtyKPAUXE0XFKPa7c1GtlI/wK2dvP9qJDZ6bKf0KtA0U/m2HMUOA== -"@nx/nx-linux-x64-musl@16.4.3": - version "16.4.3" - resolved "https://registry.yarnpkg.com/@nx/nx-linux-x64-musl/-/nx-linux-x64-musl-16.4.3.tgz#c78db85f3234d2b899c7acaa7b5e2ef2c8591eb6" - integrity sha512-RG31ewe3GRmwSMBgWF0yeJ1zu8s42xywpwK8swgGHpUp+Z6JN8dkUqi7UfHGbjeaOIDg4w45/7OJyrE7dlqHCg== +"@nx/nx-linux-x64-musl@16.10.0": + version "16.10.0" + resolved "https://registry.yarnpkg.com/@nx/nx-linux-x64-musl/-/nx-linux-x64-musl-16.10.0.tgz#78bd2ab97a583b3d4ea3387b67fd7b136907493c" + integrity sha512-q8sINYLdIJxK/iUx9vRk5jWAWb/2O0PAbOJFwv4qkxBv4rLoN7y+otgCZ5v0xfx/zztFgk/oNY4lg5xYjIso2Q== -"@nx/nx-win32-arm64-msvc@16.4.3": - version "16.4.3" - resolved "https://registry.yarnpkg.com/@nx/nx-win32-arm64-msvc/-/nx-win32-arm64-msvc-16.4.3.tgz#d131273e8267eb98a7640f79a94049b5f12d572e" - integrity sha512-5HXY8S0vGUculndAhWqBrqkrQxY6M3v3Ac/3rr8O238JkdkhRiHilnGbwS2MIQpU7dou3wROO6wKT7+TyFv+cA== +"@nx/nx-win32-arm64-msvc@16.10.0": + version "16.10.0" + resolved "https://registry.yarnpkg.com/@nx/nx-win32-arm64-msvc/-/nx-win32-arm64-msvc-16.10.0.tgz#ef20ec8d0c83d66e73e20df12d2c788b8f866396" + integrity sha512-moJkL9kcqxUdJSRpG7dET3UeLIciwrfP08mzBQ12ewo8K8FzxU8ZUsTIVVdNrwt01CXOdXoweGfdQLjJ4qTURA== -"@nx/nx-win32-x64-msvc@16.4.3": - version "16.4.3" - resolved "https://registry.yarnpkg.com/@nx/nx-win32-x64-msvc/-/nx-win32-x64-msvc-16.4.3.tgz#cc8d87dbada3965b3156440277951b742b6c0de3" - integrity sha512-9vdA5t5xuWCQ9JFJZFjzYGz9w5wtZ7zfKcx2HdBvg2nDWUzK5Z3khwsakTSsc7Ff7Hnd0i0l5T3Ls6Hk42Haww== 
+"@nx/nx-win32-x64-msvc@16.10.0": + version "16.10.0" + resolved "https://registry.yarnpkg.com/@nx/nx-win32-x64-msvc/-/nx-win32-x64-msvc-16.10.0.tgz#7410a51d0f8be631eec9552f01b2e5946285927c" + integrity sha512-5iV2NKZnzxJwZZ4DM5JVbRG/nkhAbzEskKaLBB82PmYGKzaDHuMHP1lcPoD/rtYMlowZgNA/RQndfKvPBPwmXA== "@octokit/auth-token@^3.0.0": version "3.0.3" @@ -5911,6 +5965,14 @@ "@types/node" "*" form-data "^3.0.0" +"@types/node-fetch@^2.6.4": + version "2.6.11" + resolved "https://registry.yarnpkg.com/@types/node-fetch/-/node-fetch-2.6.11.tgz#9b39b78665dae0e82a08f02f4967d62c66f95d24" + integrity sha512-24xFj9R5+rfQJLRyM56qh+wnVSYhyXC2tkoBndtY0U+vubqNsYXGjufB2nn8Q6gt0LrARwL6UBtMCSVCwl4B1g== + dependencies: + "@types/node" "*" + form-data "^4.0.0" + "@types/node@*", "@types/node@>=10.0.0", "@types/node@>=12.12.47", "@types/node@>=13.13.4", "@types/node@>=13.7.0", "@types/node@^20.4.5": version "20.12.10" resolved "https://registry.yarnpkg.com/@types/node/-/node-20.12.10.tgz#8f0c3f12b0f075eee1fe20c1afb417e9765bef76" @@ -7476,7 +7538,7 @@ axios-retry@^3.1.9: "@babel/runtime" "^7.15.4" is-retry-allowed "^2.2.0" -axios@0.24.0, axios@1.1.3, axios@1.6.3, axios@^0.21.1, axios@^0.26.0, axios@^1.0.0, axios@^1.1.3, axios@^1.4.0, axios@^1.5.0: +axios@0.24.0, axios@1.1.3, axios@1.6.3, axios@^0.21.1, axios@^1.0.0, axios@^1.1.3, axios@^1.4.0, axios@^1.5.0: version "1.6.3" resolved "https://registry.yarnpkg.com/axios/-/axios-1.6.3.tgz#7f50f23b3aa246eff43c54834272346c396613f4" integrity sha512-fWyNdeawGam70jXSVlKl+SUNVcL6j6W79CuSIPfi6HnDUmSCH6gyUys/HrqHeA/wU0Az41rRgean494d0Jb+ww== @@ -8848,10 +8910,10 @@ content-type@^1.0.4: resolved "https://registry.yarnpkg.com/content-type/-/content-type-1.0.5.tgz#8b773162656d1d1086784c8f23a54ce6d73d7918" integrity sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA== -conventional-changelog-angular@6.0.0: - version "6.0.0" - resolved "https://registry.yarnpkg.com/conventional-changelog-angular/-/conventional-changelog-angular-6.0.0.tgz#a9a9494c28b7165889144fd5b91573c4aa9ca541" - integrity sha512-6qLgrBF4gueoC7AFVHu51nHL9pF9FRjXrH+ceVf7WmAfH3gs+gEYOkvxhjMPjZu57I4AGUGoNTY8V7Hrgf1uqg== +conventional-changelog-angular@7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/conventional-changelog-angular/-/conventional-changelog-angular-7.0.0.tgz#5eec8edbff15aa9b1680a8dcfbd53e2d7eb2ba7a" + integrity sha512-ROjNchA9LgfNMTTFSIWPzebCwOGFdgkEq45EnvvrmSLvCtAw0HSmrCs7/ty+wAeYUZyNay0YMUNYFTRL72PkBQ== dependencies: compare-func "^2.0.0" @@ -10063,6 +10125,11 @@ dot-prop@^5.1.0, dot-prop@^5.2.0: dependencies: is-obj "^2.0.0" +dotenv-expand@~10.0.0: + version "10.0.0" + resolved "https://registry.yarnpkg.com/dotenv-expand/-/dotenv-expand-10.0.0.tgz#12605d00fb0af6d0a592e6558585784032e4ef37" + integrity sha512-GopVGCpVS1UKH75VKHGuQFqS1Gusej0z4FyQkPdwjil2gNIv+LNsqBlboOzpJFZKVT95GkCyWJbBSdFEFUWI2A== + dotenv@16.0.1: version "16.0.1" resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-16.0.1.tgz#8f8f9d94876c35dac989876a5d3a82a267fdce1d" @@ -10083,6 +10150,11 @@ dotenv@~10.0.0: resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-10.0.0.tgz#3d4227b8fb95f81096cdd2b66653fb2c7085ba81" integrity sha512-rlBi9d8jpv9Sf1klPjNfFAuWDjKLwTIJJ/VxtoTwIR6hnZxcEOQCZg2oIL3MWBYw5GpUDKOEnND7LXTbIpQ03Q== +dotenv@~16.3.1: + version "16.3.2" + resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-16.3.2.tgz#3cb611ce5a63002dbabf7c281bc331f69d28f03f" + integrity sha512-HTlk5nmhkm8F6JcdXvHIzaorzCoziNQT9mGxLPVXW8wJF1TiGSL60ZGB4gHWabHOaMmWmhvk2/lPHfnBiT78AQ== + 
double-ended-queue@2.1.0-0: version "2.1.0-0" resolved "https://registry.yarnpkg.com/double-ended-queue/-/double-ended-queue-2.1.0-0.tgz#103d3527fd31528f40188130c841efdd78264e5c" @@ -11143,17 +11215,6 @@ fast-fifo@^1.1.0, fast-fifo@^1.2.0: resolved "https://registry.yarnpkg.com/fast-fifo/-/fast-fifo-1.3.2.tgz#286e31de96eb96d38a97899815740ba2a4f3640c" integrity sha512-/d9sfos4yxzpwkDkuN7k2SqFKtYNmCTzgfEpz82x34IM9/zc8KGxQoXg1liNC/izpRM/MBdt44Nmx41ZWqk+FQ== -fast-glob@3.2.7: - version "3.2.7" - resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.2.7.tgz#fd6cb7a2d7e9aa7a7846111e85a196d6b2f766a1" - integrity sha512-rYGMRwip6lUMvYD3BTScMwT1HtAs2d71SMv66Vrxs0IekGZEjhM0pcMfjQPnknBt2zeCwQMEupiN02ZP4DiT1Q== - dependencies: - "@nodelib/fs.stat" "^2.0.2" - "@nodelib/fs.walk" "^1.2.3" - glob-parent "^5.1.2" - merge2 "^1.3.0" - micromatch "^4.0.4" - fast-glob@^3.2.11, fast-glob@^3.2.9: version "3.2.12" resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.2.12.tgz#7f39ec99c2e6ab030337142da9e0c18f37afae80" @@ -11480,6 +11541,11 @@ forever-agent@~0.6.1: resolved "https://registry.yarnpkg.com/forever-agent/-/forever-agent-0.6.1.tgz#fbc71f0c41adeb37f96c577ad1ed42d8fdacca91" integrity sha512-j0KLYPhm6zeac4lz3oJ3o65qvgQCcPubiyotZrXqEaG4hNagNYO8qdlUrX5vwqv9ohqeT/Z3j6+yW067yWWdUw== +form-data-encoder@1.7.2: + version "1.7.2" + resolved "https://registry.yarnpkg.com/form-data-encoder/-/form-data-encoder-1.7.2.tgz#1f1ae3dccf58ed4690b86d87e4f57c654fbab040" + integrity sha512-qfqtYan3rxrnCk1VYaA4H+Ms9xdpPqvLZa6xmMgFvhO32x7/3J/ExcTd6qpxM0vH2GdMI+poehyBZvqfMTto8A== + form-data@4.0.0, form-data@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/form-data/-/form-data-4.0.0.tgz#93919daeaf361ee529584b9b31664dc12c9fa452" @@ -11516,6 +11582,14 @@ form-data@~2.3.2: combined-stream "^1.0.6" mime-types "^2.1.12" +formdata-node@^4.3.2: + version "4.4.1" + resolved "https://registry.yarnpkg.com/formdata-node/-/formdata-node-4.4.1.tgz#23f6a5cb9cb55315912cbec4ff7b0f59bbd191e2" + integrity sha512-0iirZp3uVDjVGt9p49aTaqjk84TrglENEDuqfdlZQ1roC9CWlPk6Avf8EEnZNcAqPonwkG35x4n3ww/1THYAeQ== + dependencies: + node-domexception "1.0.0" + web-streams-polyfill "4.0.0-beta.3" + formidable@^1.1.1: version "1.2.6" resolved "https://registry.yarnpkg.com/formidable/-/formidable-1.2.6.tgz#d2a51d60162bbc9b4a055d8457a7c75315d1a168" @@ -13633,7 +13707,7 @@ jest-config@^29.7.0: slash "^3.0.0" strip-json-comments "^3.1.1" -"jest-diff@>=29.4.3 < 30", jest-diff@^29.7.0: +"jest-diff@>=29.4.3 < 30", jest-diff@^29.4.1, jest-diff@^29.7.0: version "29.7.0" resolved "https://registry.yarnpkg.com/jest-diff/-/jest-diff-29.7.0.tgz#017934a66ebb7ecf6f205e84699be10afd70458a" integrity sha512-LMIgiIrhigmPrs03JHpxUh2yISK3vLFPkAodPeo0+BuF7wA2FoQbkEg1u8gBYBThncu7e1oEDUfIXVuTqLRUjw== @@ -14670,15 +14744,15 @@ leaflet@^1.7.1: resolved "https://registry.yarnpkg.com/leaflet/-/leaflet-1.9.3.tgz#52ec436954964e2d3d39e0d433da4b2500d74414" integrity sha512-iB2cR9vAkDOu5l3HAay2obcUHZ7xwUBBjph8+PGtmW/2lYhbLizWtG7nTeYht36WfOslixQF9D/uSIzhZgGMfQ== -lerna@7.1.1: - version "7.1.1" - resolved "https://registry.yarnpkg.com/lerna/-/lerna-7.1.1.tgz#6703062e6c4ddefdaf41e8890e9200690924fd71" - integrity sha512-rjivAl3bYu2+lWOi90vy0tYFgwBYPMiNkR/DuEWZC08wle5dsbOZ/SlXeLk9+kzbF89Bt5P6p+qF78A2tJsWPA== +lerna@7.4.2: + version "7.4.2" + resolved "https://registry.yarnpkg.com/lerna/-/lerna-7.4.2.tgz#03497125d7b7c8d463eebfe17a701b16bde2ad09" + integrity sha512-gxavfzHfJ4JL30OvMunmlm4Anw7d7Tq6tdVHzUukLdS9nWnxCN/QB21qR+VJYp5tcyXogHKbdUEGh6qmeyzxSA== dependencies: - 
"@lerna/child-process" "7.1.1" - "@lerna/create" "7.1.1" + "@lerna/child-process" "7.4.2" + "@lerna/create" "7.4.2" "@npmcli/run-script" "6.0.2" - "@nx/devkit" ">=16.1.3 < 17" + "@nx/devkit" ">=16.5.1 < 17" "@octokit/plugin-enterprise-rest" "6.0.1" "@octokit/rest" "19.0.11" byte-size "8.1.1" @@ -14686,7 +14760,7 @@ lerna@7.1.1: clone-deep "4.0.1" cmd-shim "6.0.1" columnify "1.6.0" - conventional-changelog-angular "6.0.0" + conventional-changelog-angular "7.0.0" conventional-changelog-core "5.0.1" conventional-recommended-bump "7.0.1" cosmiconfig "^8.2.0" @@ -14712,7 +14786,8 @@ lerna@7.1.1: libnpmaccess "7.0.2" libnpmpublish "7.3.0" load-json-file "6.2.0" - make-dir "3.1.0" + lodash "^4.17.21" + make-dir "4.0.0" minimatch "3.0.5" multimatch "5.0.0" node-fetch "2.6.7" @@ -14720,7 +14795,7 @@ lerna@7.1.1: npm-packlist "5.1.1" npm-registry-fetch "^14.0.5" npmlog "^6.0.2" - nx ">=16.1.3 < 17" + nx ">=16.5.1 < 17" p-map "4.0.0" p-map-series "2.1.0" p-pipe "3.1.0" @@ -15442,12 +15517,12 @@ magic-string@^0.30.5: dependencies: "@jridgewell/sourcemap-codec" "^1.4.15" -make-dir@3.1.0, make-dir@^3.0.0, make-dir@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-3.1.0.tgz#415e967046b3a7f1d185277d84aa58203726a13f" - integrity sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw== +make-dir@4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-4.0.0.tgz#c3c2307a771277cd9638305f915c29ae741b614e" + integrity sha512-hXdUTZYIVOt1Ex//jAQi+wTZZpUpwBj/0QsOzqegb3rGMMeJiSEu5xLHnYfBrRV4RH2+OCSOO95Is/7x1WJ4bw== dependencies: - semver "^6.0.0" + semver "^7.5.3" make-dir@^1.0.0, make-dir@^1.3.0: version "1.3.0" @@ -15464,6 +15539,13 @@ make-dir@^2.1.0: pify "^4.0.1" semver "^5.6.0" +make-dir@^3.0.0, make-dir@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-3.1.0.tgz#415e967046b3a7f1d185277d84aa58203726a13f" + integrity sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw== + dependencies: + semver "^6.0.0" + make-error@1.x, make-error@^1.1.1: version "1.3.6" resolved "https://registry.yarnpkg.com/make-error/-/make-error-1.3.6.tgz#2eb2e37ea9b67c4891f684a1394799af484cf7a2" @@ -16262,6 +16344,11 @@ node-addon-api@^6.1.0: resolved "https://registry.yarnpkg.com/node-addon-api/-/node-addon-api-6.1.0.tgz#ac8470034e58e67d0c6f1204a18ae6995d9c0d76" integrity sha512-+eawOlIgy680F0kBzPUNFhMZGtJ1YmqM6l4+Crf4IkImjYrO/mqPwRMh352g23uIaQKFItcQ64I7KMaJxHgAVA== +node-domexception@1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/node-domexception/-/node-domexception-1.0.0.tgz#6888db46a1f71c0b76b3f7555016b63fe64766e5" + integrity sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ== + node-fetch@2.6.0, node-fetch@2.6.7, node-fetch@^2.6.0, node-fetch@^2.6.1, node-fetch@^2.6.7, node-fetch@^2.6.9, node-fetch@^2.7.0: version "2.6.7" resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.7.tgz#24de9fba827e3b4ae44dc8b20256a379160052ad" @@ -16315,7 +16402,7 @@ node-int64@^0.4.0: resolved "https://registry.yarnpkg.com/node-int64/-/node-int64-0.4.0.tgz#87a9065cdb355d3182d8f94ce11188b825c68a3b" integrity sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw== -node-machine-id@^1.1.12: +node-machine-id@1.1.12, node-machine-id@^1.1.12: version "1.1.12" resolved 
"https://registry.yarnpkg.com/node-machine-id/-/node-machine-id-1.1.12.tgz#37904eee1e59b320bb9c5d6c0a59f3b469cb6267" integrity sha512-QNABxbrPa3qEIfrE6GOJ7BYIuignnJw7iQ2YPbc3Nla1HzRJjXzZOiikfF8m7eAMfichLt3M4VgLOetqgDmgGQ== @@ -16637,12 +16724,12 @@ nx-cloud@16.0.5: tar "6.1.11" yargs-parser ">=21.1.1" -nx@16.4.3, "nx@>=16.1.3 < 17": - version "16.4.3" - resolved "https://registry.yarnpkg.com/nx/-/nx-16.4.3.tgz#0bd8e408eeb9f09f9fca334689bf3d13f361254f" - integrity sha512-bq3wc7WI/j/mmz4MbrhDVE+DLJ6ywvmAoUjxNRcVAhPi+rT7bDaztVZceDbxxVFW55wfOIjcYwhS9fGQMSBBpQ== +nx@16.10.0, "nx@>=16.5.1 < 17": + version "16.10.0" + resolved "https://registry.yarnpkg.com/nx/-/nx-16.10.0.tgz#b070461f7de0a3d7988bd78558ea84cda3543ace" + integrity sha512-gZl4iCC0Hx0Qe1VWmO4Bkeul2nttuXdPpfnlcDKSACGu3ZIo+uySqwOF8yBAxSTIf8xe2JRhgzJN1aFkuezEBg== dependencies: - "@nrwl/tao" "16.4.3" + "@nrwl/tao" "16.10.0" "@parcel/watcher" "2.0.4" "@yarnpkg/lockfile" "^1.1.0" "@yarnpkg/parsers" "3.0.0-rc.46" @@ -16651,19 +16738,21 @@ nx@16.4.3, "nx@>=16.1.3 < 17": chalk "^4.1.0" cli-cursor "3.1.0" cli-spinners "2.6.1" - cliui "^7.0.2" - dotenv "~10.0.0" + cliui "^8.0.1" + dotenv "~16.3.1" + dotenv-expand "~10.0.0" enquirer "~2.3.6" - fast-glob "3.2.7" figures "3.2.0" flat "^5.0.2" fs-extra "^11.1.0" glob "7.1.4" ignore "^5.0.4" + jest-diff "^29.4.1" js-yaml "4.1.0" jsonc-parser "3.2.0" lines-and-columns "~2.0.3" minimatch "3.0.5" + node-machine-id "1.1.12" npm-run-path "^4.0.1" open "^8.4.0" semver "7.5.3" @@ -16677,16 +16766,16 @@ nx@16.4.3, "nx@>=16.1.3 < 17": yargs "^17.6.2" yargs-parser "21.1.1" optionalDependencies: - "@nx/nx-darwin-arm64" "16.4.3" - "@nx/nx-darwin-x64" "16.4.3" - "@nx/nx-freebsd-x64" "16.4.3" - "@nx/nx-linux-arm-gnueabihf" "16.4.3" - "@nx/nx-linux-arm64-gnu" "16.4.3" - "@nx/nx-linux-arm64-musl" "16.4.3" - "@nx/nx-linux-x64-gnu" "16.4.3" - "@nx/nx-linux-x64-musl" "16.4.3" - "@nx/nx-win32-arm64-msvc" "16.4.3" - "@nx/nx-win32-x64-msvc" "16.4.3" + "@nx/nx-darwin-arm64" "16.10.0" + "@nx/nx-darwin-x64" "16.10.0" + "@nx/nx-freebsd-x64" "16.10.0" + "@nx/nx-linux-arm-gnueabihf" "16.10.0" + "@nx/nx-linux-arm64-gnu" "16.10.0" + "@nx/nx-linux-arm64-musl" "16.10.0" + "@nx/nx-linux-x64-gnu" "16.10.0" + "@nx/nx-linux-x64-musl" "16.10.0" + "@nx/nx-win32-arm64-msvc" "16.10.0" + "@nx/nx-win32-x64-msvc" "16.10.0" oauth-sign@~0.9.0: version "0.9.0" @@ -16891,13 +16980,19 @@ open@^8.0.0, open@^8.4.0, open@~8.4.0: is-docker "^2.1.1" is-wsl "^2.2.0" -openai@^3.2.1: - version "3.2.1" - resolved "https://registry.yarnpkg.com/openai/-/openai-3.2.1.tgz#1fa35bdf979cbde8453b43f2dd3a7d401ee40866" - integrity sha512-762C9BNlJPbjjlWZi4WYK9iM2tAVAv0uUp1UmI34vb0CN5T2mjB/qM6RYBmNKMh/dN9fC+bxqPwWJZUTWW052A== +openai@^4.52.1: + version "4.52.1" + resolved "https://registry.yarnpkg.com/openai/-/openai-4.52.1.tgz#44acc362a844fa2927b0cfa1fb70fb51e388af65" + integrity sha512-kv2hevAWZZ3I/vd2t8znGO2rd8wkowncsfcYpo8i+wU9ML+JEcdqiViANXXjWWGjIhajFNixE6gOY1fEgqILAg== dependencies: - axios "^0.26.0" - form-data "^4.0.0" + "@types/node" "^18.11.18" + "@types/node-fetch" "^2.6.4" + abort-controller "^3.0.0" + agentkeepalive "^4.2.1" + form-data-encoder "1.7.2" + formdata-node "^4.3.2" + node-fetch "^2.6.7" + web-streams-polyfill "^3.2.1" openapi-response-validator@^9.2.0: version "9.3.1" @@ -21759,6 +21854,11 @@ typescript@5.2.2, "typescript@>=3 < 6": resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.2.2.tgz#5ebb5e5a5b75f085f22bc3f8460fba308310fa78" integrity 
sha512-mI4WrpHsbCIcwT9cF4FZvr80QUeKvsUsUvKDoR+X/7XHQH98xYD8YHZg7ANtz2GtZt/CBq2QJ0thkGJMHfqc1w== +typescript@5.5.2: + version "5.5.2" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.5.2.tgz#c26f023cb0054e657ce04f72583ea2d85f8d0507" + integrity sha512-NcRtPEOsPFFWjobJEtfihkLCZCXZt/os3zf8nTxjVH3RvTSxjrCamJpbExGvYOF+tFHc3pA65qpdwPbzjohhew== + typescript@^3.9.10, typescript@^3.9.5, typescript@^3.9.7: version "3.9.10" resolved "https://registry.yarnpkg.com/typescript/-/typescript-3.9.10.tgz#70f3910ac7a51ed6bef79da7800690b19bf778b8" @@ -22329,6 +22429,16 @@ wcwidth@^1.0.0, wcwidth@^1.0.1: dependencies: defaults "^1.0.3" +web-streams-polyfill@4.0.0-beta.3: + version "4.0.0-beta.3" + resolved "https://registry.yarnpkg.com/web-streams-polyfill/-/web-streams-polyfill-4.0.0-beta.3.tgz#2898486b74f5156095e473efe989dcf185047a38" + integrity sha512-QW95TCTaHmsYfHDybGMwO5IJIM93I/6vTRk+daHTWFPhwh+C8Cg7j7XyKrwrj8Ib6vYXe0ocYNrmzY4xAAN6ug== + +web-streams-polyfill@^3.2.1: + version "3.3.3" + resolved "https://registry.yarnpkg.com/web-streams-polyfill/-/web-streams-polyfill-3.3.3.tgz#2073b91a2fdb1fbfbd401e7de0ac9f8214cecb4b" + integrity sha512-d2JWLCivmZYTSIoge9MsgFCZrt571BikcWGYkjC1khllbTeDlGqZ2D8vD8E/lJa8WGWbb7Plm8/XJYV7IJHZZw== + webfinger@^0.4.2: version "0.4.2" resolved "https://registry.yarnpkg.com/webfinger/-/webfinger-0.4.2.tgz#3477a6d97799461896039fcffc650b73468ee76d"
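Aside from the lockfile churn, the most reusable addition in this changeset is the ColumnSplitter in packages/shared-core/src/filters.ts, used by the SQS search code above to map filter keys onto prefixed user columns. A usage sketch follows, assuming a minimal hypothetical table; only the fields relevant to splitting are filled in, so the cast to Table is for illustration only.

import { dataFilters } from "@budibase/shared-core"
import { FieldType, Table } from "@budibase/types"

// Hypothetical table: one LINK column ("customer") and one plain column ("amount").
const orders = {
  _id: "ta_orders",
  name: "orders",
  schema: {
    amount: { name: "amount", type: FieldType.NUMBER },
    customer: { name: "customer", type: FieldType.LINK },
  },
} as unknown as Table

const splitter = new dataFilters.ColumnSplitter([orders])

// A numbered, relationship-qualified filter key is decomposed into its parts...
splitter.run("2:customer.name")
// => { numberPrefix: "2:", relationshipPrefix: "customer.", column: "name" }

// ...while a plain column name passes through with no prefixes detected.
splitter.run("amount")
// => { numberPrefix: undefined, relationshipPrefix: undefined, column: "amount" }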