diff --git a/hosting/couchdb/Dockerfile b/hosting/couchdb/Dockerfile index 36abc2dd19..ca72153e78 100644 --- a/hosting/couchdb/Dockerfile +++ b/hosting/couchdb/Dockerfile @@ -128,4 +128,4 @@ ADD couch/vm.args couch/local.ini ./etc/ WORKDIR / ADD runner.sh ./bbcouch-runner.sh RUN chmod +x ./bbcouch-runner.sh /opt/clouseau/bin/clouseau -CMD ["./bbcouch-runner.sh"] +CMD ["./bbcouch-runner.sh"] \ No newline at end of file diff --git a/hosting/couchdb/Dockerfile.v2 b/hosting/couchdb/Dockerfile.v2 new file mode 100644 index 0000000000..912344a903 --- /dev/null +++ b/hosting/couchdb/Dockerfile.v2 @@ -0,0 +1,135 @@ +# Modified from https://github.com/apache/couchdb-docker/blob/main/3.3.3/Dockerfile +# +# Everything in this `base` image is adapted from the official `couchdb` image's +# Dockerfile. Only modifications related to upgrading from Debian bullseye to +# bookworm have been included. The `runner` image contains Budibase's +# customisations to the image, e.g. adding Clouseau. +FROM node:20-slim AS base + +# Add CouchDB user account to make sure the IDs are assigned consistently +RUN groupadd -g 5984 -r couchdb && useradd -u 5984 -d /opt/couchdb -g couchdb couchdb + +# be sure GPG and apt-transport-https are available and functional +RUN set -ex; \ + apt-get update; \ + apt-get install -y --no-install-recommends \ + apt-transport-https \ + ca-certificates \ + dirmngr \ + gnupg \ + ; \ + rm -rf /var/lib/apt/lists/* + +# grab tini for signal handling and zombie reaping +# see https://github.com/apache/couchdb-docker/pull/28#discussion_r141112407 +RUN set -eux; \ + apt-get update; \ + apt-get install -y --no-install-recommends tini; \ + rm -rf /var/lib/apt/lists/*; \ + tini --version + +# http://docs.couchdb.org/en/latest/install/unix.html#installing-the-apache-couchdb-packages +ENV GPG_COUCH_KEY \ +# gpg: rsa8192 205-01-19 The Apache Software Foundation (Package repository signing key) + 390EF70BB1EA12B2773962950EE62FB37A00258D +RUN set -eux; \ + apt-get update; \ + apt-get install -y curl; \ + export GNUPGHOME="$(mktemp -d)"; \ + curl -fL -o keys.asc https://couchdb.apache.org/repo/keys.asc; \ + gpg --batch --import keys.asc; \ + gpg --batch --export "${GPG_COUCH_KEY}" > /usr/share/keyrings/couchdb-archive-keyring.gpg; \ + command -v gpgconf && gpgconf --kill all || :; \ + rm -rf "$GNUPGHOME"; \ + apt-key list; \ + apt purge -y --autoremove curl; \ + rm -rf /var/lib/apt/lists/* + +ENV COUCHDB_VERSION 3.3.3 + +RUN . /etc/os-release; \ + echo "deb [signed-by=/usr/share/keyrings/couchdb-archive-keyring.gpg] https://apache.jfrog.io/artifactory/couchdb-deb/ ${VERSION_CODENAME} main" | \ + tee /etc/apt/sources.list.d/couchdb.list >/dev/null + +# https://github.com/apache/couchdb-pkg/blob/master/debian/README.Debian +RUN set -eux; \ + apt-get update; \ + \ + echo "couchdb couchdb/mode select none" | debconf-set-selections; \ +# we DO want recommends this time + DEBIAN_FRONTEND=noninteractive apt-get install -y --allow-downgrades --allow-remove-essential --allow-change-held-packages \ + couchdb="$COUCHDB_VERSION"~bookworm \ + ; \ +# Undo symlinks to /var/log and /var/lib + rmdir /var/lib/couchdb /var/log/couchdb; \ + rm /opt/couchdb/data /opt/couchdb/var/log; \ + mkdir -p /opt/couchdb/data /opt/couchdb/var/log; \ + chown couchdb:couchdb /opt/couchdb/data /opt/couchdb/var/log; \ + chmod 777 /opt/couchdb/data /opt/couchdb/var/log; \ +# Remove file that sets logging to a file + rm /opt/couchdb/etc/default.d/10-filelog.ini; \ +# Check we own everything in /opt/couchdb. 
Matches the command in dockerfile_entrypoint.sh + find /opt/couchdb \! \( -user couchdb -group couchdb \) -exec chown -f couchdb:couchdb '{}' +; \ +# Setup directories and permissions for config. Technically these could be 555 and 444 respectively +# but we keep them as 755 and 644 for consistency with CouchDB defaults and the dockerfile_entrypoint.sh. + find /opt/couchdb/etc -type d ! -perm 0755 -exec chmod -f 0755 '{}' +; \ + find /opt/couchdb/etc -type f ! -perm 0644 -exec chmod -f 0644 '{}' +; \ +# only local.d needs to be writable for the docker_entrypoint.sh + chmod -f 0777 /opt/couchdb/etc/local.d; \ +# apt clean-up + rm -rf /var/lib/apt/lists/*; + +# Add configuration +COPY --chown=couchdb:couchdb couch/10-docker-default.ini /opt/couchdb/etc/default.d/ +# COPY --chown=couchdb:couchdb vm.args /opt/couchdb/etc/ + +COPY docker-entrypoint.sh /usr/local/bin +RUN ln -s usr/local/bin/docker-entrypoint.sh /docker-entrypoint.sh # backwards compat +ENTRYPOINT ["tini", "--", "/docker-entrypoint.sh"] + +VOLUME /opt/couchdb/data + +# 5984: Main CouchDB endpoint +# 4369: Erlang portmap daemon (epmd) +# 9100: CouchDB cluster communication port +EXPOSE 5984 4369 9100 +CMD ["/opt/couchdb/bin/couchdb"] + +FROM base as runner + +ENV COUCHDB_USER admin +ENV COUCHDB_PASSWORD admin +EXPOSE 5984 +EXPOSE 4984 + +RUN apt-get update && apt-get install -y --no-install-recommends software-properties-common wget unzip curl && \ + wget -O - https://packages.adoptium.net/artifactory/api/gpg/key/public | apt-key add - && \ + apt-add-repository 'deb http://security.debian.org/debian-security bookworm-security/updates main' && \ + apt-add-repository 'deb http://archive.debian.org/debian stretch-backports main' && \ + apt-add-repository 'deb https://packages.adoptium.net/artifactory/deb bookworm main' && \ + apt-get update && apt-get install -y --no-install-recommends temurin-8-jdk && \ + rm -rf /var/lib/apt/lists/ + +# setup clouseau +WORKDIR / +RUN wget https://github.com/cloudant-labs/clouseau/releases/download/2.21.0/clouseau-2.21.0-dist.zip && \ + unzip clouseau-2.21.0-dist.zip && \ + mv clouseau-2.21.0 /opt/clouseau && \ + rm clouseau-2.21.0-dist.zip + +WORKDIR /opt/clouseau +RUN mkdir ./bin +ADD clouseau/clouseau ./bin/ +ADD clouseau/log4j.properties clouseau/clouseau.ini ./ + +# setup CouchDB +WORKDIR /opt/couchdb +ADD couch/vm.args couch/local.ini ./etc/ + +WORKDIR /opt/sqs +ADD sqs/sqs sqs/better_sqlite3.node ./ + +WORKDIR / +ADD runner.v2.sh ./bbcouch-runner.sh +RUN chmod +x ./bbcouch-runner.sh /opt/clouseau/bin/clouseau /opt/sqs/sqs +CMD ["./bbcouch-runner.sh"] diff --git a/hosting/couchdb/runner.v2.sh b/hosting/couchdb/runner.v2.sh new file mode 100644 index 0000000000..7ee24327a1 --- /dev/null +++ b/hosting/couchdb/runner.v2.sh @@ -0,0 +1,88 @@ +#!/bin/bash + +DATA_DIR=${DATA_DIR:-/data} +COUCHDB_ERLANG_COOKIE=${COUCHDB_ERLANG_COOKIE:-B9CFC32C-3458-4A86-8448-B3C753991CA7} + +mkdir -p ${DATA_DIR} +mkdir -p ${DATA_DIR}/couch/{dbs,views} +mkdir -p ${DATA_DIR}/search +chown -R couchdb:couchdb ${DATA_DIR}/couch + +echo ${TARGETBUILD} > /buildtarget.txt +if [[ "${TARGETBUILD}" = "aas" ]]; then + # Azure AppService uses /home for persistent data & SSH on port 2222 + DATA_DIR="${DATA_DIR:-/home}" + WEBSITES_ENABLE_APP_SERVICE_STORAGE=true + mkdir -p $DATA_DIR/{search,minio,couch} + mkdir -p $DATA_DIR/couch/{dbs,views} + chown -R couchdb:couchdb $DATA_DIR/couch/ + apt update + apt-get install -y openssh-server + echo "root:Docker!" 
| chpasswd + mkdir -p /tmp + chmod +x /tmp/ssh_setup.sh \ + && (sleep 1;/tmp/ssh_setup.sh 2>&1 > /dev/null) + cp /etc/sshd_config /etc/ssh/sshd_config + /etc/init.d/ssh restart + sed -i "s#DATA_DIR#/home#g" /opt/clouseau/clouseau.ini + sed -i "s#DATA_DIR#/home#g" /opt/couchdb/etc/local.ini +elif [[ "${TARGETBUILD}" = "single" ]]; then + # In the single image build, the Dockerfile specifies /data as a volume + # mount, so we use that for all persistent data. + sed -i "s#DATA_DIR#/data#g" /opt/clouseau/clouseau.ini + sed -i "s#DATA_DIR#/data#g" /opt/couchdb/etc/local.ini +elif [[ "${TARGETBUILD}" = "docker-compose" ]]; then + # We remove the database_dir and view_index_dir settings from the local.ini + # in docker-compose because it will default to /opt/couchdb/data which is what + # our docker-compose was using prior to us switching to using our own CouchDB + # image. + sed -i "s#^database_dir.*\$##g" /opt/couchdb/etc/local.ini + sed -i "s#^view_index_dir.*\$##g" /opt/couchdb/etc/local.ini + sed -i "s#^dir=.*\$#dir=/opt/couchdb/data#g" /opt/clouseau/clouseau.ini +elif [[ -n $KUBERNETES_SERVICE_HOST ]]; then + # In Kubernetes the directory /opt/couchdb/data has a persistent volume + # mount for storing database data. + sed -i "s#^dir=.*\$#dir=/opt/couchdb/data#g" /opt/clouseau/clouseau.ini + + # We remove the database_dir and view_index_dir settings from the local.ini + # in Kubernetes because it will default to /opt/couchdb/data which is what + # our Helm chart was using prior to us switching to using our own CouchDB + # image. + sed -i "s#^database_dir.*\$##g" /opt/couchdb/etc/local.ini + sed -i "s#^view_index_dir.*\$##g" /opt/couchdb/etc/local.ini + + # We remove the -name setting from the vm.args file in Kubernetes because + # it will default to the pod FQDN, which is what's required for clustering + # to work. + sed -i "s/^-name .*$//g" /opt/couchdb/etc/vm.args +else + # For all other builds, we use /data for persistent data. + sed -i "s#DATA_DIR#/data#g" /opt/clouseau/clouseau.ini + sed -i "s#DATA_DIR#/data#g" /opt/couchdb/etc/local.ini +fi + +sed -i "s#COUCHDB_ERLANG_COOKIE#${COUCHDB_ERLANG_COOKIE}#g" /opt/couchdb/etc/vm.args +sed -i "s#COUCHDB_ERLANG_COOKIE#${COUCHDB_ERLANG_COOKIE}#g" /opt/clouseau/clouseau.ini + +# Start Clouseau. Budibase won't function correctly without Clouseau running, it +# powers the search API endpoints which are used to do all sorts, including +# populating app grids. +/opt/clouseau/bin/clouseau > /dev/stdout 2>&1 & + +# Start CouchDB. +/docker-entrypoint.sh /opt/couchdb/bin/couchdb & + +# Start SQS. +/opt/sqs/sqs --server "http://localhost:5984" --data-dir ${DATA_DIR}/sqs --bind-address=0.0.0.0 & + +# Wait for CouchDB to start up. +while [[ $(curl -s -w "%{http_code}\n" http://localhost:5984/_up -o /dev/null) -ne 200 ]]; do + echo 'Waiting for CouchDB to start...'; + sleep 5; +done + +# CouchDB needs the `_users` and `_replicator` databases to exist before it will +# function correctly, so we create them here. 
+curl -X PUT -u "${COUCHDB_USER}:${COUCHDB_PASSWORD}" http://localhost:5984/_users +curl -X PUT -u "${COUCHDB_USER}:${COUCHDB_PASSWORD}" http://localhost:5984/_replicator +sleep infinity diff --git a/hosting/couchdb/sqs/better_sqlite3.node b/hosting/couchdb/sqs/better_sqlite3.node new file mode 100755 index 0000000000..bec4c79958 Binary files /dev/null and b/hosting/couchdb/sqs/better_sqlite3.node differ diff --git a/hosting/couchdb/sqs/sqs b/hosting/couchdb/sqs/sqs new file mode 100755 index 0000000000..d445ab8149 Binary files /dev/null and b/hosting/couchdb/sqs/sqs differ diff --git a/hosting/docker-compose.dev.yaml b/hosting/docker-compose.dev.yaml index 394f5ac256..9dba5d427c 100644 --- a/hosting/docker-compose.dev.yaml +++ b/hosting/docker-compose.dev.yaml @@ -40,7 +40,6 @@ services: - PROXY_ADDRESS=host.docker.internal couchdb-service: - # platform: linux/amd64 container_name: budi-couchdb3-dev restart: on-failure image: budibase/couchdb diff --git a/package.json b/package.json index 4b6716f7e7..d99304423a 100644 --- a/package.json +++ b/package.json @@ -74,6 +74,7 @@ "build:docker:single": "./scripts/build-single-image.sh", "build:docker:dependencies": "docker build -f hosting/dependencies/Dockerfile -t budibase/dependencies:latest ./hosting", "publish:docker:couch": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/couchdb/Dockerfile -t budibase/couchdb:latest -t budibase/couchdb:v3.2.1 --push ./hosting/couchdb", + "publish:docker:couch-sqs": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/couchdb/Dockerfile.v2 -t budibase/couchdb:v3.2.1-sqs --push ./hosting/couchdb", "publish:docker:dependencies": "docker buildx build --platform linux/arm64,linux/amd64 -f hosting/dependencies/Dockerfile -t budibase/dependencies:latest -t budibase/dependencies:v3.2.1 --push ./hosting", "release:helm": "node scripts/releaseHelmChart", "env:multi:enable": "lerna run --stream env:multi:enable", diff --git a/packages/backend-core/src/constants/db.ts b/packages/backend-core/src/constants/db.ts index f4caac502e..c11c227b66 100644 --- a/packages/backend-core/src/constants/db.ts +++ b/packages/backend-core/src/constants/db.ts @@ -66,3 +66,4 @@ export const APP_PREFIX = prefixed(DocumentType.APP) export const APP_DEV = prefixed(DocumentType.APP_DEV) export const APP_DEV_PREFIX = APP_DEV export const BUDIBASE_DATASOURCE_TYPE = "budibase" +export const SQLITE_DESIGN_DOC_ID = "_design/sqlite" diff --git a/packages/backend-core/src/db/couch/DatabaseImpl.ts b/packages/backend-core/src/db/couch/DatabaseImpl.ts index 416313f520..c1347f4f2b 100644 --- a/packages/backend-core/src/db/couch/DatabaseImpl.ts +++ b/packages/backend-core/src/db/couch/DatabaseImpl.ts @@ -18,6 +18,7 @@ import { directCouchUrlCall } from "./utils" import { getPouchDB } from "./pouchDB" import { WriteStream, ReadStream } from "fs" import { newid } from "../../docIds/newid" +import { SQLITE_DESIGN_DOC_ID } from "../../constants" import { DDInstrumentedDatabase } from "../instrumentation" const DATABASE_NOT_FOUND = "Database does not exist." 
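The hunk below gives `DatabaseImpl` a raw-SQL passthrough: the query string is POSTed as `text/plain` (see the `directCouchUrlCall` change in the couch utils hunk further down) to the `_design/sqlite` document of the target database, on the separate SQS endpoint configured by the new `COUCH_DB_SQL_URL` setting. A minimal caller sketch, assuming an app context is active; the table name in the SQL and the use of `Row` as the result shape are illustrative, not prescribed by this PR:

```ts
import { context } from "@budibase/backend-core"
import { Row } from "@budibase/types"

// Hedged sketch: read rows for one table straight from the SQLite replica.
async function rowsForTable(tableId: string): Promise<Row[]> {
  const db = context.getAppDB()
  // sql() POSTs the string to <COUCH_DB_SQL_URL>/<dbName>/_design/sqlite and
  // returns the parsed JSON body; any status over 300 becomes a thrown Error.
  return db.sql<Row>(`SELECT * FROM "${tableId}" LIMIT 10`)
}
```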
@@ -247,6 +248,21 @@ export class DatabaseImpl implements Database { }) } + async sql(sql: string): Promise { + const dbName = this.name + const url = `/${dbName}/${SQLITE_DESIGN_DOC_ID}` + const response = await directCouchUrlCall({ + url: `${this.couchInfo.sqlUrl}/${url}`, + method: "POST", + cookie: this.couchInfo.cookie, + body: sql, + }) + if (response.status > 300) { + throw new Error(await response.text()) + } + return (await response.json()) as T[] + } + async query( viewName: string, params: DatabaseQueryOpts diff --git a/packages/backend-core/src/db/couch/connections.ts b/packages/backend-core/src/db/couch/connections.ts index 4214c7cdc6..8dbbe34e3a 100644 --- a/packages/backend-core/src/db/couch/connections.ts +++ b/packages/backend-core/src/db/couch/connections.ts @@ -25,6 +25,7 @@ export const getCouchInfo = (connection?: string) => { const authCookie = Buffer.from(`${username}:${password}`).toString("base64") return { url: urlInfo.url!, + sqlUrl: env.COUCH_DB_SQL_URL, auth: { username: username, password: password, diff --git a/packages/backend-core/src/db/couch/utils.ts b/packages/backend-core/src/db/couch/utils.ts index 51b2a38998..005b02a896 100644 --- a/packages/backend-core/src/db/couch/utils.ts +++ b/packages/backend-core/src/db/couch/utils.ts @@ -30,8 +30,13 @@ export async function directCouchUrlCall({ }, } if (body && method !== "GET") { - params.body = JSON.stringify(body) - params.headers["Content-Type"] = "application/json" + if (typeof body === "string") { + params.body = body + params.headers["Content-Type"] = "text/plain" + } else { + params.body = JSON.stringify(body) + params.headers["Content-Type"] = "application/json" + } } return await fetch(checkSlashesInUrl(encodeURI(url)), params) } diff --git a/packages/backend-core/src/db/instrumentation.ts b/packages/backend-core/src/db/instrumentation.ts index 795f30d7cd..880f0a3c72 100644 --- a/packages/backend-core/src/db/instrumentation.ts +++ b/packages/backend-core/src/db/instrumentation.ts @@ -149,4 +149,11 @@ export class DDInstrumentedDatabase implements Database { return this.db.getIndexes(...args) }) } + + sql(sql: string): Promise { + return tracer.trace("db.sql", span => { + span?.addTags({ db_name: this.name }) + return this.db.sql(sql) + }) + } } diff --git a/packages/backend-core/src/db/lucene.ts b/packages/backend-core/src/db/lucene.ts index 987d750d45..4a2cfd34e2 100644 --- a/packages/backend-core/src/db/lucene.ts +++ b/packages/backend-core/src/db/lucene.ts @@ -1,28 +1,16 @@ import fetch from "node-fetch" import { getCouchInfo } from "./couch" -import { SearchFilters, Row, EmptyFilterOption } from "@budibase/types" +import { + SearchFilters, + Row, + EmptyFilterOption, + SearchResponse, + SearchParams, + WithRequired, +} from "@budibase/types" const QUERY_START_REGEX = /\d[0-9]*:/g -interface SearchResponse { - rows: T[] | any[] - bookmark?: string - totalRows: number -} - -export type SearchParams = { - tableId?: string - sort?: string - sortOrder?: string - sortType?: string - limit?: number - bookmark?: string - version?: string - indexer?: () => Promise - disableEscaping?: boolean - rows?: T | Row[] -} - export function removeKeyNumbering(key: any): string { if (typeof key === "string" && key.match(QUERY_START_REGEX) != null) { const parts = key.split(":") @@ -44,7 +32,7 @@ export class QueryBuilder { #query: SearchFilters #limit: number #sort?: string - #bookmark?: string + #bookmark?: string | number #sortOrder: string #sortType: string #includeDocs: boolean @@ -130,7 +118,7 @@ export class 
QueryBuilder { return this } - setBookmark(bookmark?: string) { + setBookmark(bookmark?: string | number) { if (bookmark != null) { this.#bookmark = bookmark } @@ -226,14 +214,20 @@ export class QueryBuilder { } } - /** - * Preprocesses a value before going into a lucene search. - * Transforms strings to lowercase and wraps strings and bools in quotes. - * @param value The value to process - * @param options The preprocess options - * @returns {string|*} - */ - preprocess(value: any, { escape, lowercase, wrap, type }: any = {}) { + preprocess( + value: any, + { + escape, + lowercase, + wrap, + type, + }: { + escape?: boolean + lowercase?: boolean + wrap?: boolean + type?: string + } = {} + ): string | any { const hasVersion = !!this.#version // Determine if type needs wrapped const originalType = typeof value @@ -561,7 +555,7 @@ async function runQuery( url: string, body: any, cookie: string -): Promise> { +): Promise, "totalRows">> { const response = await fetch(url, { body: JSON.stringify(body), method: "POST", @@ -575,7 +569,7 @@ async function runQuery( } const json = await response.json() - let output: SearchResponse = { + let output: WithRequired, "totalRows"> = { rows: [], totalRows: 0, } @@ -613,63 +607,51 @@ async function recursiveSearch( dbName: string, index: string, query: any, - params: any + params: SearchParams ): Promise { const bookmark = params.bookmark const rows = params.rows || [] - if (rows.length >= params.limit) { + if (params.limit && rows.length >= params.limit) { return rows } let pageSize = QueryBuilder.maxLimit - if (rows.length > params.limit - QueryBuilder.maxLimit) { + if (params.limit && rows.length > params.limit - QueryBuilder.maxLimit) { pageSize = params.limit - rows.length } - const page = await new QueryBuilder(dbName, index, query) + const queryBuilder = new QueryBuilder(dbName, index, query) + queryBuilder .setVersion(params.version) - .setTable(params.tableId) .setBookmark(bookmark) .setLimit(pageSize) .setSort(params.sort) .setSortOrder(params.sortOrder) .setSortType(params.sortType) - .run() + + if (params.tableId) { + queryBuilder.setTable(params.tableId) + } + + const page = await queryBuilder.run() if (!page.rows.length) { return rows } if (page.rows.length < QueryBuilder.maxLimit) { return [...rows, ...page.rows] } - const newParams = { + const newParams: SearchParams = { ...params, bookmark: page.bookmark, - rows: [...rows, ...page.rows], + rows: [...rows, ...page.rows] as Row[], } return await recursiveSearch(dbName, index, query, newParams) } -/** - * Performs a paginated search. A bookmark will be returned to allow the next - * page to be fetched. There is a max limit off 200 results per page in a - * paginated search. - * @param dbName Which database to run a lucene query on - * @param index Which search index to utilise - * @param query The JSON query structure - * @param params The search params including: - * tableId {string} The table ID to search - * sort {string} The sort column - * sortOrder {string} The sort order ("ascending" or "descending") - * sortType {string} Whether to treat sortable values as strings or - * numbers. 
("string" or "number") - * limit {number} The desired page size - * bookmark {string} The bookmark to resume from - * @returns {Promise<{hasNextPage: boolean, rows: *[]}>} - */ export async function paginatedSearch( dbName: string, index: string, query: SearchFilters, - params: SearchParams -) { + params: SearchParams +): Promise> { let limit = params.limit if (limit == null || isNaN(limit) || limit < 0) { limit = 50 @@ -713,29 +695,12 @@ export async function paginatedSearch( } } -/** - * Performs a full search, fetching multiple pages if required to return the - * desired amount of results. There is a limit of 1000 results to avoid - * heavy performance hits, and to avoid client components breaking from - * handling too much data. - * @param dbName Which database to run a lucene query on - * @param index Which search index to utilise - * @param query The JSON query structure - * @param params The search params including: - * tableId {string} The table ID to search - * sort {string} The sort column - * sortOrder {string} The sort order ("ascending" or "descending") - * sortType {string} Whether to treat sortable values as strings or - * numbers. ("string" or "number") - * limit {number} The desired number of results - * @returns {Promise<{rows: *}>} - */ export async function fullSearch( dbName: string, index: string, query: SearchFilters, - params: SearchParams -) { + params: SearchParams +): Promise<{ rows: Row[] }> { let limit = params.limit if (limit == null || isNaN(limit) || limit < 0) { limit = 1000 diff --git a/packages/backend-core/src/db/tests/lucene.spec.ts b/packages/backend-core/src/db/tests/lucene.spec.ts index 7716661d88..c41bdf88d1 100644 --- a/packages/backend-core/src/db/tests/lucene.spec.ts +++ b/packages/backend-core/src/db/tests/lucene.spec.ts @@ -1,23 +1,39 @@ import { newid } from "../../docIds/newid" import { getDB } from "../db" -import { Database, EmptyFilterOption } from "@budibase/types" -import { QueryBuilder, paginatedSearch, fullSearch } from "../lucene" +import { + Database, + EmptyFilterOption, + SortOrder, + SortType, + DocumentType, + SEPARATOR, +} from "@budibase/types" +import { fullSearch, paginatedSearch, QueryBuilder } from "../lucene" const INDEX_NAME = "main" +const TABLE_ID = DocumentType.TABLE + SEPARATOR + newid() const index = `function(doc) { - let props = ["property", "number", "array"] - for (let key of props) { - if (Array.isArray(doc[key])) { - for (let val of doc[key]) { + if (!doc._id.startsWith("ro_")) { + return + } + let keys = Object.keys(doc).filter(key => !key.startsWith("_")) + for (let key of keys) { + const value = doc[key] + if (Array.isArray(value)) { + for (let val of value) { index(key, val) } - } else if (doc[key]) { - index(key, doc[key]) + } else if (value) { + index(key, value) } } }` +function rowId(id?: string) { + return DocumentType.ROW + SEPARATOR + (id || newid()) +} + describe("lucene", () => { let db: Database, dbName: string @@ -25,10 +41,21 @@ describe("lucene", () => { dbName = `db-${newid()}` // create the DB for testing db = getDB(dbName) - await db.put({ _id: newid(), property: "word", array: ["1", "4"] }) - await db.put({ _id: newid(), property: "word2", array: ["3", "1"] }) await db.put({ - _id: newid(), + _id: rowId(), + tableId: TABLE_ID, + property: "word", + array: ["1", "4"], + }) + await db.put({ + _id: rowId(), + tableId: TABLE_ID, + property: "word2", + array: ["3", "1"], + }) + await db.put({ + _id: rowId(), + tableId: TABLE_ID, property: "word3", number: 1, array: ["1", "2"], @@ -240,7 +267,8 
@@ describe("lucene", () => { docs = Array(QueryBuilder.maxLimit * 2.5) .fill(0) .map((_, i) => ({ - _id: i.toString().padStart(3, "0"), + _id: rowId(i.toString().padStart(3, "0")), + tableId: TABLE_ID, property: `value_${i.toString().padStart(3, "0")}`, array: [], })) @@ -338,10 +366,11 @@ describe("lucene", () => { }, }, { + tableId: TABLE_ID, limit: 1, sort: "property", - sortType: "string", - sortOrder: "desc", + sortType: SortType.STRING, + sortOrder: SortOrder.DESCENDING, } ) expect(page.rows.length).toBe(1) @@ -360,7 +389,10 @@ describe("lucene", () => { property: "wo", }, }, - {} + { + tableId: TABLE_ID, + query: {}, + } ) expect(page.rows.length).toBe(3) }) diff --git a/packages/backend-core/src/index.ts b/packages/backend-core/src/index.ts index 8001017092..5ce35ee760 100644 --- a/packages/backend-core/src/index.ts +++ b/packages/backend-core/src/index.ts @@ -32,7 +32,6 @@ export { default as env } from "./environment" export * as blacklist from "./blacklist" export * as docUpdates from "./docUpdates" export * from "./utils/Duration" -export { SearchParams } from "./db" export * as docIds from "./docIds" export * as security from "./security" // Add context to tenancy for backwards compatibility diff --git a/packages/server/package.json b/packages/server/package.json index 4d1df4d734..59f06ea399 100644 --- a/packages/server/package.json +++ b/packages/server/package.json @@ -83,7 +83,7 @@ "joi": "17.6.0", "js-yaml": "4.1.0", "jsonschema": "1.4.0", - "knex": "2.4.0", + "knex": "2.4.2", "koa": "2.13.4", "koa-body": "4.2.0", "koa-compress": "4.0.1", @@ -109,6 +109,8 @@ "server-destroy": "1.0.1", "snowflake-promise": "^4.5.0", "socket.io": "4.6.1", + "sqlite3": "5.1.6", + "swagger-parser": "10.0.3", "tar": "6.1.15", "to-json-schema": "0.2.5", "undici": "^6.0.1", diff --git a/packages/server/src/api/controllers/row/ExternalRequest.ts b/packages/server/src/api/controllers/row/ExternalRequest.ts index f89c9dc51a..7fc0333de1 100644 --- a/packages/server/src/api/controllers/row/ExternalRequest.ts +++ b/packages/server/src/api/controllers/row/ExternalRequest.ts @@ -6,12 +6,10 @@ import { FieldType, FilterType, IncludeRelationship, - ManyToManyRelationshipFieldMetadata, OneToManyRelationshipFieldMetadata, Operation, PaginationJson, RelationshipFieldMetadata, - RelationshipsJson, Row, SearchFilters, SortJson, @@ -23,14 +21,20 @@ import { breakExternalTableId, breakRowIdField, convertRowId, - generateRowIdField, isRowId, isSQL, + generateRowIdField, } from "../../../integrations/utils" +import { + buildExternalRelationships, + buildSqlFieldList, + generateIdForRow, + sqlOutputProcessing, + isManyToMany, +} from "./utils" import { getDatasourceAndQuery } from "../../../sdk/app/rows/utils" import { processObjectSync } from "@budibase/string-templates" import { cloneDeep } from "lodash/fp" -import { processDates, processFormulas } from "../../../utilities/rowProcessor" import { db as dbCore } from "@budibase/backend-core" import AliasTables from "./alias" import sdk from "../../../sdk" @@ -154,7 +158,8 @@ function cleanupConfig(config: RunConfig, table: Table): RunConfig { // filter out fields which cannot be keys const fieldNames = Object.entries(table.schema) .filter(schema => primaryOptions.find(val => val === schema[1].type)) - .map(([fieldName]) => fieldName) + // map to fieldName + .map(entry => entry[0]) const iterateObject = (obj: { [key: string]: any }) => { for (let [field, value] of Object.entries(obj)) { if (fieldNames.find(name => name === field) && isRowId(value)) { @@ -183,34 
+188,6 @@ function cleanupConfig(config: RunConfig, table: Table): RunConfig { return config } -function generateIdForRow( - row: Row | undefined, - table: Table, - isLinked: boolean = false -): string { - const primary = table.primary - if (!row || !primary) { - return "" - } - // build id array - let idParts = [] - for (let field of primary) { - let fieldValue = extractFieldValue({ - row, - tableName: table.name, - fieldName: field, - isLinked, - }) - if (fieldValue != null) { - idParts.push(fieldValue) - } - } - if (idParts.length === 0) { - return "" - } - return generateRowIdField(idParts) -} - function getEndpoint(tableId: string | undefined, operation: string) { if (!tableId) { throw new Error("Cannot get endpoint information - no table ID specified") @@ -223,71 +200,6 @@ function getEndpoint(tableId: string | undefined, operation: string) { } } -// need to handle table name + field or just field, depending on if relationships used -function extractFieldValue({ - row, - tableName, - fieldName, - isLinked, -}: { - row: Row - tableName: string - fieldName: string - isLinked: boolean -}) { - let value = row[`${tableName}.${fieldName}`] - if (value == null && !isLinked) { - value = row[fieldName] - } - return value -} - -function basicProcessing({ - row, - table, - isLinked, -}: { - row: Row - table: Table - isLinked: boolean -}): Row { - const thisRow: Row = {} - // filter the row down to what is actually the row (not joined) - for (let field of Object.values(table.schema)) { - const fieldName = field.name - - const value = extractFieldValue({ - row, - tableName: table.name, - fieldName, - isLinked, - }) - - // all responses include "select col as table.col" so that overlaps are handled - if (value != null) { - thisRow[fieldName] = value - } - } - thisRow._id = generateIdForRow(row, table, isLinked) - thisRow.tableId = table._id - thisRow._rev = "rev" - return thisRow -} - -function fixArrayTypes(row: Row, table: Table) { - for (let [fieldName, schema] of Object.entries(table.schema)) { - if (schema.type === FieldType.ARRAY && typeof row[fieldName] === "string") { - try { - row[fieldName] = JSON.parse(row[fieldName]) - } catch (err) { - // couldn't convert back to array, ignore - delete row[fieldName] - } - } - } - return row -} - function isOneSide( field: RelationshipFieldMetadata ): field is OneToManyRelationshipFieldMetadata { @@ -296,12 +208,6 @@ function isOneSide( ) } -function isManyToMany( - field: RelationshipFieldMetadata -): field is ManyToManyRelationshipFieldMetadata { - return !!(field as ManyToManyRelationshipFieldMetadata).through -} - function isEditableColumn(column: FieldSchema) { const isExternalAutoColumn = column.autocolumn && @@ -435,187 +341,6 @@ export class ExternalRequest { return { row: newRow, manyRelationships } } - async processRelationshipFields( - table: Table, - row: Row, - relationships: RelationshipsJson[] - ): Promise { - for (let relationship of relationships) { - const linkedTable = this.tables[relationship.tableName] - if (!linkedTable || !row[relationship.column]) { - continue - } - for (let key of Object.keys(row[relationship.column])) { - let relatedRow: Row = row[relationship.column][key] - // add this row as context for the relationship - for (let col of Object.values(linkedTable.schema)) { - if (col.type === FieldType.LINK && col.tableId === table._id) { - relatedRow[col.name] = [row] - } - } - // process additional types - relatedRow = processDates(table, relatedRow) - relatedRow = await processFormulas(linkedTable, relatedRow) - 
row[relationship.column][key] = relatedRow - } - } - return row - } - - /** - * This iterates through the returned rows and works out what elements of the rows - * actually match up to another row (based on primary keys) - this is pretty specific - * to SQL and the way that SQL relationships are returned based on joins. - * This is complicated, but the idea is that when a SQL query returns all the relations - * will be separate rows, with all of the data in each row. We have to decipher what comes - * from where (which tables) and how to convert that into budibase columns. - */ - updateRelationshipColumns( - table: Table, - row: Row, - rows: { [key: string]: Row }, - relationships: RelationshipsJson[] - ) { - const columns: { [key: string]: any } = {} - for (let relationship of relationships) { - const linkedTable = this.tables[relationship.tableName] - if (!linkedTable) { - continue - } - const fromColumn = `${table.name}.${relationship.from}` - const toColumn = `${linkedTable.name}.${relationship.to}` - // this is important when working with multiple relationships - // between the same tables, don't want to overlap/multiply the relations - if ( - !relationship.through && - row[fromColumn]?.toString() !== row[toColumn]?.toString() - ) { - continue - } - - let linked = basicProcessing({ row, table: linkedTable, isLinked: true }) - if (!linked._id) { - continue - } - columns[relationship.column] = linked - } - for (let [column, related] of Object.entries(columns)) { - if (!row._id) { - continue - } - const rowId: string = row._id - if (!Array.isArray(rows[rowId][column])) { - rows[rowId][column] = [] - } - // make sure relationship hasn't been found already - if ( - !rows[rowId][column].find( - (relation: Row) => relation._id === related._id - ) - ) { - rows[rowId][column].push(related) - } - } - return rows - } - - async outputProcessing( - rows: Row[] = [], - table: Table, - relationships: RelationshipsJson[] - ) { - if (!rows || rows.length === 0 || rows[0].read === true) { - return [] - } - let finalRows: { [key: string]: Row } = {} - for (let row of rows) { - const rowId = generateIdForRow(row, table) - row._id = rowId - // this is a relationship of some sort - if (finalRows[rowId]) { - finalRows = this.updateRelationshipColumns( - table, - row, - finalRows, - relationships - ) - continue - } - const thisRow = fixArrayTypes( - basicProcessing({ row, table, isLinked: false }), - table - ) - if (thisRow._id == null) { - throw "Unable to generate row ID for SQL rows" - } - finalRows[thisRow._id] = thisRow - // do this at end once its been added to the final rows - finalRows = this.updateRelationshipColumns( - table, - row, - finalRows, - relationships - ) - } - - // make sure all related rows are correct - let finalRowArray = [] - for (let row of Object.values(finalRows)) { - finalRowArray.push( - await this.processRelationshipFields(table, row, relationships) - ) - } - - // process some additional types - finalRowArray = processDates(table, finalRowArray) - return finalRowArray - } - - /** - * Gets the list of relationship JSON structures based on the columns in the table, - * this will be used by the underlying library to build whatever relationship mechanism - * it has (e.g. SQL joins). 
- */ - buildRelationships(table: Table): RelationshipsJson[] { - const relationships = [] - for (let [fieldName, field] of Object.entries(table.schema)) { - if (field.type !== FieldType.LINK) { - continue - } - const { tableName: linkTableName } = breakExternalTableId(field.tableId) - // no table to link to, this is not a valid relationships - if (!linkTableName || !this.tables[linkTableName]) { - continue - } - const linkTable = this.tables[linkTableName] - if (!table.primary || !linkTable.primary) { - continue - } - const definition: RelationshipsJson = { - tableName: linkTableName, - // need to specify where to put this back into - column: fieldName, - } - if (isManyToMany(field)) { - const { tableName: throughTableName } = breakExternalTableId( - field.through - ) - definition.through = throughTableName - // don't support composite keys for relationships - definition.from = field.throughTo || table.primary[0] - definition.to = field.throughFrom || linkTable.primary[0] - definition.fromPrimary = table.primary[0] - definition.toPrimary = linkTable.primary[0] - } else { - // if no foreign key specified then use the name of the field in other table - definition.from = field.foreignKey || table.primary[0] - definition.to = field.fieldName - } - relationships.push(definition) - } - return relationships - } - /** * This is a cached lookup, of relationship records, this is mainly for creating/deleting junction * information. @@ -801,41 +526,6 @@ export class ExternalRequest { } } - /** - * This function is a bit crazy, but the exact purpose of it is to protect against the scenario in which - * you have column overlap in relationships, e.g. we join a few different tables and they all have the - * concept of an ID, but for some of them it will be null (if they say don't have a relationship). - * Creating the specific list of fields that we desire, and excluding the ones that are no use to us - * is more performant and has the added benefit of protecting against this scenario. 
- */ - buildFields(table: Table, includeRelations: boolean) { - function extractRealFields(table: Table, existing: string[] = []) { - return Object.entries(table.schema) - .filter( - column => - column[1].type !== FieldType.LINK && - column[1].type !== FieldType.FORMULA && - !existing.find((field: string) => field === column[0]) - ) - .map(column => `${table.name}.${column[0]}`) - } - let fields = extractRealFields(table) - for (let field of Object.values(table.schema)) { - if (field.type !== FieldType.LINK || !includeRelations) { - continue - } - const { tableName: linkTableName } = breakExternalTableId(field.tableId) - if (linkTableName) { - const linkTable = this.tables[linkTableName] - if (linkTable) { - const linkedFields = extractRealFields(linkTable, fields) - fields = fields.concat(linkedFields) - } - } - } - return fields - } - async run(config: RunConfig): Promise> { const { operation, tableId } = this let { datasourceId, tableName } = breakExternalTableId(tableId) @@ -869,14 +559,16 @@ export class ExternalRequest { delete sort?.[sortColumn] break case FieldType.NUMBER: - sort[sortColumn].type = SortType.number + if (sort && sort[sortColumn]) { + sort[sortColumn].type = SortType.NUMBER + } break } } filters = buildFilters(id, filters || {}, table) - const relationships = this.buildRelationships(table) + const relationships = buildExternalRelationships(table, this.tables) - const includeSqlRelationships = + const incRelationships = config.includeSqlRelationships === IncludeRelationship.INCLUDE // clean up row on ingress using schema @@ -896,7 +588,11 @@ export class ExternalRequest { }, resource: { // have to specify the fields to avoid column overlap (for SQL) - fields: isSql ? this.buildFields(table, includeSqlRelationships) : [], + fields: isSql + ? 
buildSqlFieldList(table, this.tables, { + relationships: incRelationships, + }) + : [], }, filters, sort, @@ -935,9 +631,10 @@ export class ExternalRequest { processed.manyRelationships ) } - const output = await this.outputProcessing( - responseRows, + const output = await sqlOutputProcessing( + response, table, + this.tables, relationships ) // if reading it'll just be an array of rows, return whole thing diff --git a/packages/server/src/api/controllers/row/alias.ts b/packages/server/src/api/controllers/row/alias.ts index 60c207c8ce..0ec9d1a09c 100644 --- a/packages/server/src/api/controllers/row/alias.ts +++ b/packages/server/src/api/controllers/row/alias.ts @@ -155,7 +155,9 @@ export default class AliasTables { return map } - async queryWithAliasing(json: QueryJson): DatasourcePlusQueryResponse { + async queryWithAliasing( + json: QueryJson + ): Promise { const datasourceId = json.endpoint.datasourceId const datasource = await sdk.datasources.get(datasourceId) diff --git a/packages/server/src/api/controllers/row/index.ts b/packages/server/src/api/controllers/row/index.ts index 3466a4491d..7dc70a3009 100644 --- a/packages/server/src/api/controllers/row/index.ts +++ b/packages/server/src/api/controllers/row/index.ts @@ -13,7 +13,7 @@ import { PatchRowRequest, PatchRowResponse, Row, - SearchParams, + RowSearchParams, SearchRowRequest, SearchRowResponse, UserCtx, @@ -192,7 +192,7 @@ export async function destroy(ctx: UserCtx) { export async function search(ctx: Ctx) { const tableId = utils.getTableId(ctx) - const searchParams: SearchParams = { + const searchParams: RowSearchParams = { ...ctx.request.body, tableId, } diff --git a/packages/server/src/api/controllers/row/utils.ts b/packages/server/src/api/controllers/row/utils.ts deleted file mode 100644 index ed6ccd4c53..0000000000 --- a/packages/server/src/api/controllers/row/utils.ts +++ /dev/null @@ -1,59 +0,0 @@ -import { InternalTables } from "../../../db/utils" -import * as userController from "../user" -import { context } from "@budibase/backend-core" -import { Ctx, Row, UserCtx } from "@budibase/types" - -import validateJs from "validate.js" - -validateJs.extend(validateJs.validators.datetime, { - parse: function (value: string) { - return new Date(value).getTime() - }, - // Input is a unix timestamp - format: function (value: string) { - return new Date(value).toISOString() - }, -}) - -export async function findRow(ctx: UserCtx, tableId: string, rowId: string) { - const db = context.getAppDB() - let row: Row - // TODO remove special user case in future - if (tableId === InternalTables.USER_METADATA) { - ctx.params = { - id: rowId, - } - await userController.findMetadata(ctx) - row = ctx.body - } else { - row = await db.get(rowId) - } - if (row.tableId !== tableId) { - throw "Supplied tableId does not match the rows tableId" - } - return row -} - -export function getTableId(ctx: Ctx): string { - // top priority, use the URL first - if (ctx.params?.sourceId) { - return ctx.params.sourceId - } - // now check for old way of specifying table ID - if (ctx.params?.tableId) { - return ctx.params.tableId - } - // check body for a table ID - if (ctx.request.body?.tableId) { - return ctx.request.body.tableId - } - // now check if a specific view name - if (ctx.params?.viewName) { - return ctx.params.viewName - } - throw new Error("Unable to find table ID in request") -} - -export function isUserMetadataTable(tableId: string) { - return tableId === InternalTables.USER_METADATA -} diff --git 
a/packages/server/src/api/controllers/row/utils/basic.ts b/packages/server/src/api/controllers/row/utils/basic.ts new file mode 100644 index 0000000000..2fb67b07d5 --- /dev/null +++ b/packages/server/src/api/controllers/row/utils/basic.ts @@ -0,0 +1,97 @@ +// need to handle table name + field or just field, depending on if relationships used +import { FieldType, Row, Table } from "@budibase/types" +import { generateRowIdField } from "../../../../integrations/utils" + +function extractFieldValue({ + row, + tableName, + fieldName, + isLinked, +}: { + row: Row + tableName: string + fieldName: string + isLinked: boolean +}) { + let value = row[`${tableName}.${fieldName}`] + if (value == null && !isLinked) { + value = row[fieldName] + } + return value +} + +export function generateIdForRow( + row: Row | undefined, + table: Table, + isLinked: boolean = false +): string { + const primary = table.primary + if (!row || !primary) { + return "" + } + // build id array + let idParts = [] + for (let field of primary) { + let fieldValue = extractFieldValue({ + row, + tableName: table.name, + fieldName: field, + isLinked, + }) + if (fieldValue != null) { + idParts.push(fieldValue) + } + } + if (idParts.length === 0) { + return "" + } + return generateRowIdField(idParts) +} + +export function basicProcessing({ + row, + table, + isLinked, + internal, +}: { + row: Row + table: Table + isLinked: boolean + internal?: boolean +}): Row { + const thisRow: Row = {} + // filter the row down to what is actually the row (not joined) + for (let field of Object.values(table.schema)) { + const fieldName = field.name + const value = extractFieldValue({ + row, + tableName: table.name, + fieldName, + isLinked, + }) + // all responses include "select col as table.col" so that overlaps are handled + if (value != null) { + thisRow[fieldName] = value + } + } + if (!internal) { + thisRow._id = generateIdForRow(row, table, isLinked) + thisRow.tableId = table._id + thisRow._rev = "rev" + } + return thisRow +} + +export function fixArrayTypes(row: Row, table: Table) { + for (let [fieldName, schema] of Object.entries(table.schema)) { + if (schema.type === FieldType.ARRAY && typeof row[fieldName] === "string") { + try { + row[fieldName] = JSON.parse(row[fieldName]) + } catch (err) { + // couldn't convert back to array, ignore + delete row[fieldName] + } + } + } + return row +} diff --git a/packages/server/src/api/controllers/row/utils/index.ts b/packages/server/src/api/controllers/row/utils/index.ts new file mode 100644 index 0000000000..ec88d01f22 --- /dev/null +++ b/packages/server/src/api/controllers/row/utils/index.ts @@ -0,0 +1,3 @@ +export * from "./basic" +export * from "./sqlUtils" +export * from "./utils" diff --git a/packages/server/src/api/controllers/row/utils/sqlUtils.ts b/packages/server/src/api/controllers/row/utils/sqlUtils.ts new file mode 100644 index 0000000000..2df829154f --- /dev/null +++ b/packages/server/src/api/controllers/row/utils/sqlUtils.ts @@ -0,0 +1,194 @@ +import { + FieldType, + ManyToManyRelationshipFieldMetadata, + RelationshipFieldMetadata, + RelationshipsJson, + Row, + Table, +} from "@budibase/types" +import { breakExternalTableId } from "../../../../integrations/utils" +import { basicProcessing } from "./basic" +import { generateJunctionTableID } from "../../../../db/utils" + +type TableMap = Record + +export function isManyToMany( + field: RelationshipFieldMetadata +): field is ManyToManyRelationshipFieldMetadata { + return !!(field as ManyToManyRelationshipFieldMetadata).through +} + +/** + * 
This iterates through the returned rows and works out what elements of the rows + * actually match up to another row (based on primary keys) - this is pretty specific + * to SQL and the way that SQL relationships are returned based on joins. + * This is complicated, but the idea is that when a SQL query returns all the relations + * will be separate rows, with all of the data in each row. We have to decipher what comes + * from where (which tables) and how to convert that into budibase columns. + */ +export async function updateRelationshipColumns( + table: Table, + tables: TableMap, + row: Row, + rows: { [key: string]: Row }, + relationships: RelationshipsJson[], + opts?: { internal?: boolean } +) { + const columns: { [key: string]: any } = {} + for (let relationship of relationships) { + const linkedTable = tables[relationship.tableName] + if (!linkedTable) { + continue + } + const fromColumn = `${table.name}.${relationship.from}` + const toColumn = `${linkedTable.name}.${relationship.to}` + // this is important when working with multiple relationships + // between the same tables, don't want to overlap/multiply the relations + if ( + !relationship.through && + row[fromColumn]?.toString() !== row[toColumn]?.toString() + ) { + continue + } + + let linked = await basicProcessing({ + row, + table: linkedTable, + isLinked: true, + internal: opts?.internal, + }) + if (!linked._id) { + continue + } + columns[relationship.column] = linked + } + for (let [column, related] of Object.entries(columns)) { + if (!row._id) { + continue + } + const rowId: string = row._id + if (!Array.isArray(rows[rowId][column])) { + rows[rowId][column] = [] + } + // make sure relationship hasn't been found already + if ( + !rows[rowId][column].find((relation: Row) => relation._id === related._id) + ) { + rows[rowId][column].push(related) + } + } + return rows +} + +/** + * Gets the list of relationship JSON structures based on the columns in the table, + * this will be used by the underlying library to build whatever relationship mechanism + * it has (e.g. SQL joins). 
+ */ +export function buildExternalRelationships( + table: Table, + tables: TableMap +): RelationshipsJson[] { + const relationships = [] + for (let [fieldName, field] of Object.entries(table.schema)) { + if (field.type !== FieldType.LINK) { + continue + } + const { tableName: linkTableName } = breakExternalTableId(field.tableId) + // no table to link to, this is not a valid relationships + if (!linkTableName || !tables[linkTableName]) { + continue + } + const linkTable = tables[linkTableName] + if (!table.primary || !linkTable.primary) { + continue + } + const definition: RelationshipsJson = { + tableName: linkTableName, + // need to specify where to put this back into + column: fieldName, + } + if (isManyToMany(field)) { + const { tableName: throughTableName } = breakExternalTableId( + field.through + ) + definition.through = throughTableName + // don't support composite keys for relationships + definition.from = field.throughTo || table.primary[0] + definition.to = field.throughFrom || linkTable.primary[0] + definition.fromPrimary = table.primary[0] + definition.toPrimary = linkTable.primary[0] + } else { + // if no foreign key specified then use the name of the field in other table + definition.from = field.foreignKey || table.primary[0] + definition.to = field.fieldName + } + relationships.push(definition) + } + return relationships +} + +export function buildInternalRelationships(table: Table): RelationshipsJson[] { + const relationships: RelationshipsJson[] = [] + const links = Object.values(table.schema).filter( + column => column.type === FieldType.LINK + ) + const tableId = table._id! + for (let link of links) { + if (link.type !== FieldType.LINK) { + continue + } + const linkTableId = link.tableId! + const junctionTableId = generateJunctionTableID(tableId, linkTableId) + const isFirstTable = tableId > linkTableId + relationships.push({ + through: junctionTableId, + column: link.name, + tableName: linkTableId, + fromPrimary: "_id", + to: isFirstTable ? "doc2.rowId" : "doc1.rowId", + from: isFirstTable ? "doc1.rowId" : "doc2.rowId", + toPrimary: "_id", + }) + } + return relationships +} + +/** + * This function is a bit crazy, but the exact purpose of it is to protect against the scenario in which + * you have column overlap in relationships, e.g. we join a few different tables and they all have the + * concept of an ID, but for some of them it will be null (if they say don't have a relationship). + * Creating the specific list of fields that we desire, and excluding the ones that are no use to us + * is more performant and has the added benefit of protecting against this scenario. 
+ */ +export function buildSqlFieldList( + table: Table, + tables: TableMap, + opts?: { relationships: boolean } +) { + function extractRealFields(table: Table, existing: string[] = []) { + return Object.entries(table.schema) + .filter( + column => + column[1].type !== FieldType.LINK && + column[1].type !== FieldType.FORMULA && + !existing.find((field: string) => field === column[0]) + ) + .map(column => `${table.name}.${column[0]}`) + } + let fields = extractRealFields(table) + for (let field of Object.values(table.schema)) { + if (field.type !== FieldType.LINK || !opts?.relationships) { + continue + } + const { tableName: linkTableName } = breakExternalTableId(field.tableId) + if (linkTableName) { + const linkTable = tables[linkTableName] + if (linkTable) { + const linkedFields = extractRealFields(linkTable, fields) + fields = fields.concat(linkedFields) + } + } + } + return fields +} diff --git a/packages/server/src/api/controllers/row/utils/utils.ts b/packages/server/src/api/controllers/row/utils/utils.ts new file mode 100644 index 0000000000..c77ccf63a9 --- /dev/null +++ b/packages/server/src/api/controllers/row/utils/utils.ts @@ -0,0 +1,182 @@ +import { InternalTables } from "../../../../db/utils" +import * as userController from "../../user" +import { context } from "@budibase/backend-core" +import { + Ctx, + DatasourcePlusQueryResponse, + FieldType, + RelationshipsJson, + Row, + Table, + UserCtx, +} from "@budibase/types" +import { + processDates, + processFormulas, +} from "../../../../utilities/rowProcessor" +import { updateRelationshipColumns } from "./sqlUtils" +import { basicProcessing, generateIdForRow, fixArrayTypes } from "./basic" +import sdk from "../../../../sdk" + +import validateJs from "validate.js" + +validateJs.extend(validateJs.validators.datetime, { + parse: function (value: string) { + return new Date(value).getTime() + }, + // Input is a unix timestamp + format: function (value: string) { + return new Date(value).toISOString() + }, +}) + +export async function processRelationshipFields( + table: Table, + tables: Record, + row: Row, + relationships: RelationshipsJson[] +): Promise { + for (let relationship of relationships) { + const linkedTable = tables[relationship.tableName] + if (!linkedTable || !row[relationship.column]) { + continue + } + for (let key of Object.keys(row[relationship.column])) { + let relatedRow: Row = row[relationship.column][key] + // add this row as context for the relationship + for (let col of Object.values(linkedTable.schema)) { + if (col.type === FieldType.LINK && col.tableId === table._id) { + relatedRow[col.name] = [row] + } + } + // process additional types + relatedRow = processDates(table, relatedRow) + relatedRow = await processFormulas(linkedTable, relatedRow) + row[relationship.column][key] = relatedRow + } + } + return row +} + +export async function findRow(ctx: UserCtx, tableId: string, rowId: string) { + const db = context.getAppDB() + let row: Row + // TODO remove special user case in future + if (tableId === InternalTables.USER_METADATA) { + ctx.params = { + id: rowId, + } + await userController.findMetadata(ctx) + row = ctx.body + } else { + row = await db.get(rowId) + } + if (row.tableId !== tableId) { + throw "Supplied tableId does not match the rows tableId" + } + return row +} + +export function getTableId(ctx: Ctx): string { + // top priority, use the URL first + if (ctx.params?.sourceId) { + return ctx.params.sourceId + } + // now check for old way of specifying table ID + if (ctx.params?.tableId) { + return 
ctx.params.tableId + } + // check body for a table ID + if (ctx.request.body?.tableId) { + return ctx.request.body.tableId + } + // now check if a specific view name + if (ctx.params?.viewName) { + return ctx.params.viewName + } + throw new Error("Unable to find table ID in request") +} + +export async function validate( + opts: { row: Row } & ({ tableId: string } | { table: Table }) +) { + let fetchedTable: Table + if ("tableId" in opts) { + fetchedTable = await sdk.tables.getTable(opts.tableId) + } else { + fetchedTable = opts.table + } + return sdk.rows.utils.validate({ + ...opts, + table: fetchedTable, + }) +} + +export async function sqlOutputProcessing( + rows: DatasourcePlusQueryResponse, + table: Table, + tables: Record, + relationships: RelationshipsJson[], + opts?: { internal?: boolean } +): Promise { + if (!Array.isArray(rows) || rows.length === 0 || rows[0].read === true) { + return [] + } + let finalRows: { [key: string]: Row } = {} + for (let row of rows as Row[]) { + let rowId = row._id + if (!rowId) { + rowId = generateIdForRow(row, table) + row._id = rowId + } + // this is a relationship of some sort + if (finalRows[rowId]) { + finalRows = await updateRelationshipColumns( + table, + tables, + row, + finalRows, + relationships, + opts + ) + continue + } + const thisRow = fixArrayTypes( + basicProcessing({ + row, + table, + isLinked: false, + internal: opts?.internal, + }), + table + ) + if (thisRow._id == null) { + throw new Error("Unable to generate row ID for SQL rows") + } + finalRows[thisRow._id] = thisRow + // do this at end once its been added to the final rows + finalRows = await updateRelationshipColumns( + table, + tables, + row, + finalRows, + relationships + ) + } + + // make sure all related rows are correct + let finalRowArray = [] + for (let row of Object.values(finalRows)) { + finalRowArray.push( + await processRelationshipFields(table, tables, row, relationships) + ) + } + + // process some additional types + finalRowArray = processDates(table, finalRowArray) + return finalRowArray +} + +export function isUserMetadataTable(tableId: string) { + return tableId === InternalTables.USER_METADATA +} diff --git a/packages/server/src/api/controllers/row/views.ts b/packages/server/src/api/controllers/row/views.ts index 2c6cb4b17a..2644446d82 100644 --- a/packages/server/src/api/controllers/row/views.ts +++ b/packages/server/src/api/controllers/row/views.ts @@ -4,8 +4,8 @@ import { SearchRowResponse, SearchViewRowRequest, RequiredKeys, - SearchParams, SearchFilters, + RowSearchParams, } from "@budibase/types" import { dataFilters } from "@budibase/shared-core" import sdk from "../../../sdk" @@ -57,7 +57,7 @@ export async function searchView( } const searchOptions: RequiredKeys & - RequiredKeys> = { + RequiredKeys> = { tableId: view.tableId, query, fields: viewFields, diff --git a/packages/server/src/api/controllers/static/index.ts b/packages/server/src/api/controllers/static/index.ts index f004921d08..d767ca9e98 100644 --- a/packages/server/src/api/controllers/static/index.ts +++ b/packages/server/src/api/controllers/static/index.ts @@ -2,7 +2,7 @@ import { InvalidFileExtensions } from "@budibase/shared-core" import AppComponent from "./templates/BudibaseApp.svelte" import { join } from "../../../utilities/centralPath" import * as uuid from "uuid" -import { ObjectStoreBuckets, devClientVersion } from "../../../constants" +import { devClientVersion, ObjectStoreBuckets } from "../../../constants" import { processString } from "@budibase/string-templates" import { 
loadHandlebarsFile, @@ -10,24 +10,24 @@ import { TOP_LEVEL_PATH, } from "../../../utilities/fileSystem" import env from "../../../environment" -import { DocumentType } from "../../../db/utils" import { + BadRequestError, + configs, context, objectStore, utils, - configs, - BadRequestError, } from "@budibase/backend-core" import AWS from "aws-sdk" import fs from "fs" import sdk from "../../../sdk" import * as pro from "@budibase/pro" import { - UserCtx, App, Ctx, - ProcessAttachmentResponse, + DocumentType, Feature, + ProcessAttachmentResponse, + UserCtx, } from "@budibase/types" import { getAppMigrationVersion, @@ -147,8 +147,7 @@ const requiresMigration = async (ctx: Ctx) => { const latestMigrationApplied = await getAppMigrationVersion(appId) - const requiresMigrations = latestMigrationApplied !== latestMigration - return requiresMigrations + return latestMigrationApplied !== latestMigration } export const serveApp = async function (ctx: UserCtx) { diff --git a/packages/server/src/api/controllers/table/utils.ts b/packages/server/src/api/controllers/table/utils.ts index 0c9933a4cf..c00a8c1bc2 100644 --- a/packages/server/src/api/controllers/table/utils.ts +++ b/packages/server/src/api/controllers/table/utils.ts @@ -33,6 +33,8 @@ import { FieldTypeSubtypes, AttachmentFieldMetadata, } from "@budibase/types" +import sdk from "../../../sdk" +import env from "../../../environment" export async function clearColumns(table: Table, columnNames: string[]) { const db = context.getAppDB() @@ -342,6 +344,9 @@ class TableSaveFunctions { importRows: this.importRows, user: this.user, }) + if (env.SQS_SEARCH_ENABLE) { + await sdk.tables.sqs.addTableToSqlite(table) + } return table } diff --git a/packages/server/src/api/controllers/view/utils.ts b/packages/server/src/api/controllers/view/utils.ts index 7f9ae1a9bc..1229ff0e0f 100644 --- a/packages/server/src/api/controllers/view/utils.ts +++ b/packages/server/src/api/controllers/view/utils.ts @@ -2,13 +2,18 @@ import { ViewName, generateMemoryViewID, getMemoryViewParams, - DocumentType, SEPARATOR, } from "../../../db/utils" import env from "../../../environment" import { context } from "@budibase/backend-core" import viewBuilder from "./viewBuilder" -import { Database, DBView, DesignDocument, InMemoryView } from "@budibase/types" +import { + Database, + DBView, + DocumentType, + DesignDocument, + InMemoryView, +} from "@budibase/types" export async function getView(viewName: string) { const db = context.getAppDB() diff --git a/packages/server/src/api/controllers/view/views.ts b/packages/server/src/api/controllers/view/views.ts index 7eb67f98fc..abcc8627f3 100644 --- a/packages/server/src/api/controllers/view/views.ts +++ b/packages/server/src/api/controllers/view/views.ts @@ -4,7 +4,6 @@ import { csv, json, jsonWithSchema, Format, isFormat } from "./exporters" import { deleteView, getView, getViews, saveView } from "./utils" import { fetchView } from "../row" import { context, events } from "@budibase/backend-core" -import { DocumentType } from "../../../db/utils" import sdk from "../../../sdk" import { FieldType, @@ -14,6 +13,7 @@ import { TableExportFormat, TableSchema, View, + DocumentType, } from "@budibase/types" import { builderSocket } from "../../../websockets" diff --git a/packages/server/src/api/routes/tests/row.spec.ts b/packages/server/src/api/routes/tests/row.spec.ts index d9895466a5..db10901367 100644 --- a/packages/server/src/api/routes/tests/row.spec.ts +++ b/packages/server/src/api/routes/tests/row.spec.ts @@ -1272,7 +1272,6 @@ 
describe.each([ ? {} : { hasNextPage: false, - bookmark: null, }), }) }) diff --git a/packages/server/src/api/routes/tests/viewV2.spec.ts b/packages/server/src/api/routes/tests/viewV2.spec.ts index a4ecd7c818..780fcb070e 100644 --- a/packages/server/src/api/routes/tests/viewV2.spec.ts +++ b/packages/server/src/api/routes/tests/viewV2.spec.ts @@ -652,7 +652,6 @@ describe.each([ ? {} : { hasNextPage: false, - bookmark: null, }), }) }) @@ -705,7 +704,6 @@ describe.each([ ? {} : { hasNextPage: false, - bookmark: null, }), }) }) @@ -813,7 +811,7 @@ describe.each([ { field: "age", order: SortOrder.ASCENDING, - type: SortType.number, + type: SortType.NUMBER, }, ["Danny", "Alice", "Charly", "Bob"], ], @@ -835,7 +833,7 @@ describe.each([ { field: "age", order: SortOrder.DESCENDING, - type: SortType.number, + type: SortType.NUMBER, }, ["Bob", "Charly", "Alice", "Danny"], ], diff --git a/packages/server/src/db/linkedRows/LinkDocument.ts b/packages/server/src/db/linkedRows/LinkDocument.ts index 59dc758c4c..8d25bbe93c 100644 --- a/packages/server/src/db/linkedRows/LinkDocument.ts +++ b/packages/server/src/db/linkedRows/LinkDocument.ts @@ -1,4 +1,4 @@ -import { generateLinkID } from "../utils" +import { generateLinkID, generateJunctionTableID } from "../utils" import { FieldType, LinkDocument } from "@budibase/types" /** @@ -16,6 +16,7 @@ import { FieldType, LinkDocument } from "@budibase/types" class LinkDocumentImpl implements LinkDocument { _id: string type: string + tableId: string doc1: { rowId: string fieldName: string @@ -43,16 +44,20 @@ class LinkDocumentImpl implements LinkDocument { fieldName2 ) this.type = FieldType.LINK - this.doc1 = { + this.tableId = generateJunctionTableID(tableId1, tableId2) + const docA = { tableId: tableId1, fieldName: fieldName1, rowId: rowId1, } - this.doc2 = { + const docB = { tableId: tableId2, fieldName: fieldName2, rowId: rowId2, } + // have to determine which one will be doc1 - very important for SQL linking + this.doc1 = docA.tableId > docB.tableId ? docA : docB + this.doc2 = docA.tableId > docB.tableId ? docB : docA } } diff --git a/packages/server/src/db/utils.ts b/packages/server/src/db/utils.ts index 983cbf423c..b1c02b1764 100644 --- a/packages/server/src/db/utils.ts +++ b/packages/server/src/db/utils.ts @@ -55,6 +55,14 @@ export const getUserMetadataParams = dbCore.getUserMetadataParams export const generateUserMetadataID = dbCore.generateUserMetadataID export const getGlobalIDFromUserMetadataID = dbCore.getGlobalIDFromUserMetadataID +export const CONSTANT_INTERNAL_ROW_COLS = [ + "_id", + "_rev", + "type", + "createdAt", + "updatedAt", + "tableId", +] /** * Gets parameters for retrieving tables, this is a utility function for the getDocParams function. @@ -286,6 +294,12 @@ export function generatePluginID(name: string) { return `${DocumentType.PLUGIN}${SEPARATOR}${name}` } +export function generateJunctionTableID(tableId1: string, tableId2: string) { + const first = tableId1 > tableId2 ? tableId1 : tableId2 + const second = tableId1 > tableId2 ? tableId2 : tableId1 + return `${first}${SEPARATOR}${second}` +} + /** * Generates a new view ID. * @returns The new view ID which the view doc can be stored under. 
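A note on the ordering logic introduced above: LinkDocumentImpl and generateJunctionTableID both order the two table IDs with the same plain lexicographic comparison, so the junction table ID and the doc1/doc2 assignment always agree regardless of which order the linked tables are passed in. A minimal sketch of that invariant, using hypothetical table IDs and a placeholder SEPARATOR constant (the real one comes from db/utils):

// sketch only - hypothetical IDs, standalone placeholder for SEPARATOR
const SEPARATOR = "_"

function generateJunctionTableID(tableId1: string, tableId2: string) {
  // lexicographically larger table ID always comes first
  const first = tableId1 > tableId2 ? tableId1 : tableId2
  const second = tableId1 > tableId2 ? tableId2 : tableId1
  return `${first}${SEPARATOR}${second}`
}

const a = "ta_aaa"
const b = "ta_bbb"
// both call orders yield the same ID, matching the doc1/doc2 ordering rule
console.log(generateJunctionTableID(a, b) === generateJunctionTableID(b, a)) // true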
diff --git a/packages/server/src/db/views/staticViews.ts b/packages/server/src/db/views/staticViews.ts index 700bd2568a..7218c6f8ce 100644 --- a/packages/server/src/db/views/staticViews.ts +++ b/packages/server/src/db/views/staticViews.ts @@ -1,6 +1,6 @@ import { context } from "@budibase/backend-core" -import { DocumentType, SEPARATOR, ViewName } from "../utils" -import { LinkDocument, Row, SearchIndex } from "@budibase/types" +import { SEPARATOR, ViewName } from "../utils" +import { DocumentType, LinkDocument, Row, SearchIndex } from "@budibase/types" const SCREEN_PREFIX = DocumentType.SCREEN + SEPARATOR diff --git a/packages/server/src/definitions/datasource.ts b/packages/server/src/definitions/datasource.ts index 1e436670a8..b90fa5db0c 100644 --- a/packages/server/src/definitions/datasource.ts +++ b/packages/server/src/definitions/datasource.ts @@ -6,4 +6,5 @@ export interface QueryOptions { disableReturning?: boolean + disableBindings?: boolean } diff --git a/packages/server/src/environment.ts b/packages/server/src/environment.ts index a7c6df29ea..f8adcbe0ee 100644 --- a/packages/server/src/environment.ts +++ b/packages/server/src/environment.ts @@ -86,6 +86,7 @@ const environment = { SQL_MAX_ROWS: process.env.SQL_MAX_ROWS, SQL_LOGGING_ENABLE: process.env.SQL_LOGGING_ENABLE, SQL_ALIASING_DISABLE: process.env.SQL_ALIASING_DISABLE, + SQS_SEARCH_ENABLE: process.env.SQS_SEARCH_ENABLE, // flags ALLOW_DEV_AUTOMATIONS: process.env.ALLOW_DEV_AUTOMATIONS, DISABLE_THREADING: process.env.DISABLE_THREADING, diff --git a/packages/server/src/integration-test/postgres.spec.ts b/packages/server/src/integration-test/postgres.spec.ts index 288489471b..cf80f2359f 100644 --- a/packages/server/src/integration-test/postgres.spec.ts +++ b/packages/server/src/integration-test/postgres.spec.ts @@ -685,7 +685,6 @@ describe("postgres integrations", () => { expect(res.body).toEqual({ rows: [], - bookmark: null, hasNextPage: false, }) }) @@ -710,7 +709,6 @@ describe("postgres integrations", () => { rows: expect.arrayContaining( rows.map(r => expect.objectContaining(r.rowData)) ), - bookmark: null, hasNextPage: false, }) expect(res.body.rows).toHaveLength(rowsCount) @@ -772,7 +770,6 @@ describe("postgres integrations", () => { expect(res.body).toEqual({ rows: expect.arrayContaining(rowsToFilter.map(expect.objectContaining)), - bookmark: null, hasNextPage: false, }) expect(res.body.rows).toHaveLength(4) diff --git a/packages/server/src/integrations/base/query.ts b/packages/server/src/integrations/base/query.ts index b906ecbb1b..03e6028e32 100644 --- a/packages/server/src/integrations/base/query.ts +++ b/packages/server/src/integrations/base/query.ts @@ -9,7 +9,7 @@ import sdk from "../../sdk" export async function makeExternalQuery( datasource: Datasource, json: QueryJson -): DatasourcePlusQueryResponse { +): Promise { datasource = await sdk.datasources.enrich(datasource) const Integration = await getIntegration(datasource.source) // query is the opinionated function diff --git a/packages/server/src/integrations/base/sql.ts b/packages/server/src/integrations/base/sql.ts index 2093d455b9..e99e34ab0f 100644 --- a/packages/server/src/integrations/base/sql.ts +++ b/packages/server/src/integrations/base/sql.ts @@ -1,7 +1,12 @@ import { Knex, knex } from "knex" import { db as dbCore } from "@budibase/backend-core" import { QueryOptions } from "../../definitions/datasource" -import { isIsoDateString, SqlClient, isValidFilter } from "../utils" +import { + isIsoDateString, + SqlClient, + isValidFilter, + getNativeSql, +} 
from "../utils" import SqlTableQueryBuilder from "./sqlTable" import { BBReferenceFieldMetadata, @@ -11,14 +16,16 @@ import { JsonFieldMetadata, Operation, QueryJson, + SqlQuery, RelationshipsJson, SearchFilters, SortDirection, + SqlQueryBinding, Table, } from "@budibase/types" import environment from "../../environment" -type QueryFunction = (query: Knex.SqlNative, operation: Operation) => any +type QueryFunction = (query: SqlQuery | SqlQuery[], operation: Operation) => any const envLimit = environment.SQL_MAX_ROWS ? parseInt(environment.SQL_MAX_ROWS) @@ -43,8 +50,11 @@ function likeKey(client: string, key: string): string { start = "[" end = "]" break + case SqlClient.SQL_LITE: + start = end = "'" + break default: - throw "Unknown client" + throw new Error("Unknown client generating like key") } const parts = key.split(".") key = parts.map(part => `${start}${part}${end}`).join(".") @@ -587,9 +597,15 @@ class SqlQueryBuilder extends SqlTableQueryBuilder { * which for the sake of mySQL stops adding the returning statement to inserts, updates and deletes. * @return the query ready to be passed to the driver. */ - _query(json: QueryJson, opts: QueryOptions = {}): Knex.SqlNative | Knex.Sql { + _query(json: QueryJson, opts: QueryOptions = {}): SqlQuery | SqlQuery[] { const sqlClient = this.getSqlClient() - const client = knex({ client: sqlClient }) + const config: { client: string; useNullAsDefault?: boolean } = { + client: sqlClient, + } + if (sqlClient === SqlClient.SQL_LITE) { + config.useNullAsDefault = true + } + const client = knex(config) let query: Knex.QueryBuilder const builder = new InternalBuilder(sqlClient) switch (this._operation(json)) { @@ -615,7 +631,12 @@ class SqlQueryBuilder extends SqlTableQueryBuilder { default: throw `Operation type is not supported by SQL query builder` } - return query.toSQL().toNative() + + if (opts?.disableBindings) { + return { sql: query.toString() } + } else { + return getNativeSql(query) + } } async getReturningRow(queryFn: QueryFunction, json: QueryJson) { @@ -730,7 +751,7 @@ class SqlQueryBuilder extends SqlTableQueryBuilder { ) } - log(query: string, values?: any[]) { + log(query: string, values?: SqlQueryBinding) { if (!environment.SQL_LOGGING_ENABLE) { return } diff --git a/packages/server/src/integrations/base/sqlTable.ts b/packages/server/src/integrations/base/sqlTable.ts index 80f3864438..4ff336421f 100644 --- a/packages/server/src/integrations/base/sqlTable.ts +++ b/packages/server/src/integrations/base/sqlTable.ts @@ -8,8 +8,9 @@ import { RenameColumn, Table, FieldType, + SqlQuery, } from "@budibase/types" -import { breakExternalTableId, SqlClient } from "../utils" +import { breakExternalTableId, getNativeSql, SqlClient } from "../utils" import SchemaBuilder = Knex.SchemaBuilder import CreateTableBuilder = Knex.CreateTableBuilder import { utils } from "@budibase/shared-core" @@ -199,7 +200,7 @@ class SqlTableQueryBuilder { return json.endpoint.operation } - _tableQuery(json: QueryJson): Knex.Sql | Knex.SqlNative { + _tableQuery(json: QueryJson): SqlQuery | SqlQuery[] { let client = knex({ client: this.sqlClient }).schema let schemaName = json?.endpoint?.schema if (schemaName) { @@ -246,7 +247,7 @@ class SqlTableQueryBuilder { const tableName = schemaName ? 
`${schemaName}.${json.table.name}` : `${json.table.name}` - const sql = query.toSQL() + const sql = getNativeSql(query) if (Array.isArray(sql)) { for (const query of sql) { if (query.sql.startsWith("exec sp_rename")) { @@ -265,7 +266,7 @@ class SqlTableQueryBuilder { default: throw "Table operation is of unknown type" } - return query.toSQL() + return getNativeSql(query) } } diff --git a/packages/server/src/integrations/googlesheets.ts b/packages/server/src/integrations/googlesheets.ts index bc0e25e267..1573c98f16 100644 --- a/packages/server/src/integrations/googlesheets.ts +++ b/packages/server/src/integrations/googlesheets.ts @@ -336,7 +336,7 @@ class GoogleSheetsIntegration implements DatasourcePlus { return { tables: externalTables, errors } } - async query(json: QueryJson): DatasourcePlusQueryResponse { + async query(json: QueryJson): Promise { const sheet = json.endpoint.entityId switch (json.endpoint.operation) { case Operation.CREATE: diff --git a/packages/server/src/integrations/microsoftSqlServer.ts b/packages/server/src/integrations/microsoftSqlServer.ts index 8e8e84f533..5626d7eda3 100644 --- a/packages/server/src/integrations/microsoftSqlServer.ts +++ b/packages/server/src/integrations/microsoftSqlServer.ts @@ -496,7 +496,7 @@ class SqlServerIntegration extends Sql implements DatasourcePlus { return response.recordset || [{ deleted: true }] } - async query(json: QueryJson): DatasourcePlusQueryResponse { + async query(json: QueryJson): Promise { const schema = this.config.schema await this.connect() if (schema && schema !== DEFAULT_SCHEMA && json?.endpoint) { diff --git a/packages/server/src/integrations/mysql.ts b/packages/server/src/integrations/mysql.ts index fd9d57d255..19a63a44ad 100644 --- a/packages/server/src/integrations/mysql.ts +++ b/packages/server/src/integrations/mysql.ts @@ -13,6 +13,7 @@ import { Schema, TableSourceType, DatasourcePlusQueryResponse, + SqlQueryBinding, } from "@budibase/types" import { getSqlQuery, @@ -113,7 +114,7 @@ const defaultTypeCasting = function (field: any, next: any) { return next() } -export function bindingTypeCoerce(bindings: any[]) { +export function bindingTypeCoerce(bindings: SqlQueryBinding) { for (let i = 0; i < bindings.length; i++) { const binding = bindings[i] if (typeof binding !== "string") { @@ -143,7 +144,7 @@ export function bindingTypeCoerce(bindings: any[]) { } class MySQLIntegration extends Sql implements DatasourcePlus { - private config: MySQLConfig + private readonly config: MySQLConfig private client?: mysql.Connection constructor(config: MySQLConfig) { @@ -382,7 +383,7 @@ class MySQLIntegration extends Sql implements DatasourcePlus { return results.length ? 
results : [{ deleted: true }] } - async query(json: QueryJson): DatasourcePlusQueryResponse { + async query(json: QueryJson): Promise { await this.connect() try { const queryFn = (query: any) => diff --git a/packages/server/src/integrations/oracle.ts b/packages/server/src/integrations/oracle.ts index 9104aadbcc..f6ec593f2f 100644 --- a/packages/server/src/integrations/oracle.ts +++ b/packages/server/src/integrations/oracle.ts @@ -423,7 +423,7 @@ class OracleIntegration extends Sql implements DatasourcePlus { : [{ deleted: true }] } - async query(json: QueryJson): DatasourcePlusQueryResponse { + async query(json: QueryJson): Promise { const operation = this._operation(json) const input = this._query(json, { disableReturning: true }) as SqlQuery if (Array.isArray(input)) { diff --git a/packages/server/src/integrations/postgres.ts b/packages/server/src/integrations/postgres.ts index 05a519da64..e810986757 100644 --- a/packages/server/src/integrations/postgres.ts +++ b/packages/server/src/integrations/postgres.ts @@ -421,7 +421,7 @@ class PostgresIntegration extends Sql implements DatasourcePlus { return response.rows.length ? response.rows : [{ deleted: true }] } - async query(json: QueryJson): DatasourcePlusQueryResponse { + async query(json: QueryJson): Promise { const operation = this._operation(json).toLowerCase() const input = this._query(json) as SqlQuery if (Array.isArray(input)) { diff --git a/packages/server/src/integrations/tests/sqlAlias.spec.ts b/packages/server/src/integrations/tests/sqlAlias.spec.ts index ccb164bcfd..bfca24ff7d 100644 --- a/packages/server/src/integrations/tests/sqlAlias.spec.ts +++ b/packages/server/src/integrations/tests/sqlAlias.spec.ts @@ -1,10 +1,15 @@ -import { Datasource, Operation, QueryJson, SourceName } from "@budibase/types" +import { + Datasource, + Operation, + QueryJson, + SourceName, + SqlQuery, +} from "@budibase/types" import { join } from "path" import Sql from "../base/sql" import { SqlClient } from "../utils" import AliasTables from "../../api/controllers/row/alias" import { generator } from "@budibase/backend-core/tests" -import { Knex } from "knex" function multiline(sql: string) { return sql.replace(/\n/g, "").replace(/ +/g, " ") @@ -172,8 +177,8 @@ describe("Captures of real examples", () => { }) // now check returning - let returningQuery: Knex.SqlNative = { sql: "", bindings: [] } - SQL.getReturningRow((input: Knex.SqlNative) => { + let returningQuery: SqlQuery | SqlQuery[] = { sql: "", bindings: [] } + SQL.getReturningRow((input: SqlQuery | SqlQuery[]) => { returningQuery = input }, queryJson) expect(returningQuery).toEqual({ diff --git a/packages/server/src/integrations/utils.ts b/packages/server/src/integrations/utils.ts index 1ba379da9e..d5f6d191e1 100644 --- a/packages/server/src/integrations/utils.ts +++ b/packages/server/src/integrations/utils.ts @@ -1,19 +1,15 @@ import { SqlQuery, Table, - SearchFilters, Datasource, FieldType, TableSourceType, } from "@budibase/types" import { DocumentType, SEPARATOR } from "../db/utils" -import { - InvalidColumns, - NoEmptyFilterStrings, - DEFAULT_BB_DATASOURCE_ID, -} from "../constants" +import { InvalidColumns, DEFAULT_BB_DATASOURCE_ID } from "../constants" import { helpers } from "@budibase/shared-core" import env from "../environment" +import { Knex } from "knex" const DOUBLE_SEPARATOR = `${SEPARATOR}${SEPARATOR}` const ROW_ID_REGEX = /^\[.*]$/g @@ -91,6 +87,7 @@ export enum SqlClient { POSTGRES = "pg", MY_SQL = "mysql2", ORACLE = "oracledb", + SQL_LITE = "sqlite3", } const isCloud = 
env.isProd() && !env.SELF_HOSTED @@ -109,6 +106,23 @@ export function isInternalTableID(tableId: string) { return !isExternalTableID(tableId) } +export function getNativeSql( + query: Knex.SchemaBuilder | Knex.QueryBuilder +): SqlQuery | SqlQuery[] { + let sql = query.toSQL() + if (Array.isArray(sql)) { + return sql as SqlQuery[] + } + let native: Knex.SqlNative | undefined + if (sql.toNative) { + native = sql.toNative() + } + return { + sql: native?.sql || sql.sql, + bindings: native?.bindings || sql.bindings, + } as SqlQuery +} + export function isExternalTable(table: Table) { if ( table?.sourceId && @@ -420,32 +434,3 @@ export function getPrimaryDisplay(testValue: unknown): string | undefined { export function isValidFilter(value: any) { return value != null && value !== "" } - -// don't do a pure falsy check, as 0 is included -// https://github.com/Budibase/budibase/issues/10118 -export function removeEmptyFilters(filters: SearchFilters) { - for (let filterField of NoEmptyFilterStrings) { - if (!filters[filterField]) { - continue - } - - for (let filterType of Object.keys(filters)) { - if (filterType !== filterField) { - continue - } - // don't know which one we're checking, type could be anything - const value = filters[filterType] as unknown - if (typeof value === "object") { - for (let [key, value] of Object.entries( - filters[filterType] as object - )) { - if (value == null || value === "") { - // @ts-ignore - delete filters[filterField][key] - } - } - } - } - } - return filters -} diff --git a/packages/server/src/middleware/builder.ts b/packages/server/src/middleware/builder.ts index 7df135c86a..87c7eef51f 100644 --- a/packages/server/src/middleware/builder.ts +++ b/packages/server/src/middleware/builder.ts @@ -1,8 +1,4 @@ -import { - APP_DEV_PREFIX, - DocumentType, - getGlobalIDFromUserMetadataID, -} from "../db/utils" +import { APP_DEV_PREFIX, getGlobalIDFromUserMetadataID } from "../db/utils" import { doesUserHaveLock, updateLock, @@ -10,7 +6,7 @@ import { setDebounce, } from "../utilities/redis" import { db as dbCore, cache } from "@budibase/backend-core" -import { UserCtx, Database } from "@budibase/types" +import { DocumentType, UserCtx, Database } from "@budibase/types" const DEBOUNCE_TIME_SEC = 30 diff --git a/packages/server/src/sdk/app/rows/search.ts b/packages/server/src/sdk/app/rows/search.ts index 63bbd699fa..928c0f6780 100644 --- a/packages/server/src/sdk/app/rows/search.ts +++ b/packages/server/src/sdk/app/rows/search.ts @@ -1,10 +1,18 @@ -import { Row, SearchFilters, SearchParams, SortOrder } from "@budibase/types" +import { + Row, + RowSearchParams, + SearchFilters, + SearchResponse, +} from "@budibase/types" import { isExternalTableID } from "../../../integrations/utils" import * as internal from "./search/internal" import * as external from "./search/external" -import { Format } from "../../../api/controllers/view/exporters" +import { NoEmptyFilterStrings } from "../../../constants" +import * as sqs from "./search/sqs" +import env from "../../../environment" +import { ExportRowsParams, ExportRowsResult } from "./search/types" -export { isValidFilter, removeEmptyFilters } from "../../../integrations/utils" +export { isValidFilter } from "../../../integrations/utils" export interface ViewParams { calculation: string @@ -19,29 +27,46 @@ function pickApi(tableId: any) { return internal } -export async function search(options: SearchParams): Promise<{ - rows: any[] - hasNextPage?: boolean - bookmark?: number | null -}> { - return 
pickApi(options.tableId).search(options) +// don't do a pure falsy check, as 0 is included +// https://github.com/Budibase/budibase/issues/10118 +export function removeEmptyFilters(filters: SearchFilters) { + for (let filterField of NoEmptyFilterStrings) { + if (!filters[filterField]) { + continue + } + + for (let filterType of Object.keys(filters)) { + if (filterType !== filterField) { + continue + } + // don't know which one we're checking, type could be anything + const value = filters[filterType] as unknown + if (typeof value === "object") { + for (let [key, value] of Object.entries( + filters[filterType] as object + )) { + if (value == null || value === "") { + // @ts-ignore + delete filters[filterField][key] + } + } + } + } + } + return filters } -export interface ExportRowsParams { - tableId: string - format: Format - delimiter?: string - rowIds?: string[] - columns?: string[] - query?: SearchFilters - sort?: string - sortOrder?: SortOrder - customHeaders?: { [key: string]: string } -} - -export interface ExportRowsResult { - fileName: string - content: string +export async function search( + options: RowSearchParams +): Promise<SearchResponse<Row>> { + const isExternalTable = isExternalTableID(options.tableId) + if (isExternalTable) { + return external.search(options) + } else if (env.SQS_SEARCH_ENABLE) { + return sqs.search(options) + } else { + return internal.search(options) + } } export async function exportRows( diff --git a/packages/server/src/sdk/app/rows/search/external.ts b/packages/server/src/sdk/app/rows/search/external.ts index e95b904767..e0a3bad94e 100644 --- a/packages/server/src/sdk/app/rows/search/external.ts +++ b/packages/server/src/sdk/app/rows/search/external.ts @@ -6,28 +6,31 @@ import { IncludeRelationship, Row, SearchFilters, - SearchParams, + RowSearchParams, + SearchResponse, } from "@budibase/types" import * as exporters from "../../../../api/controllers/view/exporters" -import sdk from "../../../../sdk" import { handleRequest } from "../../../../api/controllers/row/external" import { breakExternalTableId, breakRowIdField, } from "../../../../integrations/utils" -import { cleanExportRows } from "../utils" import { utils } from "@budibase/shared-core" -import { ExportRowsParams, ExportRowsResult } from "../search" +import { ExportRowsParams, ExportRowsResult } from "./types" import { HTTPError, db } from "@budibase/backend-core" import { searchInputMapping } from "./utils" import pick from "lodash/pick" import { outputProcessing } from "../../../../utilities/rowProcessor" +import sdk from "../../../" -export async function search(options: SearchParams) { +export async function search( + options: RowSearchParams +): Promise<SearchResponse<Row>> { const { tableId } = options const { paginate, query, ...params } = options const { limit } = params - let bookmark = (params.bookmark && parseInt(params.bookmark)) || null + let bookmark = + (params.bookmark && parseInt(params.bookmark as string)) || undefined if (paginate && !bookmark) { bookmark = 1 } @@ -92,7 +95,7 @@ export async function search(options: SearchParams) { rows = rows.map((r: any) => pick(r, fields)) } - rows = await outputProcessing(table, rows, { + rows = await outputProcessing<Row[]>(table, rows, { preserveLinks: true, squash: true, }) @@ -158,7 +161,6 @@ export async function exportRows( if (!tableName) { throw new HTTPError("Could not find table name.", 400) } - const schema = datasource.entities[tableName].schema // Filter data to only specified columns if required if (columns && columns.length) { @@ -173,7 +175,14 @@ export async 
function exportRows( rows = result.rows } - let exportRows = cleanExportRows(rows, schema, format, columns, customHeaders) + const schema = datasource.entities[tableName].schema + let exportRows = sdk.rows.utils.cleanExportRows( + rows, + schema, + format, + columns, + customHeaders + ) let content: string switch (format) { diff --git a/packages/server/src/sdk/app/rows/search/internal.ts b/packages/server/src/sdk/app/rows/search/internal.ts index 8147ca46ad..610807a10e 100644 --- a/packages/server/src/sdk/app/rows/search/internal.ts +++ b/packages/server/src/sdk/app/rows/search/internal.ts @@ -1,20 +1,19 @@ -import { - context, - db, - HTTPError, - SearchParams as InternalSearchParams, -} from "@budibase/backend-core" +import { context, db, HTTPError } from "@budibase/backend-core" import env from "../../../../environment" -import { fullSearch, paginatedSearch } from "./internalSearch" +import { fullSearch, paginatedSearch, searchInputMapping } from "./utils" +import { getRowParams, InternalTables } from "../../../../db/utils" import { + Database, DocumentType, - getRowParams, - InternalTables, -} from "../../../../db/utils" + Row, + RowSearchParams, + SearchResponse, + SortType, + Table, + User, +} from "@budibase/types" import { getGlobalUsersFromMetadata } from "../../../../utilities/global" import { outputProcessing } from "../../../../utilities/rowProcessor" -import { Database, Row, SearchParams, Table } from "@budibase/types" -import { cleanExportRows } from "../utils" import { csv, Format, @@ -29,17 +28,18 @@ import { migrateToInMemoryView, } from "../../../../api/controllers/view/utils" import sdk from "../../../../sdk" -import { ExportRowsParams, ExportRowsResult } from "../search" +import { ExportRowsParams, ExportRowsResult } from "./types" import pick from "lodash/pick" import { breakRowIdField } from "../../../../integrations/utils" -export async function search(options: SearchParams) { +export async function search( + options: RowSearchParams +): Promise<SearchResponse<Row>> { const { tableId } = options const { paginate, query } = options - const params: InternalSearchParams = { + const params: RowSearchParams = { tableId: options.tableId, sort: options.sort, sortOrder: options.sortOrder, @@ -48,6 +48,7 @@ bookmark: options.bookmark, version: options.version, disableEscaping: options.disableEscaping, + query: {}, } let table = await sdk.tables.getTable(tableId) @@ -55,7 +56,8 @@ if (params.sort && !params.sortType) { const schema = table.schema const sortField = schema[params.sort] - params.sortType = sortField.type === "number" ? "number" : "string" + params.sortType = + sortField.type === "number" ? 
SortType.NUMBER : SortType.STRING } let response @@ -69,7 +71,7 @@ export async function search(options: SearchParams) { if (response.rows && response.rows.length) { // enrich with global users if from users table if (tableId === InternalTables.USER_METADATA) { - response.rows = await getGlobalUsersFromMetadata(response.rows) + response.rows = await getGlobalUsersFromMetadata(response.rows as User[]) } if (options.fields) { @@ -100,10 +102,10 @@ export async function exportRows( const db = context.getAppDB() const table = await sdk.tables.getTable(tableId) - let result + let result: Row[] = [] if (rowIds) { let response = ( - await db.allDocs({ + await db.allDocs({ include_docs: true, keys: rowIds.map((row: string) => { const ids = breakRowIdField(row) @@ -116,9 +118,9 @@ export async function exportRows( return ids[0] }), }) - ).rows.map(row => row.doc) + ).rows.map(row => row.doc!) - result = await outputProcessing(table, response) + result = await outputProcessing(table, response) } else if (query) { let searchResponse = await search({ tableId, @@ -145,7 +147,13 @@ export async function exportRows( rows = result } - let exportRows = cleanExportRows(rows, schema, format, columns, customHeaders) + let exportRows = sdk.rows.utils.cleanExportRows( + rows, + schema, + format, + columns, + customHeaders + ) if (format === Format.CSV) { return { fileName: "export.csv", diff --git a/packages/server/src/sdk/app/rows/search/internalSearch.ts b/packages/server/src/sdk/app/rows/search/internalSearch.ts index 9dc12342d6..e69de29bb2 100644 --- a/packages/server/src/sdk/app/rows/search/internalSearch.ts +++ b/packages/server/src/sdk/app/rows/search/internalSearch.ts @@ -1,18 +0,0 @@ -import { db as dbCore, context, SearchParams } from "@budibase/backend-core" -import { SearchFilters, Row, SearchIndex } from "@budibase/types" - -export async function paginatedSearch( - query: SearchFilters, - params: SearchParams -) { - const appId = context.getAppId() - return dbCore.paginatedSearch(appId!, SearchIndex.ROWS, query, params) -} - -export async function fullSearch( - query: SearchFilters, - params: SearchParams -) { - const appId = context.getAppId() - return dbCore.fullSearch(appId!, SearchIndex.ROWS, query, params) -} diff --git a/packages/server/src/sdk/app/rows/search/sqs.ts b/packages/server/src/sdk/app/rows/search/sqs.ts new file mode 100644 index 0000000000..8aba94c886 --- /dev/null +++ b/packages/server/src/sdk/app/rows/search/sqs.ts @@ -0,0 +1,190 @@ +import { + FieldType, + Operation, + QueryJson, + RelationshipFieldMetadata, + Row, + SearchFilters, + RowSearchParams, + SearchResponse, + SortDirection, + SortOrder, + SortType, + Table, +} from "@budibase/types" +import SqlQueryBuilder from "../../../../integrations/base/sql" +import { SqlClient } from "../../../../integrations/utils" +import { + buildInternalRelationships, + sqlOutputProcessing, +} from "../../../../api/controllers/row/utils" +import sdk from "../../../index" +import { context } from "@budibase/backend-core" +import { CONSTANT_INTERNAL_ROW_COLS } from "../../../../db/utils" + +function buildInternalFieldList( + table: Table, + tables: Table[], + opts: { relationships: boolean } = { relationships: true } +) { + let fieldList: string[] = [] + fieldList = fieldList.concat( + CONSTANT_INTERNAL_ROW_COLS.map(col => `${table._id}.${col}`) + ) + if (opts.relationships) { + for (let col of Object.values(table.schema)) { + if (col.type === FieldType.LINK) { + const linkCol = col as RelationshipFieldMetadata + const relatedTable = 
tables.find( + table => table._id === linkCol.tableId + )! + fieldList = fieldList.concat( + buildInternalFieldList(relatedTable, tables, { relationships: false }) + ) + } else { + fieldList.push(`${table._id}.${col.name}`) + } + } + } + return fieldList +} + +function tableInFilter(name: string) { + return `:${name}.` +} + +function cleanupFilters(filters: SearchFilters, tables: Table[]) { + for (let filter of Object.values(filters)) { + if (typeof filter !== "object") { + continue + } + for (let [key, keyFilter] of Object.entries(filter)) { + if (keyFilter === "") { + delete filter[key] + } + + // relationship, switch to table ID + const tableRelated = tables.find( + table => + table.originalName && key.includes(tableInFilter(table.originalName)) + ) + if (tableRelated && tableRelated.originalName) { + filter[ + key.replace( + tableInFilter(tableRelated.originalName), + tableInFilter(tableRelated._id!) + ) + ] = filter[key] + delete filter[key] + } + } + } + return filters +} + +function buildTableMap(tables: Table[]) { + const tableMap: Record<string, Table> = {} + for (let table of tables) { + // update the table name, should never query by name for SQLite + table.originalName = table.name + table.name = table._id! + tableMap[table._id!] = table + } + return tableMap +} + +export async function search( + options: RowSearchParams +): Promise<SearchResponse<Row>> { + const { tableId, paginate, query, ...params } = options + + const builder = new SqlQueryBuilder(SqlClient.SQL_LITE) + const allTables = await sdk.tables.getAllInternalTables() + const allTablesMap = buildTableMap(allTables) + const table = allTables.find(table => table._id === tableId) + if (!table) { + throw new Error("Unable to find table") + } + + const relationships = buildInternalRelationships(table) + + const request: QueryJson = { + endpoint: { + // not important, we query ourselves + datasourceId: "internal", + entityId: table._id!, + operation: Operation.READ, + }, + filters: cleanupFilters(query, allTables), + table, + meta: { + table, + tables: allTablesMap, + }, + resource: { + fields: buildInternalFieldList(table, allTables), + }, + relationships, + } + // make sure only rows returned + request.filters!.equal = { + ...request.filters?.equal, + type: "row", + } + + if (params.sort && !params.sortType) { + const sortField = table.schema[params.sort] + const sortType = + sortField.type === FieldType.NUMBER ? SortType.NUMBER : SortType.STRING + const sortDirection = + params.sortOrder === SortOrder.ASCENDING + ? SortDirection.ASCENDING + : SortDirection.DESCENDING + request.sort = { + [sortField.name]: { + direction: sortDirection, + type: sortType as SortType, + }, + } + } + if (paginate && params.limit) { + request.paginate = { + limit: params.limit, + page: params.bookmark, + } + } + try { + const query = builder._query(request, { + disableReturning: true, + disableBindings: true, + }) + + if (Array.isArray(query)) { + throw new Error("SQS cannot currently handle multiple queries") + } + + let sql = query.sql + + // quick hack for docIds + sql = sql.replace(/`doc1`.`rowId`/g, "`doc1.rowId`") + sql = sql.replace(/`doc2`.`rowId`/g, "`doc2.rowId`") + + const db = context.getAppDB() + const rows = await db.sql(sql) + + return { + rows: await sqlOutputProcessing( + rows, + table!, + allTablesMap, + relationships, + { + internal: true, + } + ), + } + } catch (err: any) { + const msg = typeof err === "string" ? 
err : err.message + throw new Error(`Unable to search by SQL - ${msg}`) + } +} diff --git a/packages/server/src/sdk/app/rows/search/tests/external.spec.ts b/packages/server/src/sdk/app/rows/search/tests/external.spec.ts index 596e41cece..f2bdec4692 100644 --- a/packages/server/src/sdk/app/rows/search/tests/external.spec.ts +++ b/packages/server/src/sdk/app/rows/search/tests/external.spec.ts @@ -6,7 +6,7 @@ import { Row, SourceName, Table, - SearchParams, + RowSearchParams, TableSourceType, } from "@budibase/types" @@ -108,7 +108,7 @@ describe("external search", () => { await config.doInContext(config.appId, async () => { const tableId = config.table!._id! - const searchParams: SearchParams = { + const searchParams: RowSearchParams = { tableId, query: {}, } @@ -125,7 +125,7 @@ describe("external search", () => { await config.doInContext(config.appId, async () => { const tableId = config.table!._id! - const searchParams: SearchParams = { + const searchParams: RowSearchParams = { tableId, query: {}, fields: ["name", "age"], @@ -149,7 +149,7 @@ describe("external search", () => { await config.doInContext(config.appId, async () => { const tableId = config.table!._id! - const searchParams: SearchParams = { + const searchParams: RowSearchParams = { tableId, query: { oneOf: { diff --git a/packages/server/src/sdk/app/rows/search/tests/internal.spec.ts b/packages/server/src/sdk/app/rows/search/tests/internal.spec.ts index d82af66e3d..5be0f4a258 100644 --- a/packages/server/src/sdk/app/rows/search/tests/internal.spec.ts +++ b/packages/server/src/sdk/app/rows/search/tests/internal.spec.ts @@ -2,7 +2,7 @@ import { FieldType, Row, Table, - SearchParams, + RowSearchParams, INTERNAL_TABLE_SOURCE_ID, TableSourceType, } from "@budibase/types" @@ -77,7 +77,7 @@ describe("internal", () => { await config.doInContext(config.appId, async () => { const tableId = config.table!._id! - const searchParams: SearchParams = { + const searchParams: RowSearchParams = { tableId, query: {}, } @@ -94,7 +94,7 @@ describe("internal", () => { await config.doInContext(config.appId, async () => { const tableId = config.table!._id! 
- const searchParams: SearchParams = { + const searchParams: RowSearchParams = { tableId, query: {}, fields: ["name", "age"], diff --git a/packages/server/src/api/routes/tests/internalSearch.spec.ts b/packages/server/src/sdk/app/rows/search/tests/lucene.ts similarity index 95% rename from packages/server/src/api/routes/tests/internalSearch.spec.ts rename to packages/server/src/sdk/app/rows/search/tests/lucene.ts index 9e1c3e3340..708f362198 100644 --- a/packages/server/src/api/routes/tests/internalSearch.spec.ts +++ b/packages/server/src/sdk/app/rows/search/tests/lucene.ts @@ -1,18 +1,18 @@ const nodeFetch = require("node-fetch") nodeFetch.mockSearch() -import { SearchParams } from "@budibase/backend-core" -import * as search from "../../../sdk/app/rows/search/internalSearch" -import { Row } from "@budibase/types" +import * as search from "../utils" +import { RowSearchParams, SortOrder, SortType } from "@budibase/types" // this will be mocked out for _search endpoint -const PARAMS: SearchParams = { +const PARAMS: RowSearchParams = { + query: {}, tableId: "ta_12345679abcdef", version: "1", bookmark: undefined, sort: undefined, - sortOrder: "ascending", - sortType: "string", + sortOrder: SortOrder.ASCENDING, + sortType: SortType.STRING, } function checkLucene(resp: any, expected: any, params = PARAMS) { diff --git a/packages/server/src/sdk/app/rows/search/tests/utils.spec.ts b/packages/server/src/sdk/app/rows/search/tests/utils.spec.ts index 269902bc88..bf7799402d 100644 --- a/packages/server/src/sdk/app/rows/search/tests/utils.spec.ts +++ b/packages/server/src/sdk/app/rows/search/tests/utils.spec.ts @@ -4,7 +4,7 @@ import { FieldType, FieldTypeSubtypes, INTERNAL_TABLE_SOURCE_ID, - SearchParams, + RowSearchParams, Table, TableSourceType, } from "@budibase/types" @@ -47,7 +47,7 @@ describe.each([tableWithUserCol, tableWithUsersCol])( const userMedataId = dbCore.generateUserMetadataID(globalUserId) it("should be able to map ro_ to global user IDs", () => { - const params: SearchParams = { + const params: RowSearchParams = { tableId, query: { equal: { @@ -60,7 +60,7 @@ describe.each([tableWithUserCol, tableWithUsersCol])( }) it("should handle array of user IDs", () => { - const params: SearchParams = { + const params: RowSearchParams = { tableId, query: { oneOf: { @@ -77,7 +77,7 @@ describe.each([tableWithUserCol, tableWithUsersCol])( it("shouldn't change any other input", () => { const email = "test@example.com" - const params: SearchParams = { + const params: RowSearchParams = { tableId, query: { equal: { diff --git a/packages/server/src/sdk/app/rows/search/types.ts b/packages/server/src/sdk/app/rows/search/types.ts new file mode 100644 index 0000000000..82620a3f73 --- /dev/null +++ b/packages/server/src/sdk/app/rows/search/types.ts @@ -0,0 +1,19 @@ +import { Format } from "../../../../api/controllers/view/exporters" +import { SearchFilters, SortOrder } from "@budibase/types" + +export interface ExportRowsParams { + tableId: string + format: Format + delimiter?: string + rowIds?: string[] + columns?: string[] + query?: SearchFilters + sort?: string + sortOrder?: SortOrder + customHeaders?: { [key: string]: string } +} + +export interface ExportRowsResult { + fileName: string + content: string +} diff --git a/packages/server/src/sdk/app/rows/search/utils.ts b/packages/server/src/sdk/app/rows/search/utils.ts index 086599665b..828925f8b6 100644 --- a/packages/server/src/sdk/app/rows/search/utils.ts +++ b/packages/server/src/sdk/app/rows/search/utils.ts @@ -1,17 +1,37 @@ import { FieldType, - 
SearchParams, Table, DocumentType, SEPARATOR, FieldSubtype, + SearchFilters, + SearchIndex, + SearchResponse, + Row, + RowSearchParams, } from "@budibase/types" -import { db as dbCore } from "@budibase/backend-core" +import { db as dbCore, context } from "@budibase/backend-core" import { utils } from "@budibase/shared-core" +export async function paginatedSearch( + query: SearchFilters, + params: RowSearchParams +): Promise<SearchResponse<Row>> { + const appId = context.getAppId() + return dbCore.paginatedSearch(appId!, SearchIndex.ROWS, query, params) +} + +export async function fullSearch( + query: SearchFilters, + params: RowSearchParams +): Promise<{ rows: Row[] }> { + const appId = context.getAppId() + return dbCore.fullSearch(appId!, SearchIndex.ROWS, query, params) +} + function findColumnInQueries( column: string, - options: SearchParams, + options: RowSearchParams, callback: (filter: any) => any ) { if (!options.query) { return } @@ -29,7 +49,7 @@ } } -function userColumnMapping(column: string, options: SearchParams) { +function userColumnMapping(column: string, options: RowSearchParams) { findColumnInQueries(column, options, (filterValue: any): any => { const isArray = Array.isArray(filterValue), isString = typeof filterValue === "string" @@ -60,7 +80,7 @@ } // maps through the search parameters to check if any of the inputs are invalid // based on the table schema, converts them to something that is valid. -export function searchInputMapping(table: Table, options: SearchParams) { +export function searchInputMapping(table: Table, options: RowSearchParams) { if (!table?.schema) { return options } } diff --git a/packages/server/src/sdk/app/rows/utils.ts b/packages/server/src/sdk/app/rows/utils.ts index 8aa017d238..b7adf7131c 100644 --- a/packages/server/src/sdk/app/rows/utils.ts +++ b/packages/server/src/sdk/app/rows/utils.ts @@ -49,7 +49,7 @@ export function getSQLClient(datasource: Datasource): SqlClient { export async function getDatasourceAndQuery( json: QueryJson -): DatasourcePlusQueryResponse { +): Promise<DatasourcePlusQueryResponse> { const datasourceId = json.endpoint.datasourceId const datasource = await sdk.datasources.get(datasourceId) return makeExternalQuery(datasource, json) diff --git a/packages/server/src/sdk/app/tables/index.ts b/packages/server/src/sdk/app/tables/index.ts index ed71498d44..fcf7051e7c 100644 --- a/packages/server/src/sdk/app/tables/index.ts +++ b/packages/server/src/sdk/app/tables/index.ts @@ -3,6 +3,7 @@ import * as getters from "./getters" import * as updates from "./update" import * as utils from "./utils" import { migrate } from "./migration" +import * as sqs from "./internal/sqs" export default { populateExternalTableSchemas, @@ -10,4 +11,5 @@ ...getters, ...utils, migrate, + sqs, } diff --git a/packages/server/src/sdk/app/tables/internal/sqs.ts b/packages/server/src/sdk/app/tables/internal/sqs.ts new file mode 100644 index 0000000000..da947c62c2 --- /dev/null +++ b/packages/server/src/sdk/app/tables/internal/sqs.ts @@ -0,0 +1,81 @@ +import { context, SQLITE_DESIGN_DOC_ID } from "@budibase/backend-core" +import { FieldType, SQLiteDefinition, SQLiteType, Table } from "@budibase/types" +import { cloneDeep } from "lodash" +import tablesSdk from "../" +import { CONSTANT_INTERNAL_ROW_COLS } from "../../../../db/utils" + +const BASIC_SQLITE_DOC: SQLiteDefinition = { + _id: SQLITE_DESIGN_DOC_ID, + language: "sqlite", + sql: { + tables: {}, + options: { + table_name: "tableId", + }, + }, +} + 
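To make the shape of this design document concrete, here is a hedged sketch of what it might contain once a single internal table has been registered via addTableToSqlite further down; the table ID and field names are hypothetical, and the SQLiteType values follow the FieldTypeMap defined next:

// illustrative only - one possible state of the SQLite design doc
const exampleDoc: SQLiteDefinition = {
  _id: SQLITE_DESIGN_DOC_ID,
  language: "sqlite",
  sql: {
    tables: {
      // keyed by table _id - tables are never addressed by name in SQLite
      ta_c59a36ee: {
        fields: {
          // constant row columns are always present, stored as TEXT
          _id: SQLiteType.TEXT,
          _rev: SQLiteType.TEXT,
          type: SQLiteType.TEXT,
          createdAt: SQLiteType.TEXT,
          updatedAt: SQLiteType.TEXT,
          tableId: SQLiteType.TEXT,
          // schema columns, mapped via FieldTypeMap
          name: SQLiteType.TEXT, // FieldType.STRING
          age: SQLiteType.REAL, // FieldType.NUMBER
        },
      },
    },
    options: { table_name: "tableId" },
  },
}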
+const FieldTypeMap: Record<FieldType, SQLiteType> = { + [FieldType.BOOLEAN]: SQLiteType.NUMERIC, + [FieldType.DATETIME]: SQLiteType.TEXT, + [FieldType.FORMULA]: SQLiteType.TEXT, + [FieldType.LONGFORM]: SQLiteType.TEXT, + [FieldType.NUMBER]: SQLiteType.REAL, + [FieldType.STRING]: SQLiteType.TEXT, + [FieldType.AUTO]: SQLiteType.TEXT, + [FieldType.OPTIONS]: SQLiteType.TEXT, + [FieldType.JSON]: SQLiteType.BLOB, + [FieldType.INTERNAL]: SQLiteType.BLOB, + [FieldType.BARCODEQR]: SQLiteType.BLOB, + [FieldType.ATTACHMENT]: SQLiteType.BLOB, + [FieldType.ARRAY]: SQLiteType.BLOB, + [FieldType.LINK]: SQLiteType.BLOB, + [FieldType.BIGINT]: SQLiteType.REAL, + // TODO: consider the difference between multi-user and single user types (subtyping) + [FieldType.BB_REFERENCE]: SQLiteType.TEXT, +} + +function mapTable(table: Table): { [key: string]: SQLiteType } { + const fields: Record<string, SQLiteType> = {} + for (let [key, column] of Object.entries(table.schema)) { + if (!FieldTypeMap[column.type]) { + throw new Error(`Unable to map type "${column.type}" to SQLite type`) + } + fields[key] = FieldTypeMap[column.type] + } + // there are some extra columns to map - add these in + const constantMap: Record<string, SQLiteType> = {} + CONSTANT_INTERNAL_ROW_COLS.forEach(col => { + constantMap[col] = SQLiteType.TEXT + }) + return { + ...constantMap, + ...fields, + } +} + +// nothing exists, need to iterate through existing tables +async function buildBaseDefinition(): Promise<SQLiteDefinition> { + const tables = await tablesSdk.getAllInternalTables() + const definition = cloneDeep(BASIC_SQLITE_DOC) + for (let table of tables) { + definition.sql.tables[table._id!] = { + fields: mapTable(table), + } + } + return definition +} + +export async function addTableToSqlite(table: Table) { + const db = context.getAppDB() + let definition: SQLiteDefinition + try { + definition = await db.get(SQLITE_DESIGN_DOC_ID) + } catch (err) { + definition = await buildBaseDefinition() + } + definition.sql.tables[table._id!] = { + fields: mapTable(table), + } + await db.put(definition) +} diff --git a/packages/server/src/startup/index.ts b/packages/server/src/startup/index.ts index 48d500a0cf..3e6f44fdb5 100644 --- a/packages/server/src/startup/index.ts +++ b/packages/server/src/startup/index.ts @@ -28,6 +28,11 @@ import fs from "fs" let STARTUP_RAN = false +if (env.isProd() && env.SQS_SEARCH_ENABLE) { + console.error("Stopping service - SQS search support is not yet available.") + process.exit(-1) +} + async function initRoutes(app: Koa) { if (!env.isTest()) { const plugin = await bullboard.init() diff --git a/packages/server/src/tests/utilities/TestConfiguration.ts b/packages/server/src/tests/utilities/TestConfiguration.ts index 22c03016c7..42f173a1f0 100644 --- a/packages/server/src/tests/utilities/TestConfiguration.ts +++ b/packages/server/src/tests/utilities/TestConfiguration.ts @@ -58,7 +58,7 @@ import { RelationshipType, Row, Screen, - SearchParams, + RowSearchParams, SourceName, Table, TableSourceType, @@ -733,7 +733,7 @@ export default class TestConfiguration { return this.api.row.fetch(tableId) } - async searchRows(tableId: string, searchParams?: SearchParams) { + async searchRows(tableId: string, searchParams?: RowSearchParams) { if (!tableId && this.table) { tableId = this.table._id! 
} diff --git a/packages/server/src/tests/utilities/api/row.ts b/packages/server/src/tests/utilities/api/row.ts index 052e66b86e..fc40262363 100644 --- a/packages/server/src/tests/utilities/api/row.ts +++ b/packages/server/src/tests/utilities/api/row.ts @@ -7,7 +7,7 @@ import { BulkImportRequest, BulkImportResponse, SearchRowResponse, - SearchParams, + RowSearchParams, DeleteRows, DeleteRow, } from "@budibase/types" @@ -135,7 +135,7 @@ export class RowAPI extends TestAPI { search = async ( sourceId: string, - params?: SearchParams, + params?: RowSearchParams, expectations?: Expectations ): Promise => { return await this._post(`/api/${sourceId}/search`, { diff --git a/packages/types/src/api/web/app/rows.ts b/packages/types/src/api/web/app/rows.ts index 0a43182dfd..5d49f01bfc 100644 --- a/packages/types/src/api/web/app/rows.ts +++ b/packages/types/src/api/web/app/rows.ts @@ -1,4 +1,4 @@ -import { SearchFilters, SearchParams } from "../../../sdk" +import { SearchFilters, RowSearchParams } from "../../../sdk" import { Row } from "../../../documents" import { PaginationResponse, SortOrder } from "../../../api" import { ReadStream } from "fs" @@ -13,7 +13,7 @@ export interface PatchRowRequest extends Row { export interface PatchRowResponse extends Row {} -export interface SearchRowRequest extends Omit {} +export interface SearchRowRequest extends Omit {} export interface SearchViewRowRequest extends Pick< diff --git a/packages/types/src/api/web/pagination.ts b/packages/types/src/api/web/pagination.ts index c61f2306ca..48588bf6a1 100644 --- a/packages/types/src/api/web/pagination.ts +++ b/packages/types/src/api/web/pagination.ts @@ -5,7 +5,7 @@ export enum SortOrder { export enum SortType { STRING = "string", - number = "number", + NUMBER = "number", } export interface BasicPaginationRequest { @@ -22,6 +22,6 @@ export interface PaginationRequest extends BasicPaginationRequest { } export interface PaginationResponse { - bookmark: string | undefined - hasNextPage: boolean + bookmark: string | number | undefined + hasNextPage?: boolean } diff --git a/packages/types/src/documents/app/index.ts b/packages/types/src/documents/app/index.ts index a58b708de3..3809fba6e5 100644 --- a/packages/types/src/documents/app/index.ts +++ b/packages/types/src/documents/app/index.ts @@ -14,4 +14,5 @@ export * from "./backup" export * from "./webhook" export * from "./links" export * from "./component" +export * from "./sqlite" export * from "./snippet" diff --git a/packages/types/src/documents/app/links.ts b/packages/types/src/documents/app/links.ts index ae7e4de78e..2a9595d99f 100644 --- a/packages/types/src/documents/app/links.ts +++ b/packages/types/src/documents/app/links.ts @@ -8,6 +8,7 @@ export interface LinkInfo { export interface LinkDocument extends Document { type: string + tableId: string doc1: LinkInfo doc2: LinkInfo } diff --git a/packages/types/src/documents/app/sqlite.ts b/packages/types/src/documents/app/sqlite.ts new file mode 100644 index 0000000000..76c47bbd74 --- /dev/null +++ b/packages/types/src/documents/app/sqlite.ts @@ -0,0 +1,24 @@ +export enum SQLiteType { + REAL = "REAL", + TEXT = "VARCHAR", + INT = "INTEGER", + BLOB = "BLOB", + NUMERIC = "NUMERIC", +} + +export interface SQLiteDefinition { + _id: string + language: string + sql: { + tables: { + [tableName: string]: { + fields: { + [key: string]: SQLiteType | { field: string; type: SQLiteType } + } + } + } + options: { + table_name: string + } + } +} diff --git a/packages/types/src/documents/app/table/table.ts 
b/packages/types/src/documents/app/table/table.ts index 3b419dd811..b284e9a840 100644 --- a/packages/types/src/documents/app/table/table.ts +++ b/packages/types/src/documents/app/table/table.ts @@ -15,6 +15,7 @@ export interface Table extends Document { sourceType: TableSourceType views?: { [key: string]: View | ViewV2 } name: string + originalName?: string sourceId: string primary?: string[] schema: TableSchema diff --git a/packages/types/src/sdk/datasources.ts b/packages/types/src/sdk/datasources.ts index 6b09959b6c..e1a012d81e 100644 --- a/packages/types/src/sdk/datasources.ts +++ b/packages/types/src/sdk/datasources.ts @@ -190,16 +190,17 @@ enum DSPlusOperation { DELETE = "delete", } -export type DatasourcePlusQueryResponse = Promise< - Row[] | Record<string, any>[] | void -> +export type DatasourcePlusQueryResponse = + | Row[] + | Record<string, any>[] + | void export interface DatasourcePlus extends IntegrationBase { // if the datasource supports the use of bindings directly (to protect against SQL injection) // this returns the format of the identifier getBindingIdentifier(): string getStringConcat(parts: string[]): string - query(json: QueryJson): DatasourcePlusQueryResponse + query(json: QueryJson): Promise<DatasourcePlusQueryResponse> buildSchema( datasourceId: string, entities: Record<string, Table> diff --git a/packages/types/src/sdk/db.ts b/packages/types/src/sdk/db.ts index 12c86bd9ba..692ddcf737 100644 --- a/packages/types/src/sdk/db.ts +++ b/packages/types/src/sdk/db.ts @@ -143,6 +143,7 @@ export interface Database { opts?: DatabasePutOpts ): Promise bulkDocs(documents: AnyDocument[]): Promise + sql<T extends Document>(sql: string): Promise<T[]> allDocs<T extends Document>( params: DatabaseQueryOpts ): Promise<AllDocsResponse<T>> diff --git a/packages/types/src/sdk/row.ts b/packages/types/src/sdk/row.ts index 7e4b89e0f7..7f3fc1f391 100644 --- a/packages/types/src/sdk/row.ts +++ b/packages/types/src/sdk/row.ts @@ -1,11 +1,13 @@ import { SortOrder, SortType } from "../api" import { SearchFilters } from "./search" +import { Row } from "../documents" +import { WithRequired } from "../shared" export interface SearchParams { - tableId: string + tableId?: string + query?: SearchFilters paginate?: boolean - query: SearchFilters - bookmark?: string + bookmark?: string | number limit?: number sort?: string sortOrder?: SortOrder @@ -13,4 +15,17 @@ version?: string disableEscaping?: boolean fields?: string[] + indexer?: () => Promise<any> + rows?: Row[] +} + +// when searching for rows we want a more extensive search type that requires certain properties +export interface RowSearchParams + extends WithRequired<SearchParams, "tableId" | "query"> {} + +export interface SearchResponse<T> { + rows: T[] + hasNextPage?: boolean + bookmark?: string | number + totalRows?: number } diff --git a/packages/types/src/sdk/search.ts b/packages/types/src/sdk/search.ts index 7a0ddaed66..9325f09eed 100644 --- a/packages/types/src/sdk/search.ts +++ b/packages/types/src/sdk/search.ts @@ -1,6 +1,7 @@ import { Operation, SortDirection } from "./datasources" import { Row, Table } from "../documents" import { SortType } from "../api" +import { Knex } from "knex" export interface SearchFilters { allOr?: boolean @@ -101,9 +102,11 @@ export interface QueryJson { tableAliases?: Record<string, string> } +export type SqlQueryBinding = Knex.Value[] + export interface SqlQuery { sql: string - bindings?: string[] + bindings?: SqlQueryBinding } export enum EmptyFilterOption { diff --git a/yarn.lock b/yarn.lock index f4c0f56414..3ac0351e64 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2594,7 +2594,7 @@ dependencies: "@fortawesome/fontawesome-common-types" "6.4.2" 
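Before the lockfile changes below, it is worth tying the type changes together: RowSearchParams now requires tableId and query, and every search path returns the generic SearchResponse. A rough sketch of how a caller drives a row search under these types (the sdk import path, table ID, and filter values are assumptions for illustration, not taken from the diff):

import { Row, RowSearchParams, SearchResponse, SortOrder } from "@budibase/types"
import sdk from "./sdk" // assumption: server-side sdk entry point

async function exampleSearch(): Promise<void> {
  const params: RowSearchParams = {
    tableId: "ta_12345679abcdef", // hypothetical internal table ID
    query: { equal: { name: "Bob" } },
    sort: "age",
    sortOrder: SortOrder.ASCENDING,
    paginate: true,
    limit: 10,
  }
  // dispatches to external, SQS (SQLite) or Lucene search depending on the
  // table type and the SQS_SEARCH_ENABLE flag
  const response: SearchResponse<Row> = await sdk.rows.search(params)
  // bookmark may now be a string or a number, and hasNextPage is optional
  console.log(response.rows.length, response.bookmark, response.hasNextPage)
}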
-"@gar/promisify@^1.1.3": +"@gar/promisify@^1.0.1", "@gar/promisify@^1.1.3": version "1.1.3" resolved "https://registry.yarnpkg.com/@gar/promisify/-/promisify-1.1.3.tgz#555193ab2e3bb3b6adc3d551c9c030d9e860daf6" integrity sha512-k2Ty1JcVojjJFwrg/ThKi2ujJ7XNLYaFGNB/bWT9wGR+oSMJHMa5w+CUq6p/pVrKeNNgA7pCqEcjSnHVoqJQFw== @@ -3403,6 +3403,21 @@ dependencies: "@lezer/common" "^1.0.0" +"@mapbox/node-pre-gyp@^1.0.0": + version "1.0.11" + resolved "https://registry.yarnpkg.com/@mapbox/node-pre-gyp/-/node-pre-gyp-1.0.11.tgz#417db42b7f5323d79e93b34a6d7a2a12c0df43fa" + integrity sha512-Yhlar6v9WQgUp/He7BdgzOz8lqMQ8sU+jkCq7Wx8Myc5YFJLbEe7lgui/V7G1qB1DJykHSGwreceSaD60Y0PUQ== + dependencies: + detect-libc "^2.0.0" + https-proxy-agent "^5.0.0" + make-dir "^3.1.0" + node-fetch "^2.6.7" + nopt "^5.0.0" + npmlog "^5.0.1" + rimraf "^3.0.2" + semver "^7.3.5" + tar "^6.1.11" + "@mapbox/node-pre-gyp@^1.0.10": version "1.0.10" resolved "https://registry.yarnpkg.com/@mapbox/node-pre-gyp/-/node-pre-gyp-1.0.10.tgz#8e6735ccebbb1581e5a7e652244cadc8a844d03c" @@ -3483,6 +3498,14 @@ "@nodelib/fs.scandir" "2.1.5" fastq "^1.6.0" +"@npmcli/fs@^1.0.0": + version "1.1.1" + resolved "https://registry.yarnpkg.com/@npmcli/fs/-/fs-1.1.1.tgz#72f719fe935e687c56a4faecf3c03d06ba593257" + integrity sha512-8KG5RD0GVP4ydEzRn/I4BNDuxDtqVbOdm8675T49OIG/NGhaK0pjPX7ZcDlvKYbA+ulvVK3ztfcF4uBdOxuJbQ== + dependencies: + "@gar/promisify" "^1.0.1" + semver "^7.3.5" + "@npmcli/fs@^2.1.0": version "2.1.2" resolved "https://registry.yarnpkg.com/@npmcli/fs/-/fs-2.1.2.tgz#a9e2541a4a2fec2e69c29b35e6060973da79b865" @@ -3520,6 +3543,14 @@ npm-bundled "^3.0.0" npm-normalize-package-bin "^3.0.0" +"@npmcli/move-file@^1.0.1": + version "1.1.2" + resolved "https://registry.yarnpkg.com/@npmcli/move-file/-/move-file-1.1.2.tgz#1a82c3e372f7cae9253eb66d72543d6b8685c674" + integrity sha512-1SUf/Cg2GzGDyaf15aR9St9TWlb+XvbZXWpDx8YKs7MLzMH/BCeopv+y9vzrzgkfykCGuWOlSu3mZhj2+FQcrg== + dependencies: + mkdirp "^1.0.4" + rimraf "^3.0.2" + "@npmcli/move-file@^2.0.0": version "2.0.1" resolved "https://registry.yarnpkg.com/@npmcli/move-file/-/move-file-2.0.1.tgz#26f6bdc379d87f75e55739bab89db525b06100e4" @@ -6714,6 +6745,13 @@ agent-base@6, agent-base@^6.0.2: dependencies: debug "4" +agentkeepalive@^4.1.3: + version "4.5.0" + resolved "https://registry.yarnpkg.com/agentkeepalive/-/agentkeepalive-4.5.0.tgz#2673ad1389b3c418c5a20c5d7364f93ca04be923" + integrity sha512-5GG/5IbQQpC9FpkRGsSvZI5QYeSCzlJHdpBQntCsuTOxhKD8lqKhrleg2Yi7yvMIf82Ycmmqln9U8V9qwEiJew== + dependencies: + humanize-ms "^1.2.1" + agentkeepalive@^4.2.1: version "4.2.1" resolved "https://registry.yarnpkg.com/agentkeepalive/-/agentkeepalive-4.2.1.tgz#a7975cbb9f83b367f06c90cc51ff28fe7d499717" @@ -7890,6 +7928,30 @@ cac@^6.7.14: resolved "https://registry.yarnpkg.com/cac/-/cac-6.7.14.tgz#804e1e6f506ee363cb0e3ccbb09cad5dd9870959" integrity sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ== +cacache@^15.2.0: + version "15.3.0" + resolved "https://registry.yarnpkg.com/cacache/-/cacache-15.3.0.tgz#dc85380fb2f556fe3dda4c719bfa0ec875a7f1eb" + integrity sha512-VVdYzXEn+cnbXpFgWs5hTT7OScegHVmLhJIR8Ufqk3iFD6A6j5iSX1KuBTfNEv4tdJWE2PzA6IVFtcLC7fN9wQ== + dependencies: + "@npmcli/fs" "^1.0.0" + "@npmcli/move-file" "^1.0.1" + chownr "^2.0.0" + fs-minipass "^2.0.0" + glob "^7.1.4" + infer-owner "^1.0.4" + lru-cache "^6.0.0" + minipass "^3.1.1" + minipass-collect "^1.0.2" + minipass-flush "^1.0.5" + minipass-pipeline "^1.2.2" + mkdirp "^1.0.3" + p-map "^4.0.0" + promise-inflight "^1.0.1" + 
rimraf "^3.0.2" + ssri "^8.0.1" + tar "^6.0.2" + unique-filename "^1.1.1" + cacache@^16.1.0: version "16.1.3" resolved "https://registry.yarnpkg.com/cacache/-/cacache-16.1.3.tgz#a02b9f34ecfaf9a78c9f4bc16fceb94d5d67a38e" @@ -10101,7 +10163,7 @@ encoding-down@^6.2.0, encoding-down@^6.3.0: level-codec "^9.0.0" level-errors "^2.0.0" -encoding@^0.1.13: +encoding@^0.1.12, encoding@^0.1.13: version "0.1.13" resolved "https://registry.yarnpkg.com/encoding/-/encoding-0.1.13.tgz#56574afdd791f54a8e9b2785c0582a2d26210fa9" integrity sha512-ETBauow1T35Y/WZMkio9jiM0Z5xjHHmJ4XmjZOq1l/dXz3lr2sRn87nJy20RupqSh1F2m3HHPSp8ShIPQJrJ3A== @@ -14243,10 +14305,10 @@ kleur@^4.1.5: resolved "https://registry.yarnpkg.com/kleur/-/kleur-4.1.5.tgz#95106101795f7050c6c650f350c683febddb1780" integrity sha512-o+NO+8WrRiQEE4/7nwRJhN1HWpVmJm511pBHUxPLtp0BUISzlBplORYSmTclCnJvQq2tKu/sgl3xVpkc7ZWuQQ== -knex@2.4.0: - version "2.4.0" - resolved "https://registry.yarnpkg.com/knex/-/knex-2.4.0.tgz#7d33cc36f320cdac98741010544b4c6a98b8b19e" - integrity sha512-i0GWwqYp1Hs2yvc2rlDO6nzzkLhwdyOZKRdsMTB8ZxOs2IXQyL5rBjSbS1krowCh6V65T4X9CJaKtuIfkaPGSA== +knex@2.4.2: + version "2.4.2" + resolved "https://registry.yarnpkg.com/knex/-/knex-2.4.2.tgz#a34a289d38406dc19a0447a78eeaf2d16ebedd61" + integrity sha512-tMI1M7a+xwHhPxjbl/H9K1kHX+VncEYcvCx5K00M16bWvpYPKAZd6QrCu68PtHAdIZNQPWZn0GVhqVBEthGWCg== dependencies: colorette "2.0.19" commander "^9.1.0" @@ -15333,6 +15395,28 @@ make-fetch-happen@^11.0.0, make-fetch-happen@^11.0.1, make-fetch-happen@^11.1.0: socks-proxy-agent "^7.0.0" ssri "^10.0.0" +make-fetch-happen@^9.1.0: + version "9.1.0" + resolved "https://registry.yarnpkg.com/make-fetch-happen/-/make-fetch-happen-9.1.0.tgz#53085a09e7971433e6765f7971bf63f4e05cb968" + integrity sha512-+zopwDy7DNknmwPQplem5lAZX/eCOzSvSNNcSKm5eVwTkOBzoktEfXsa9L23J/GIRhxRsaxzkPEhrJEpE2F4Gg== + dependencies: + agentkeepalive "^4.1.3" + cacache "^15.2.0" + http-cache-semantics "^4.1.0" + http-proxy-agent "^4.0.1" + https-proxy-agent "^5.0.0" + is-lambda "^1.0.1" + lru-cache "^6.0.0" + minipass "^3.1.3" + minipass-collect "^1.0.2" + minipass-fetch "^1.3.2" + minipass-flush "^1.0.5" + minipass-pipeline "^1.2.4" + negotiator "^0.6.2" + promise-retry "^2.0.1" + socks-proxy-agent "^6.0.0" + ssri "^8.0.0" + makeerror@1.0.12: version "1.0.12" resolved "https://registry.yarnpkg.com/makeerror/-/makeerror-1.0.12.tgz#3e5dd2079a82e812e983cc6610c4a2cb0eaa801a" @@ -15666,6 +15750,17 @@ minipass-collect@^1.0.2: dependencies: minipass "^3.0.0" +minipass-fetch@^1.3.2: + version "1.4.1" + resolved "https://registry.yarnpkg.com/minipass-fetch/-/minipass-fetch-1.4.1.tgz#d75e0091daac1b0ffd7e9d41629faff7d0c1f1b6" + integrity sha512-CGH1eblLq26Y15+Azk7ey4xh0J/XfJfrCox5LDJiKqI2Q2iwOLOKrlmIaODiSQS8d18jalF6y2K2ePUm0CmShw== + dependencies: + minipass "^3.1.0" + minipass-sized "^1.0.3" + minizlib "^2.0.0" + optionalDependencies: + encoding "^0.1.12" + minipass-fetch@^2.0.3: version "2.1.2" resolved "https://registry.yarnpkg.com/minipass-fetch/-/minipass-fetch-2.1.2.tgz#95560b50c472d81a3bc76f20ede80eaed76d8add" @@ -15703,7 +15798,7 @@ minipass-json-stream@^1.0.1: jsonparse "^1.3.1" minipass "^3.0.0" -minipass-pipeline@^1.2.4: +minipass-pipeline@^1.2.2, minipass-pipeline@^1.2.4: version "1.2.4" resolved "https://registry.yarnpkg.com/minipass-pipeline/-/minipass-pipeline-1.2.4.tgz#68472f79711c084657c067c5c6ad93cddea8214c" integrity sha512-xuIq7cIOt09RPRJ19gdi4b+RiNvDFYe5JH+ggNvBqGqpQXcru3PcRmOZuHBKWK1Txf9+cQ+HMVN4d6z46LZP7A== @@ -15717,7 +15812,7 @@ minipass-sized@^1.0.3: dependencies: minipass 
"^3.0.0" -minipass@^3.0.0, minipass@^3.1.1, minipass@^3.1.6: +minipass@^3.0.0, minipass@^3.1.0, minipass@^3.1.1, minipass@^3.1.3, minipass@^3.1.6: version "3.3.6" resolved "https://registry.yarnpkg.com/minipass/-/minipass-3.3.6.tgz#7bba384db3a1520d18c9c0e5251c3444e95dd94a" integrity sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw== @@ -15739,7 +15834,7 @@ minipass@^5.0.0: resolved "https://registry.yarnpkg.com/minipass/-/minipass-6.0.2.tgz#542844b6c4ce95b202c0995b0a471f1229de4c81" integrity sha512-MzWSV5nYVT7mVyWCwn2o7JH13w2TBRmmSqSRCKzTw+lmft9X4z+3wjvs06Tzijo5z4W/kahUCDpRXTF+ZrmF/w== -minizlib@^2.1.1, minizlib@^2.1.2: +minizlib@^2.0.0, minizlib@^2.1.1, minizlib@^2.1.2: version "2.1.2" resolved "https://registry.yarnpkg.com/minizlib/-/minizlib-2.1.2.tgz#e90d3466ba209b932451508a11ce3d3632145931" integrity sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg== @@ -16022,7 +16117,7 @@ ndjson@^1.4.3: split2 "^2.1.0" through2 "^2.0.3" -negotiator@0.6.3, negotiator@^0.6.3: +negotiator@0.6.3, negotiator@^0.6.2, negotiator@^0.6.3: version "0.6.3" resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.3.tgz#58e323a72fedc0d6f9cd4d31fe49f51479590ccd" integrity sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg== @@ -16068,6 +16163,11 @@ node-addon-api@^3.2.1: resolved "https://registry.yarnpkg.com/node-addon-api/-/node-addon-api-3.2.1.tgz#81325e0a2117789c0128dab65e7e38f07ceba161" integrity sha512-mmcei9JghVNDYydghQmeDX8KoAm0FAiYyIcUt/N4nhyAipB17pllZQDOJD2fotxABnt4Mdz+dKTO7eftLg4d0A== +node-addon-api@^4.2.0: + version "4.3.0" + resolved "https://registry.yarnpkg.com/node-addon-api/-/node-addon-api-4.3.0.tgz#52a1a0b475193e0928e98e0426a0d1254782b77f" + integrity sha512-73sE9+3UaLYYFmDsFZnqCInzPyh3MqIwZO9cw58yIqAZhONrrabrYyYe3TuIqtIiOuTXVhsGau8hcrhhwSsDIQ== + node-addon-api@^5.0.0: version "5.1.0" resolved "https://registry.yarnpkg.com/node-addon-api/-/node-addon-api-5.1.0.tgz#49da1ca055e109a23d537e9de43c09cca21eb762" @@ -16110,6 +16210,22 @@ node-gyp-build@~4.1.0: resolved "https://registry.yarnpkg.com/node-gyp-build/-/node-gyp-build-4.1.1.tgz#d7270b5d86717068d114cc57fff352f96d745feb" integrity sha512-dSq1xmcPDKPZ2EED2S6zw/b9NKsqzXRE6dVr8TVQnI3FJOTteUMuqF3Qqs6LZg+mLGYJWqQzMbIjMtJqTv87nQ== +node-gyp@8.x: + version "8.4.1" + resolved "https://registry.yarnpkg.com/node-gyp/-/node-gyp-8.4.1.tgz#3d49308fc31f768180957d6b5746845fbd429937" + integrity sha512-olTJRgUtAb/hOXG0E93wZDs5YiJlgbXxTwQAFHyNlRsXQnYzUaF2aGgujZbw+hR8aF4ZG/rST57bWMWD16jr9w== + dependencies: + env-paths "^2.2.0" + glob "^7.1.4" + graceful-fs "^4.2.6" + make-fetch-happen "^9.1.0" + nopt "^5.0.0" + npmlog "^6.0.0" + rimraf "^3.0.2" + semver "^7.3.5" + tar "^6.1.2" + which "^2.0.2" + node-gyp@^9.0.0: version "9.3.1" resolved "https://registry.yarnpkg.com/node-gyp/-/node-gyp-9.3.1.tgz#1e19f5f290afcc9c46973d68700cbd21a96192e4" @@ -17947,10 +18063,10 @@ postgres-interval@^1.1.0: dependencies: xtend "^4.0.0" -posthog-js@^1.116.6: - version "1.117.0" - resolved "https://registry.yarnpkg.com/posthog-js/-/posthog-js-1.117.0.tgz#59c3e520f6269f76ea82dce8760fbc33cdd7f48f" - integrity sha512-+I8q5G9YG6r6wOLKPT+C+AV7MRhyVFJMTJS7dfwLmmT+mkVxQ5bfC59hBkJUObOR+YRn5jn2JT/sgIslU94EZg== +posthog-js@^1.118.0: + version "1.120.0" + resolved "https://registry.yarnpkg.com/posthog-js/-/posthog-js-1.120.0.tgz#43a364a930d200e14ddf217ed340e5085fb8afcd" + integrity 
sha512-A1FOJabDjt0mFg2ePfgqeZPUQl8WY6L+fNNfN/89gqvPoI7HmHta2hY/9tDQ+mBRHjj4nah5KK/EWqJHq69fGw== dependencies: fflate "^0.4.8" preact "^10.19.3" @@ -19945,6 +20061,15 @@ socket.io@4.6.1: socket.io-adapter "~2.5.2" socket.io-parser "~4.2.1" +socks-proxy-agent@^6.0.0: + version "6.2.1" + resolved "https://registry.yarnpkg.com/socks-proxy-agent/-/socks-proxy-agent-6.2.1.tgz#2687a31f9d7185e38d530bef1944fe1f1496d6ce" + integrity sha512-a6KW9G+6B3nWZ1yB8G7pJwL3ggLy1uTzKAgCb7ttblwqdz9fMGJUuTy3uFzEP48FAs9FLILlmzDlE2JJhVQaXQ== + dependencies: + agent-base "^6.0.2" + debug "^4.3.3" + socks "^2.6.2" + socks-proxy-agent@^7.0.0: version "7.0.0" resolved "https://registry.yarnpkg.com/socks-proxy-agent/-/socks-proxy-agent-7.0.0.tgz#dc069ecf34436621acb41e3efa66ca1b5fed15b6" @@ -20139,6 +20264,17 @@ sprintf-js@~1.0.2: resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" integrity sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g== +sqlite3@5.1.6: + version "5.1.6" + resolved "https://registry.yarnpkg.com/sqlite3/-/sqlite3-5.1.6.tgz#1d4fbc90fe4fbd51e952e0a90fd8f6c2b9098e97" + integrity sha512-olYkWoKFVNSSSQNvxVUfjiVbz3YtBwTJj+mfV5zpHmqW3sELx2Cf4QCdirMelhM5Zh+KDVaKgQHqCxrqiWHybw== + dependencies: + "@mapbox/node-pre-gyp" "^1.0.0" + node-addon-api "^4.2.0" + tar "^6.1.11" + optionalDependencies: + node-gyp "8.x" + sqlstring@^2.3.2: version "2.3.3" resolved "https://registry.yarnpkg.com/sqlstring/-/sqlstring-2.3.3.tgz#2ddc21f03bce2c387ed60680e739922c65751d0c" @@ -20196,6 +20332,13 @@ ssri@^10.0.0, ssri@^10.0.1: dependencies: minipass "^4.0.0" +ssri@^8.0.0, ssri@^8.0.1: + version "8.0.1" + resolved "https://registry.yarnpkg.com/ssri/-/ssri-8.0.1.tgz#638e4e439e2ffbd2cd289776d5ca457c4f51a2af" + integrity sha512-97qShzy1AiyxvPNIkLWoGua7xoQzzPjQ0HAH4B0rWKo7SZ6USuPcrUiAFrws0UH8RrbWmgq3LMTObhPIHbbBeQ== + dependencies: + minipass "^3.1.1" + ssri@^9.0.0, ssri@^9.0.1: version "9.0.1" resolved "https://registry.yarnpkg.com/ssri/-/ssri-9.0.1.tgz#544d4c357a8d7b71a19700074b6883fcb4eae057" @@ -20822,6 +20965,13 @@ swagger-parser@10.0.2: dependencies: "@apidevtools/swagger-parser" "10.0.2" +swagger-parser@10.0.3: + version "10.0.3" + resolved "https://registry.yarnpkg.com/swagger-parser/-/swagger-parser-10.0.3.tgz#04cb01c18c3ac192b41161c77f81e79309135d03" + integrity sha512-nF7oMeL4KypldrQhac8RyHerJeGPD1p2xDh900GPvc+Nk7nWP6jX2FcC7WmkinMoAmoO774+AFXcWsW8gMWEIg== + dependencies: + "@apidevtools/swagger-parser" "10.0.3" + symbol-tree@^3.2.4: version "3.2.4" resolved "https://registry.yarnpkg.com/symbol-tree/-/symbol-tree-3.2.4.tgz#430637d248ba77e078883951fb9aa0eed7c63fa2" @@ -20920,6 +21070,18 @@ tar@6.1.15, tar@^6.1.11, tar@^6.1.2: mkdirp "^1.0.3" yallist "^4.0.0" +tar@^6.0.2: + version "6.2.1" + resolved "https://registry.yarnpkg.com/tar/-/tar-6.2.1.tgz#717549c541bc3c2af15751bea94b1dd068d4b03a" + integrity sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A== + dependencies: + chownr "^2.0.0" + fs-minipass "^2.0.0" + minipass "^5.0.0" + minizlib "^2.1.1" + mkdirp "^1.0.3" + yallist "^4.0.0" + tarn@^3.0.1, tarn@^3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/tarn/-/tarn-3.0.2.tgz#73b6140fbb881b71559c4f8bfde3d9a4b3d27693" @@ -21651,6 +21813,13 @@ uniq@^1.0.1: resolved "https://registry.yarnpkg.com/uniq/-/uniq-1.0.1.tgz#b31c5ae8254844a3a8281541ce2b04b865a734ff" integrity sha512-Gw+zz50YNKPDKXs+9d+aKAjVwpjNwqzvNpLigIruT4HA9lMZNdMqs9x07kKHB/L9WRzqp4+DlTU5s4wG2esdoA== 
+unique-filename@^1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/unique-filename/-/unique-filename-1.1.1.tgz#1d69769369ada0583103a1e6ae87681b56573230" + integrity sha512-Vmp0jIp2ln35UTXuryvjzkjGdRyf9b2lTXuSYUiPmzRcl3FDtYqAwOnTJkAngD9SWhnoJzDbTKwaOrZ+STtxNQ== + dependencies: + unique-slug "^2.0.0" + unique-filename@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/unique-filename/-/unique-filename-2.0.1.tgz#e785f8675a9a7589e0ac77e0b5c34d2eaeac6da2" @@ -21665,6 +21834,13 @@ unique-filename@^3.0.0: dependencies: unique-slug "^4.0.0" +unique-slug@^2.0.0: + version "2.0.2" + resolved "https://registry.yarnpkg.com/unique-slug/-/unique-slug-2.0.2.tgz#baabce91083fc64e945b0f3ad613e264f7cd4e6c" + integrity sha512-zoWr9ObaxALD3DOPfjPSqxt4fnZiWblxHIgeWqW8x7UqDzEtHEQLzji2cuJYQFCU6KmoJikOYAZlrTHHebjx2w== + dependencies: + imurmurhash "^0.1.4" + unique-slug@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/unique-slug/-/unique-slug-3.0.0.tgz#6d347cf57c8a7a7a6044aabd0e2d74e4d76dc7c9"