diff --git a/.github/workflows/budibase_ci.yml b/.github/workflows/budibase_ci.yml index f1ed6eb87e..1258bddcca 100644 --- a/.github/workflows/budibase_ci.yml +++ b/.github/workflows/budibase_ci.yml @@ -114,9 +114,11 @@ jobs: - name: Test run: | if ${{ env.ONLY_AFFECTED_TASKS }}; then - yarn test -- --ignore=@budibase/worker --ignore=@budibase/server --no-prefix --since=${{ env.NX_BASE_BRANCH }} -- --verbose --reporters=default --reporters=github-actions + yarn test -- --ignore=@budibase/worker --ignore=@budibase/server --ignore=@budibase/builder --no-prefix --since=${{ env.NX_BASE_BRANCH }} -- --verbose --reporters=default --reporters=github-actions + yarn test -- --scope=@budibase/builder --since=${{ env.NX_BASE_BRANCH }} else - yarn test -- --ignore=@budibase/worker --ignore=@budibase/server --no-prefix -- --verbose --reporters=default --reporters=github-actions + yarn test -- --ignore=@budibase/worker --ignore=@budibase/server --ignore=@budibase/builder --no-prefix -- --verbose --reporters=default --reporters=github-actions + yarn test -- --scope=@budibase/builder --no-prefix fi test-worker: diff --git a/lerna.json b/lerna.json index 1abfec7d7c..df1d75bc05 100644 --- a/lerna.json +++ b/lerna.json @@ -1,6 +1,6 @@ { "$schema": "node_modules/lerna/schemas/lerna-schema.json", - "version": "3.2.9", + "version": "3.2.10", "npmClient": "yarn", "concurrency": 20, "command": { diff --git a/package.json b/package.json index af39a4048d..860447fc57 100644 --- a/package.json +++ b/package.json @@ -109,7 +109,7 @@ "semver": "7.5.3", "http-cache-semantics": "4.1.1", "msgpackr": "1.10.1", - "axios": "1.6.3", + "axios": "1.7.7", "xml2js": "0.6.2", "unset-value": "2.0.1", "passport": "0.6.0", @@ -119,6 +119,5 @@ }, "engines": { "node": ">=20.0.0 <21.0.0" - }, - "dependencies": {} + } } diff --git a/packages/backend-core/package.json b/packages/backend-core/package.json index dc4108d20b..8f91d1e55d 100644 --- a/packages/backend-core/package.json +++ b/packages/backend-core/package.json @@ -33,14 +33,17 @@ "@budibase/pouchdb-replication-stream": "1.2.11", "@budibase/shared-core": "0.0.0", "@budibase/types": "0.0.0", + "@techpass/passport-openidconnect": "0.3.3", "aws-cloudfront-sign": "3.0.2", - "aws-sdk": "2.1030.0", + "aws-sdk": "2.1692.0", "bcrypt": "5.1.0", "bcryptjs": "2.4.3", "bull": "4.10.1", "correlation-id": "4.0.0", - "dd-trace": "5.2.0", + "dd-trace": "5.23.0", "dotenv": "16.0.1", + "google-auth-library": "^8.0.1", + "google-spreadsheet": "npm:@budibase/google-spreadsheet@4.1.5", "ioredis": "5.3.2", "joi": "17.6.0", "jsonwebtoken": "9.0.2", @@ -55,17 +58,14 @@ "pino": "8.11.0", "pino-http": "8.3.3", "posthog-node": "4.0.1", - "pouchdb": "7.3.0", - "pouchdb-find": "7.2.2", + "pouchdb": "9.0.0", + "pouchdb-find": "9.0.0", "redlock": "4.2.0", "rotating-file-stream": "3.1.0", "sanitize-s3-objectkey": "0.0.1", "semver": "^7.5.4", "tar-fs": "2.1.1", - "uuid": "^8.3.2", - "@techpass/passport-openidconnect": "0.3.3", - "google-auth-library": "^8.0.1", - "google-spreadsheet": "npm:@budibase/google-spreadsheet@4.1.5" + "uuid": "^8.3.2" }, "devDependencies": { "@jest/types": "^29.6.3", @@ -78,7 +78,7 @@ "@types/lodash": "4.14.200", "@types/node": "^22.9.0", "@types/node-fetch": "2.6.4", - "@types/pouchdb": "6.4.0", + "@types/pouchdb": "6.4.2", "@types/redlock": "4.0.7", "@types/semver": "7.3.7", "@types/tar-fs": "2.0.1", diff --git a/packages/pro b/packages/pro index bfeece324a..4facf6a44e 160000 --- a/packages/pro +++ b/packages/pro @@ -1 +1 @@ -Subproject commit bfeece324a03a3a5f25137bf3f8c66d5ed6103d8 
+Subproject commit 4facf6a44ee52a405794845f71584168b9db652c diff --git a/packages/server/package.json b/packages/server/package.json index 2539db3446..e68fbdaa0e 100644 --- a/packages/server/package.json +++ b/packages/server/package.json @@ -63,13 +63,13 @@ "@bull-board/koa": "5.10.2", "@elastic/elasticsearch": "7.10.0", "@google-cloud/firestore": "7.8.0", - "@koa/router": "8.0.8", + "@koa/router": "13.1.0", "@socket.io/redis-adapter": "^8.2.1", "@types/xml2js": "^0.4.14", "airtable": "0.12.2", "arangojs": "7.2.0", "archiver": "7.0.1", - "aws-sdk": "2.1030.0", + "aws-sdk": "2.1692.0", "bcrypt": "5.1.0", "bcryptjs": "2.4.3", "bson": "^6.9.0", @@ -80,8 +80,8 @@ "cookies": "0.8.0", "csvtojson": "2.0.10", "curlconverter": "3.21.0", + "dd-trace": "5.23.0", "dayjs": "^1.10.8", - "dd-trace": "5.2.0", "dotenv": "8.2.0", "form-data": "4.0.0", "global-agent": "3.0.0", @@ -89,7 +89,7 @@ "google-spreadsheet": "npm:@budibase/google-spreadsheet@4.1.5", "ioredis": "5.3.2", "isolated-vm": "^4.7.2", - "jimp": "0.22.12", + "jimp": "1.1.4", "joi": "17.6.0", "js-yaml": "4.1.0", "jsonschema": "1.4.0", @@ -104,7 +104,7 @@ "lodash": "4.17.21", "memorystream": "0.3.1", "mongodb": "6.7.0", - "mssql": "10.0.1", + "mssql": "11.0.1", "mysql2": "3.9.8", "node-fetch": "2.6.7", "object-sizeof": "2.6.1", @@ -112,15 +112,15 @@ "openapi-types": "9.3.1", "oracledb": "6.5.1", "pg": "8.10.0", - "pouchdb": "7.3.0", + "pouchdb": "9.0.0", "pouchdb-all-dbs": "1.1.1", - "pouchdb-find": "7.2.2", + "pouchdb-find": "9.0.0", "redis": "4", "semver": "^7.5.4", "serialize-error": "^7.0.1", "server-destroy": "1.0.1", - "snowflake-promise": "^4.5.0", - "socket.io": "4.7.5", + "snowflake-sdk": "^1.15.0", + "socket.io": "4.8.1", "svelte": "^4.2.10", "tar": "6.2.1", "tmp": "0.2.3", @@ -128,7 +128,7 @@ "uuid": "^8.3.2", "validate.js": "0.13.1", "worker-farm": "1.7.0", - "xml2js": "0.5.0", + "xml2js": "0.6.2", "zod-validation-error": "^3.4.0" }, "devDependencies": { @@ -142,13 +142,14 @@ "@types/jest": "29.5.5", "@types/koa": "2.13.4", "@types/koa-send": "^4.1.6", - "@types/koa__router": "8.0.8", + "@types/koa__router": "12.0.4", "@types/lodash": "4.14.200", - "@types/mssql": "9.1.4", + "@types/mssql": "9.1.5", "@types/node": "^22.9.0", "@types/node-fetch": "2.6.4", "@types/oracledb": "6.5.1", "@types/pg": "8.6.6", + "@types/pouchdb": "6.4.2", "@types/server-destroy": "1.0.1", "@types/supertest": "2.0.14", "@types/tar": "6.1.5", diff --git a/packages/server/src/api/controllers/query/import/sources/curl.ts b/packages/server/src/api/controllers/query/import/sources/curl.ts index ba85d82be0..5742d254af 100644 --- a/packages/server/src/api/controllers/query/import/sources/curl.ts +++ b/packages/server/src/api/controllers/query/import/sources/curl.ts @@ -4,7 +4,7 @@ import { URL } from "url" const curlconverter = require("curlconverter") -const parseCurl = (data: string): any => { +const parseCurl = (data: string): Promise => { const curlJson = curlconverter.toJsonString(data) return JSON.parse(curlJson) } @@ -53,8 +53,7 @@ export class Curl extends ImportSource { isSupported = async (data: string): Promise => { try { - const curl = parseCurl(data) - this.curl = curl + this.curl = parseCurl(data) } catch (err) { return false } diff --git a/packages/server/src/api/routes/tests/datasource.spec.ts b/packages/server/src/api/routes/tests/datasource.spec.ts index 6b811cc716..f3fac5b99b 100644 --- a/packages/server/src/api/routes/tests/datasource.spec.ts +++ b/packages/server/src/api/routes/tests/datasource.spec.ts @@ -164,9 +164,12 @@ 
describe("/datasources", () => { }) }) -datasourceDescribe( - { name: "%s", exclude: [DatabaseName.MONGODB, DatabaseName.SQS] }, - ({ config, dsProvider }) => { +const descriptions = datasourceDescribe({ + exclude: [DatabaseName.MONGODB, DatabaseName.SQS], +}) + +if (descriptions.length) { + describe.each(descriptions)("$dbName", ({ config, dsProvider }) => { let datasource: Datasource let rawDatasource: Datasource let client: Knex @@ -492,5 +495,5 @@ datasourceDescribe( ) }) }) - } -) + }) +} diff --git a/packages/server/src/api/routes/tests/queries/generic-sql.spec.ts b/packages/server/src/api/routes/tests/queries/generic-sql.spec.ts index aea4e5b2a2..44b21e0350 100644 --- a/packages/server/src/api/routes/tests/queries/generic-sql.spec.ts +++ b/packages/server/src/api/routes/tests/queries/generic-sql.spec.ts @@ -14,583 +14,479 @@ import { events } from "@budibase/backend-core" import { Knex } from "knex" import { generator } from "@budibase/backend-core/tests" -datasourceDescribe( - { name: "queries (%s)", exclude: [DatabaseName.MONGODB, DatabaseName.SQS] }, - ({ config, dsProvider, isOracle, isMSSQL, isPostgres }) => { - let rawDatasource: Datasource - let datasource: Datasource - let client: Knex +const descriptions = datasourceDescribe({ + exclude: [DatabaseName.MONGODB, DatabaseName.SQS], +}) - let tableName: string +if (descriptions.length) { + describe.each(descriptions)( + "queries ($dbName)", + ({ config, dsProvider, isOracle, isMSSQL, isPostgres }) => { + let rawDatasource: Datasource + let datasource: Datasource + let client: Knex - async function createQuery( - query: Partial, - expectations?: Expectations - ): Promise { - const defaultQuery: Query = { - datasourceId: datasource._id!, - name: "New Query", - parameters: [], - fields: {}, - schema: {}, - queryVerb: "read", - transformer: "return data", - readable: true, - } - if (query.fields?.sql && typeof query.fields.sql !== "string") { - throw new Error("Unable to create with knex structure in 'sql' field") - } - return await config.api.query.save( - { ...defaultQuery, ...query }, - expectations - ) - } + let tableName: string - beforeAll(async () => { - const ds = await dsProvider() - rawDatasource = ds.rawDatasource! - datasource = ds.datasource! - client = ds.client! - }) - - beforeEach(async () => { - // The Datasource API doesn ot return the password, but we need it later to - // connect to the underlying database, so we fill it back in here. 
- datasource.config!.password = rawDatasource.config!.password - - tableName = generator.guid() - - await client.schema.dropTableIfExists(tableName) - await client.schema.createTable(tableName, table => { - table.increments("id").primary() - table.string("name") - table.timestamp("birthday") - table.integer("number") - }) - - await client(tableName).insert([ - { name: "one" }, - { name: "two" }, - { name: "three" }, - { name: "four" }, - { name: "five" }, - ]) - - jest.clearAllMocks() - }) - - describe("query admin", () => { - describe("create", () => { - it("should be able to create a query", async () => { - const query = await createQuery({ - name: "New Query", - fields: { - sql: client(tableName).select("*").toString(), - }, - }) - - expect(query).toMatchObject({ - datasourceId: datasource._id!, - name: "New Query", - parameters: [], - fields: { - sql: client(tableName).select("*").toString(), - }, - schema: {}, - queryVerb: "read", - transformer: "return data", - readable: true, - createdAt: expect.any(String), - updatedAt: expect.any(String), - }) - - expect(events.query.created).toHaveBeenCalledTimes(1) - expect(events.query.updated).not.toHaveBeenCalled() - }) - }) - - describe("update", () => { - it("should be able to update a query", async () => { - const query = await createQuery({ - fields: { - sql: client(tableName).select("*").toString(), - }, - }) - - jest.clearAllMocks() - - const updatedQuery = await config.api.query.save({ - ...query, - name: "Updated Query", - fields: { - sql: client(tableName).where({ id: 1 }).toString(), - }, - }) - - expect(updatedQuery).toMatchObject({ - datasourceId: datasource._id!, - name: "Updated Query", - parameters: [], - fields: { - sql: client(tableName).where({ id: 1 }).toString(), - }, - schema: {}, - queryVerb: "read", - transformer: "return data", - readable: true, - }) - - expect(events.query.created).not.toHaveBeenCalled() - expect(events.query.updated).toHaveBeenCalledTimes(1) - }) - }) - - describe("delete", () => { - it("should be able to delete a query", async () => { - const query = await createQuery({ - fields: { - sql: client(tableName).select("*").toString(), - }, - }) - - await config.api.query.delete(query) - await config.api.query.get(query._id!, { status: 404 }) - - const queries = await config.api.query.fetch() - expect(queries).not.toContainEqual(query) - - expect(events.query.deleted).toHaveBeenCalledTimes(1) - expect(events.query.deleted).toHaveBeenCalledWith(datasource, query) - }) - }) - - describe("read", () => { - it("should be able to list queries", async () => { - const query = await createQuery({ - fields: { - sql: client(tableName).select("*").toString(), - }, - }) - - const queries = await config.api.query.fetch() - expect(queries).toContainEqual(query) - }) - - it("should strip sensitive fields for prod apps", async () => { - const query = await createQuery({ - fields: { - sql: client(tableName).select("*").toString(), - }, - }) - - await config.api.application.publish(config.getAppId()) - const prodQuery = await config.api.query.getProd(query._id!) 
- - expect(prodQuery._id).toEqual(query._id) - expect(prodQuery.fields).toBeUndefined() - expect(prodQuery.parameters).toBeUndefined() - expect(prodQuery.schema).toBeDefined() - }) - - isPostgres && - it("should be able to handle a JSON aggregate with newlines", async () => { - const jsonStatement = `COALESCE(json_build_object('name', name),'{"name":{}}'::json)` - const query = await createQuery({ - fields: { - sql: client(tableName) - .select([ - "*", - client.raw( - `${jsonStatement} as json,\n${jsonStatement} as json2` - ), - ]) - .toString(), - }, - }) - const res = await config.api.query.execute( - query._id!, - {}, - { - status: 200, - } - ) - expect(res).toBeDefined() - }) - }) - }) - - describe("preview", () => { - it("should be able to preview a query", async () => { - const request: QueryPreview = { + async function createQuery( + query: Partial, + expectations?: Expectations + ): Promise { + const defaultQuery: Query = { datasourceId: datasource._id!, - queryVerb: "read", - fields: { - sql: client(tableName).where({ id: 1 }).toString(), - }, + name: "New Query", parameters: [], - transformer: "return data", - name: datasource.name!, + fields: {}, schema: {}, + queryVerb: "read", + transformer: "return data", readable: true, } - const response = await config.api.query.preview(request) - expect(response.schema).toEqual({ - birthday: { - name: "birthday", - type: "string", - }, - id: { - name: "id", - type: "number", - }, - name: { - name: "name", - type: "string", - }, - number: { - name: "number", - type: "string", - }, - }) - expect(response.rows).toEqual([ - { - birthday: null, - id: 1, - name: "one", - number: null, - }, - ]) - expect(events.query.previewed).toHaveBeenCalledTimes(1) + if (query.fields?.sql && typeof query.fields.sql !== "string") { + throw new Error("Unable to create with knex structure in 'sql' field") + } + return await config.api.query.save( + { ...defaultQuery, ...query }, + expectations + ) + } + + beforeAll(async () => { + const ds = await dsProvider() + rawDatasource = ds.rawDatasource! + datasource = ds.datasource! + client = ds.client! }) - it("should update schema when column type changes from number to string", async () => { - const tableName = "schema_change_test" - await client.schema.dropTableIfExists(tableName) + beforeEach(async () => { + // The Datasource API doesn ot return the password, but we need it later to + // connect to the underlying database, so we fill it back in here. 
+ datasource.config!.password = rawDatasource.config!.password + tableName = generator.guid() + + await client.schema.dropTableIfExists(tableName) await client.schema.createTable(tableName, table => { table.increments("id").primary() table.string("name") - table.integer("data") + table.timestamp("birthday") + table.integer("number") }) - await client(tableName).insert({ - name: "test", - data: 123, - }) - - const firstPreview = await config.api.query.preview({ - datasourceId: datasource._id!, - name: "Test Query", - queryVerb: "read", - fields: { - sql: client(tableName).select("*").toString(), - }, - parameters: [], - transformer: "return data", - schema: {}, - readable: true, - }) - - expect(firstPreview.schema).toEqual( - expect.objectContaining({ - data: { type: "number", name: "data" }, - }) - ) - - await client(tableName).delete() - await client.schema.alterTable(tableName, table => { - table.string("data").alter() - }) - - await client(tableName).insert({ - name: "test", - data: "string value", - }) - - const secondPreview = await config.api.query.preview({ - datasourceId: datasource._id!, - name: "Test Query", - queryVerb: "read", - fields: { - sql: client(tableName).select("*").toString(), - }, - parameters: [], - transformer: "return data", - schema: firstPreview.schema, - readable: true, - }) - - expect(secondPreview.schema).toEqual( - expect.objectContaining({ - data: { type: "string", name: "data" }, - }) - ) - }) - - it("should work with static variables", async () => { - const datasource = await config.api.datasource.create({ - ...rawDatasource, - config: { - ...rawDatasource.config, - staticVariables: { - foo: "bar", - }, - }, - }) - - const request: QueryPreview = { - datasourceId: datasource._id!, - queryVerb: "read", - fields: { - sql: `SELECT '{{ foo }}' AS foo ${isOracle ? "FROM dual" : ""}`, - }, - parameters: [], - transformer: "return data", - name: datasource.name!, - schema: {}, - readable: true, - } - - const response = await config.api.query.preview(request) - - let key = isOracle ? "FOO" : "foo" - expect(response.schema).toEqual({ - [key]: { - name: key, - type: "string", - }, - }) - - expect(response.rows).toEqual([ - { - [key]: "bar", - }, + await client(tableName).insert([ + { name: "one" }, + { name: "two" }, + { name: "three" }, + { name: "four" }, + { name: "five" }, ]) + + jest.clearAllMocks() }) - it("should work with dynamic variables", async () => { - const datasource = await config.api.datasource.create(rawDatasource) - - const basedOnQuery = await createQuery({ - datasourceId: datasource._id!, - fields: { - sql: client(tableName).select("name").where({ id: 1 }).toString(), - }, - }) - - await config.api.datasource.update({ - ...datasource, - config: { - ...datasource.config, - dynamicVariables: [ - { - queryId: basedOnQuery._id!, - name: "foo", - value: "{{ data[0].name }}", + describe("query admin", () => { + describe("create", () => { + it("should be able to create a query", async () => { + const query = await createQuery({ + name: "New Query", + fields: { + sql: client(tableName).select("*").toString(), }, - ], - }, - }) + }) - const preview = await config.api.query.preview({ - datasourceId: datasource._id!, - queryVerb: "read", - fields: { - sql: `SELECT '{{ foo }}' AS foo ${isOracle ? "FROM dual" : ""}`, - }, - parameters: [], - transformer: "return data", - name: datasource.name!, - schema: {}, - readable: true, - }) - - let key = isOracle ? 
"FOO" : "foo" - expect(preview.schema).toEqual({ - [key]: { - name: key, - type: "string", - }, - }) - - expect(preview.rows).toEqual([ - { - [key]: "one", - }, - ]) - }) - - it("should handle the dynamic base query being deleted", async () => { - const datasource = await config.api.datasource.create(rawDatasource) - - const basedOnQuery = await createQuery({ - datasourceId: datasource._id!, - fields: { - sql: client(tableName).select("name").where({ id: 1 }).toString(), - }, - }) - - await config.api.datasource.update({ - ...datasource, - config: { - ...datasource.config, - dynamicVariables: [ - { - queryId: basedOnQuery._id!, - name: "foo", - value: "{{ data[0].name }}", + expect(query).toMatchObject({ + datasourceId: datasource._id!, + name: "New Query", + parameters: [], + fields: { + sql: client(tableName).select("*").toString(), }, - ], - }, + schema: {}, + queryVerb: "read", + transformer: "return data", + readable: true, + createdAt: expect.any(String), + updatedAt: expect.any(String), + }) + + expect(events.query.created).toHaveBeenCalledTimes(1) + expect(events.query.updated).not.toHaveBeenCalled() + }) }) - await config.api.query.delete(basedOnQuery) - - const preview = await config.api.query.preview({ - datasourceId: datasource._id!, - queryVerb: "read", - fields: { - sql: `SELECT '{{ foo }}' AS foo ${isOracle ? "FROM dual" : ""}`, - }, - parameters: [], - transformer: "return data", - name: datasource.name!, - schema: {}, - readable: true, - }) - - let key = isOracle ? "FOO" : "foo" - expect(preview.schema).toEqual({ - [key]: { - name: key, - type: "string", - }, - }) - - expect(preview.rows).toEqual([{ [key]: isMSSQL ? "" : null }]) - }) - }) - - describe("query verbs", () => { - describe("create", () => { - it("should be able to insert with bindings", async () => { - const query = await createQuery({ - fields: { - sql: client(tableName).insert({ name: "{{ foo }}" }).toString(), - }, - parameters: [ - { - name: "foo", - default: "bar", + describe("update", () => { + it("should be able to update a query", async () => { + const query = await createQuery({ + fields: { + sql: client(tableName).select("*").toString(), }, - ], - queryVerb: "create", + }) + + jest.clearAllMocks() + + const updatedQuery = await config.api.query.save({ + ...query, + name: "Updated Query", + fields: { + sql: client(tableName).where({ id: 1 }).toString(), + }, + }) + + expect(updatedQuery).toMatchObject({ + datasourceId: datasource._id!, + name: "Updated Query", + parameters: [], + fields: { + sql: client(tableName).where({ id: 1 }).toString(), + }, + schema: {}, + queryVerb: "read", + transformer: "return data", + readable: true, + }) + + expect(events.query.created).not.toHaveBeenCalled() + expect(events.query.updated).toHaveBeenCalledTimes(1) + }) + }) + + describe("delete", () => { + it("should be able to delete a query", async () => { + const query = await createQuery({ + fields: { + sql: client(tableName).select("*").toString(), + }, + }) + + await config.api.query.delete(query) + await config.api.query.get(query._id!, { status: 404 }) + + const queries = await config.api.query.fetch() + expect(queries).not.toContainEqual(query) + + expect(events.query.deleted).toHaveBeenCalledTimes(1) + expect(events.query.deleted).toHaveBeenCalledWith(datasource, query) + }) + }) + + describe("read", () => { + it("should be able to list queries", async () => { + const query = await createQuery({ + fields: { + sql: client(tableName).select("*").toString(), + }, + }) + + const queries = await 
config.api.query.fetch() + expect(queries).toContainEqual(query) }) - const result = await config.api.query.execute(query._id!, { - parameters: { - foo: "baz", - }, + it("should strip sensitive fields for prod apps", async () => { + const query = await createQuery({ + fields: { + sql: client(tableName).select("*").toString(), + }, + }) + + await config.api.application.publish(config.getAppId()) + const prodQuery = await config.api.query.getProd(query._id!) + + expect(prodQuery._id).toEqual(query._id) + expect(prodQuery.fields).toBeUndefined() + expect(prodQuery.parameters).toBeUndefined() + expect(prodQuery.schema).toBeDefined() }) - expect(result.data).toEqual([ - { - created: true, - }, - ]) - - const rows = await client(tableName).where({ name: "baz" }).select() - expect(rows).toHaveLength(1) - for (const row of rows) { - expect(row).toMatchObject({ name: "baz" }) - } - }) - - it("should not allow handlebars as parameters", async () => { - const query = await createQuery({ - fields: { - sql: client(tableName).insert({ name: "{{ foo }}" }).toString(), - }, - parameters: [ - { - name: "foo", - default: "bar", - }, - ], - queryVerb: "create", - }) - - await config.api.query.execute( - query._id!, - { - parameters: { - foo: "{{ 'test' }}", - }, - }, - { - status: 400, - body: { - message: - "Parameter 'foo' input contains a handlebars binding - this is not allowed.", - }, - } - ) - }) - - // Oracle doesn't automatically coerce strings into dates. - !isOracle && - it.each(["2021-02-05T12:01:00.000Z", "2021-02-05"])( - "should coerce %s into a date", - async datetimeStr => { - const date = new Date(datetimeStr) + isPostgres && + it("should be able to handle a JSON aggregate with newlines", async () => { + const jsonStatement = `COALESCE(json_build_object('name', name),'{"name":{}}'::json)` const query = await createQuery({ fields: { sql: client(tableName) - .insert({ - name: "foo", - birthday: client.raw("{{ birthday }}"), - }) + .select([ + "*", + client.raw( + `${jsonStatement} as json,\n${jsonStatement} as json2` + ), + ]) .toString(), }, - parameters: [ - { - name: "birthday", - default: "", - }, - ], - queryVerb: "create", }) + const res = await config.api.query.execute( + query._id!, + {}, + { + status: 200, + } + ) + expect(res).toBeDefined() + }) + }) + }) - const result = await config.api.query.execute(query._id!, { - parameters: { birthday: datetimeStr }, - }) + describe("preview", () => { + it("should be able to preview a query", async () => { + const request: QueryPreview = { + datasourceId: datasource._id!, + queryVerb: "read", + fields: { + sql: client(tableName).where({ id: 1 }).toString(), + }, + parameters: [], + transformer: "return data", + name: datasource.name!, + schema: {}, + readable: true, + } + const response = await config.api.query.preview(request) + expect(response.schema).toEqual({ + birthday: { + name: "birthday", + type: "string", + }, + id: { + name: "id", + type: "number", + }, + name: { + name: "name", + type: "string", + }, + number: { + name: "number", + type: "string", + }, + }) + expect(response.rows).toEqual([ + { + birthday: null, + id: 1, + name: "one", + number: null, + }, + ]) + expect(events.query.previewed).toHaveBeenCalledTimes(1) + }) - expect(result.data).toEqual([{ created: true }]) + it("should update schema when column type changes from number to string", async () => { + const tableName = "schema_change_test" + await client.schema.dropTableIfExists(tableName) - const rows = await client(tableName) - .where({ birthday: datetimeStr }) - 
.select() - expect(rows).toHaveLength(1) + await client.schema.createTable(tableName, table => { + table.increments("id").primary() + table.string("name") + table.integer("data") + }) - for (const row of rows) { - expect(new Date(row.birthday)).toEqual(date) - } - } + await client(tableName).insert({ + name: "test", + data: 123, + }) + + const firstPreview = await config.api.query.preview({ + datasourceId: datasource._id!, + name: "Test Query", + queryVerb: "read", + fields: { + sql: client(tableName).select("*").toString(), + }, + parameters: [], + transformer: "return data", + schema: {}, + readable: true, + }) + + expect(firstPreview.schema).toEqual( + expect.objectContaining({ + data: { type: "number", name: "data" }, + }) ) - it.each(["2021,02,05", "202205-1500"])( - "should not coerce %s as a date", - async notDateStr => { + await client(tableName).delete() + await client.schema.alterTable(tableName, table => { + table.string("data").alter() + }) + + await client(tableName).insert({ + name: "test", + data: "string value", + }) + + const secondPreview = await config.api.query.preview({ + datasourceId: datasource._id!, + name: "Test Query", + queryVerb: "read", + fields: { + sql: client(tableName).select("*").toString(), + }, + parameters: [], + transformer: "return data", + schema: firstPreview.schema, + readable: true, + }) + + expect(secondPreview.schema).toEqual( + expect.objectContaining({ + data: { type: "string", name: "data" }, + }) + ) + }) + + it("should work with static variables", async () => { + const datasource = await config.api.datasource.create({ + ...rawDatasource, + config: { + ...rawDatasource.config, + staticVariables: { + foo: "bar", + }, + }, + }) + + const request: QueryPreview = { + datasourceId: datasource._id!, + queryVerb: "read", + fields: { + sql: `SELECT '{{ foo }}' AS foo ${isOracle ? "FROM dual" : ""}`, + }, + parameters: [], + transformer: "return data", + name: datasource.name!, + schema: {}, + readable: true, + } + + const response = await config.api.query.preview(request) + + let key = isOracle ? "FOO" : "foo" + expect(response.schema).toEqual({ + [key]: { + name: key, + type: "string", + }, + }) + + expect(response.rows).toEqual([ + { + [key]: "bar", + }, + ]) + }) + + it("should work with dynamic variables", async () => { + const datasource = await config.api.datasource.create(rawDatasource) + + const basedOnQuery = await createQuery({ + datasourceId: datasource._id!, + fields: { + sql: client(tableName).select("name").where({ id: 1 }).toString(), + }, + }) + + await config.api.datasource.update({ + ...datasource, + config: { + ...datasource.config, + dynamicVariables: [ + { + queryId: basedOnQuery._id!, + name: "foo", + value: "{{ data[0].name }}", + }, + ], + }, + }) + + const preview = await config.api.query.preview({ + datasourceId: datasource._id!, + queryVerb: "read", + fields: { + sql: `SELECT '{{ foo }}' AS foo ${isOracle ? "FROM dual" : ""}`, + }, + parameters: [], + transformer: "return data", + name: datasource.name!, + schema: {}, + readable: true, + }) + + let key = isOracle ? 
"FOO" : "foo" + expect(preview.schema).toEqual({ + [key]: { + name: key, + type: "string", + }, + }) + + expect(preview.rows).toEqual([ + { + [key]: "one", + }, + ]) + }) + + it("should handle the dynamic base query being deleted", async () => { + const datasource = await config.api.datasource.create(rawDatasource) + + const basedOnQuery = await createQuery({ + datasourceId: datasource._id!, + fields: { + sql: client(tableName).select("name").where({ id: 1 }).toString(), + }, + }) + + await config.api.datasource.update({ + ...datasource, + config: { + ...datasource.config, + dynamicVariables: [ + { + queryId: basedOnQuery._id!, + name: "foo", + value: "{{ data[0].name }}", + }, + ], + }, + }) + + await config.api.query.delete(basedOnQuery) + + const preview = await config.api.query.preview({ + datasourceId: datasource._id!, + queryVerb: "read", + fields: { + sql: `SELECT '{{ foo }}' AS foo ${isOracle ? "FROM dual" : ""}`, + }, + parameters: [], + transformer: "return data", + name: datasource.name!, + schema: {}, + readable: true, + }) + + let key = isOracle ? "FOO" : "foo" + expect(preview.schema).toEqual({ + [key]: { + name: key, + type: "string", + }, + }) + + expect(preview.rows).toEqual([{ [key]: isMSSQL ? "" : null }]) + }) + }) + + describe("query verbs", () => { + describe("create", () => { + it("should be able to insert with bindings", async () => { const query = await createQuery({ fields: { - sql: client(tableName) - .insert({ name: client.raw("{{ name }}") }) - .toString(), + sql: client(tableName).insert({ name: "{{ foo }}" }).toString(), }, parameters: [ { - name: "name", - default: "", + name: "foo", + default: "bar", }, ], queryVerb: "create", @@ -598,274 +494,442 @@ datasourceDescribe( const result = await config.api.query.execute(query._id!, { parameters: { - name: notDateStr, + foo: "baz", }, }) - expect(result.data).toEqual([{ created: true }]) + expect(result.data).toEqual([ + { + created: true, + }, + ]) - const rows = await client(tableName) - .where({ name: notDateStr }) - .select() + const rows = await client(tableName).where({ name: "baz" }).select() expect(rows).toHaveLength(1) - } - ) - }) - - describe("read", () => { - it("should execute a query", async () => { - const query = await createQuery({ - fields: { - sql: client(tableName).select("*").orderBy("id").toString(), - }, + for (const row of rows) { + expect(row).toMatchObject({ name: "baz" }) + } }) - const result = await config.api.query.execute(query._id!) + it("should not allow handlebars as parameters", async () => { + const query = await createQuery({ + fields: { + sql: client(tableName).insert({ name: "{{ foo }}" }).toString(), + }, + parameters: [ + { + name: "foo", + default: "bar", + }, + ], + queryVerb: "create", + }) - expect(result.data).toEqual([ - { - id: 1, - name: "one", - birthday: null, - number: null, - }, - { - id: 2, - name: "two", - birthday: null, - number: null, - }, - { - id: 3, - name: "three", - birthday: null, - number: null, - }, - { - id: 4, - name: "four", - birthday: null, - number: null, - }, - { - id: 5, - name: "five", - birthday: null, - number: null, - }, - ]) + await config.api.query.execute( + query._id!, + { + parameters: { + foo: "{{ 'test' }}", + }, + }, + { + status: 400, + body: { + message: + "Parameter 'foo' input contains a handlebars binding - this is not allowed.", + }, + } + ) + }) + + // Oracle doesn't automatically coerce strings into dates. 
+ !isOracle && + it.each(["2021-02-05T12:01:00.000Z", "2021-02-05"])( + "should coerce %s into a date", + async datetimeStr => { + const date = new Date(datetimeStr) + const query = await createQuery({ + fields: { + sql: client(tableName) + .insert({ + name: "foo", + birthday: client.raw("{{ birthday }}"), + }) + .toString(), + }, + parameters: [ + { + name: "birthday", + default: "", + }, + ], + queryVerb: "create", + }) + + const result = await config.api.query.execute(query._id!, { + parameters: { birthday: datetimeStr }, + }) + + expect(result.data).toEqual([{ created: true }]) + + const rows = await client(tableName) + .where({ birthday: datetimeStr }) + .select() + expect(rows).toHaveLength(1) + + for (const row of rows) { + expect(new Date(row.birthday)).toEqual(date) + } + } + ) + + it.each(["2021,02,05", "202205-1500"])( + "should not coerce %s as a date", + async notDateStr => { + const query = await createQuery({ + fields: { + sql: client(tableName) + .insert({ name: client.raw("{{ name }}") }) + .toString(), + }, + parameters: [ + { + name: "name", + default: "", + }, + ], + queryVerb: "create", + }) + + const result = await config.api.query.execute(query._id!, { + parameters: { + name: notDateStr, + }, + }) + + expect(result.data).toEqual([{ created: true }]) + + const rows = await client(tableName) + .where({ name: notDateStr }) + .select() + expect(rows).toHaveLength(1) + } + ) }) - it("should be able to transform a query", async () => { - const query = await createQuery({ - fields: { - sql: client(tableName).where({ id: 1 }).select("*").toString(), - }, - transformer: ` + describe("read", () => { + it("should execute a query", async () => { + const query = await createQuery({ + fields: { + sql: client(tableName).select("*").orderBy("id").toString(), + }, + }) + + const result = await config.api.query.execute(query._id!) + + expect(result.data).toEqual([ + { + id: 1, + name: "one", + birthday: null, + number: null, + }, + { + id: 2, + name: "two", + birthday: null, + number: null, + }, + { + id: 3, + name: "three", + birthday: null, + number: null, + }, + { + id: 4, + name: "four", + birthday: null, + number: null, + }, + { + id: 5, + name: "five", + birthday: null, + number: null, + }, + ]) + }) + + it("should be able to transform a query", async () => { + const query = await createQuery({ + fields: { + sql: client(tableName).where({ id: 1 }).select("*").toString(), + }, + transformer: ` data[0].id = data[0].id + 1; return data; `, + }) + + const result = await config.api.query.execute(query._id!) + + expect(result.data).toEqual([ + { + id: 2, + name: "one", + birthday: null, + number: null, + }, + ]) }) - const result = await config.api.query.execute(query._id!) 
+ it("should coerce numeric bindings", async () => { + const query = await createQuery({ + fields: { + sql: client(tableName) + .where({ id: client.raw("{{ id }}") }) + .select("*") + .toString(), + }, + parameters: [ + { + name: "id", + default: "", + }, + ], + }) - expect(result.data).toEqual([ - { - id: 2, - name: "one", - birthday: null, - number: null, - }, - ]) + const result = await config.api.query.execute(query._id!, { + parameters: { + id: "1", + }, + }) + + expect(result.data).toEqual([ + { + id: 1, + name: "one", + birthday: null, + number: null, + }, + ]) + }) }) - it("should coerce numeric bindings", async () => { + describe("update", () => { + it("should be able to update rows", async () => { + const query = await createQuery({ + fields: { + sql: client(tableName) + .update({ name: client.raw("{{ name }}") }) + .where({ id: client.raw("{{ id }}") }) + .toString(), + }, + parameters: [ + { + name: "id", + default: "", + }, + { + name: "name", + default: "updated", + }, + ], + queryVerb: "update", + }) + + await config.api.query.execute(query._id!, { + parameters: { + id: "1", + name: "foo", + }, + }) + + const rows = await client(tableName).where({ id: 1 }).select() + expect(rows).toEqual([ + { id: 1, name: "foo", birthday: null, number: null }, + ]) + }) + + it("should be able to execute an update that updates no rows", async () => { + const query = await createQuery({ + fields: { + sql: client(tableName) + .update({ name: "updated" }) + .where({ id: 100 }) + .toString(), + }, + queryVerb: "update", + }) + + await config.api.query.execute(query._id!) + + const rows = await client(tableName).select() + for (const row of rows) { + expect(row.name).not.toEqual("updated") + } + }) + + it("should be able to execute a delete that deletes no rows", async () => { + const query = await createQuery({ + fields: { + sql: client(tableName).where({ id: 100 }).delete().toString(), + }, + queryVerb: "delete", + }) + + await config.api.query.execute(query._id!) 
+ + const rows = await client(tableName).select() + expect(rows).toHaveLength(5) + }) + }) + + describe("delete", () => { + it("should be able to delete rows", async () => { + const query = await createQuery({ + fields: { + sql: client(tableName) + .where({ id: client.raw("{{ id }}") }) + .delete() + .toString(), + }, + parameters: [ + { + name: "id", + default: "", + }, + ], + queryVerb: "delete", + }) + + await config.api.query.execute(query._id!, { + parameters: { + id: "1", + }, + }) + + const rows = await client(tableName).where({ id: 1 }).select() + expect(rows).toHaveLength(0) + }) + }) + }) + + describe("query through datasource", () => { + it("should be able to query the datasource", async () => { + const datasource = await config.api.datasource.create(rawDatasource) + + const entityId = tableName + await config.api.datasource.update({ + ...datasource, + entities: { + [entityId]: { + name: entityId, + schema: {}, + type: "table", + primary: ["id"], + sourceId: datasource._id!, + sourceType: TableSourceType.EXTERNAL, + }, + }, + }) + + const res = await config.api.datasource.query({ + endpoint: { + datasourceId: datasource._id!, + operation: Operation.READ, + entityId, + }, + resource: { + fields: ["id", "name"], + }, + filters: { + string: { + name: "two", + }, + }, + }) + expect(res).toHaveLength(1) + expect(res[0]).toEqual({ + id: 2, + name: "two", + // the use of table.* introduces the possibility of nulls being returned + birthday: null, + number: null, + }) + }) + + // this parameter really only impacts SQL queries + describe("confirm nullDefaultSupport", () => { + let queryParams: Partial + beforeAll(async () => { + queryParams = { + fields: { + sql: client(tableName) + .insert({ + name: client.raw("{{ bindingName }}"), + number: client.raw("{{ bindingNumber }}"), + }) + .toString(), + }, + parameters: [ + { + name: "bindingName", + default: "", + }, + { + name: "bindingNumber", + default: "", + }, + ], + queryVerb: "create", + } + }) + + it("should error for old queries", async () => { + const query = await createQuery(queryParams) + await config.api.query.save({ ...query, nullDefaultSupport: false }) + let error: string | undefined + try { + await config.api.query.execute(query._id!, { + parameters: { + bindingName: "testing", + }, + }) + } catch (err: any) { + error = err.message + } + if (isMSSQL || isOracle) { + expect(error).toBeUndefined() + } else { + expect(error).toBeDefined() + expect(error).toContain("integer") + } + }) + + it("should not error for new queries", async () => { + const query = await createQuery(queryParams) + const results = await config.api.query.execute(query._id!, { + parameters: { + bindingName: "testing", + }, + }) + expect(results).toEqual({ data: [{ created: true }] }) + }) + }) + }) + + describe("edge cases", () => { + it("should find rows with a binding containing a slash", async () => { + const slashValue = "1/10" + await client(tableName).insert([{ name: slashValue }]) + const query = await createQuery({ fields: { sql: client(tableName) - .where({ id: client.raw("{{ id }}") }) .select("*") - .toString(), - }, - parameters: [ - { - name: "id", - default: "", - }, - ], - }) - - const result = await config.api.query.execute(query._id!, { - parameters: { - id: "1", - }, - }) - - expect(result.data).toEqual([ - { - id: 1, - name: "one", - birthday: null, - number: null, - }, - ]) - }) - }) - - describe("update", () => { - it("should be able to update rows", async () => { - const query = await createQuery({ - fields: { - sql: client(tableName) 
- .update({ name: client.raw("{{ name }}") }) - .where({ id: client.raw("{{ id }}") }) - .toString(), - }, - parameters: [ - { - name: "id", - default: "", - }, - { - name: "name", - default: "updated", - }, - ], - queryVerb: "update", - }) - - await config.api.query.execute(query._id!, { - parameters: { - id: "1", - name: "foo", - }, - }) - - const rows = await client(tableName).where({ id: 1 }).select() - expect(rows).toEqual([ - { id: 1, name: "foo", birthday: null, number: null }, - ]) - }) - - it("should be able to execute an update that updates no rows", async () => { - const query = await createQuery({ - fields: { - sql: client(tableName) - .update({ name: "updated" }) - .where({ id: 100 }) - .toString(), - }, - queryVerb: "update", - }) - - await config.api.query.execute(query._id!) - - const rows = await client(tableName).select() - for (const row of rows) { - expect(row.name).not.toEqual("updated") - } - }) - - it("should be able to execute a delete that deletes no rows", async () => { - const query = await createQuery({ - fields: { - sql: client(tableName).where({ id: 100 }).delete().toString(), - }, - queryVerb: "delete", - }) - - await config.api.query.execute(query._id!) - - const rows = await client(tableName).select() - expect(rows).toHaveLength(5) - }) - }) - - describe("delete", () => { - it("should be able to delete rows", async () => { - const query = await createQuery({ - fields: { - sql: client(tableName) - .where({ id: client.raw("{{ id }}") }) - .delete() - .toString(), - }, - parameters: [ - { - name: "id", - default: "", - }, - ], - queryVerb: "delete", - }) - - await config.api.query.execute(query._id!, { - parameters: { - id: "1", - }, - }) - - const rows = await client(tableName).where({ id: 1 }).select() - expect(rows).toHaveLength(0) - }) - }) - }) - - describe("query through datasource", () => { - it("should be able to query the datasource", async () => { - const datasource = await config.api.datasource.create(rawDatasource) - - const entityId = tableName - await config.api.datasource.update({ - ...datasource, - entities: { - [entityId]: { - name: entityId, - schema: {}, - type: "table", - primary: ["id"], - sourceId: datasource._id!, - sourceType: TableSourceType.EXTERNAL, - }, - }, - }) - - const res = await config.api.datasource.query({ - endpoint: { - datasourceId: datasource._id!, - operation: Operation.READ, - entityId, - }, - resource: { - fields: ["id", "name"], - }, - filters: { - string: { - name: "two", - }, - }, - }) - expect(res).toHaveLength(1) - expect(res[0]).toEqual({ - id: 2, - name: "two", - // the use of table.* introduces the possibility of nulls being returned - birthday: null, - number: null, - }) - }) - - // this parameter really only impacts SQL queries - describe("confirm nullDefaultSupport", () => { - let queryParams: Partial - beforeAll(async () => { - queryParams = { - fields: { - sql: client(tableName) - .insert({ - name: client.raw("{{ bindingName }}"), - number: client.raw("{{ bindingNumber }}"), - }) + .where("name", "=", client.raw("{{ bindingName }}")) .toString(), }, parameters: [ @@ -873,76 +937,18 @@ datasourceDescribe( name: "bindingName", default: "", }, - { - name: "bindingNumber", - default: "", - }, ], - queryVerb: "create", - } - }) - - it("should error for old queries", async () => { - const query = await createQuery(queryParams) - await config.api.query.save({ ...query, nullDefaultSupport: false }) - let error: string | undefined - try { - await config.api.query.execute(query._id!, { - parameters: { - 
bindingName: "testing", - }, - }) - } catch (err: any) { - error = err.message - } - if (isMSSQL || isOracle) { - expect(error).toBeUndefined() - } else { - expect(error).toBeDefined() - expect(error).toContain("integer") - } - }) - - it("should not error for new queries", async () => { - const query = await createQuery(queryParams) + queryVerb: "read", + }) const results = await config.api.query.execute(query._id!, { parameters: { - bindingName: "testing", + bindingName: slashValue, }, }) - expect(results).toEqual({ data: [{ created: true }] }) + expect(results).toBeDefined() + expect(results.data.length).toEqual(1) }) }) - }) - - describe("edge cases", () => { - it("should find rows with a binding containing a slash", async () => { - const slashValue = "1/10" - await client(tableName).insert([{ name: slashValue }]) - - const query = await createQuery({ - fields: { - sql: client(tableName) - .select("*") - .where("name", "=", client.raw("{{ bindingName }}")) - .toString(), - }, - parameters: [ - { - name: "bindingName", - default: "", - }, - ], - queryVerb: "read", - }) - const results = await config.api.query.execute(query._id!, { - parameters: { - bindingName: slashValue, - }, - }) - expect(results).toBeDefined() - expect(results.data.length).toEqual(1) - }) - }) - } -) + } + ) +} diff --git a/packages/server/src/api/routes/tests/queries/mongodb.spec.ts b/packages/server/src/api/routes/tests/queries/mongodb.spec.ts index 44d1553f9b..a37957fe7e 100644 --- a/packages/server/src/api/routes/tests/queries/mongodb.spec.ts +++ b/packages/server/src/api/routes/tests/queries/mongodb.spec.ts @@ -9,465 +9,698 @@ import { generator } from "@budibase/backend-core/tests" const expectValidId = expect.stringMatching(/^\w{24}$/) const expectValidBsonObjectId = expect.any(BSON.ObjectId) -datasourceDescribe( - { name: "/queries", only: [DatabaseName.MONGODB] }, - ({ config, dsProvider }) => { - let collection: string - let datasource: Datasource +const descriptions = datasourceDescribe({ only: [DatabaseName.MONGODB] }) - async function createQuery(query: Partial): Promise { - const defaultQuery: Query = { - datasourceId: datasource._id!, - name: "New Query", - parameters: [], - fields: {}, - schema: {}, - queryVerb: "read", - transformer: "return data", - readable: true, - } - const combinedQuery = { ...defaultQuery, ...query } - if ( - combinedQuery.fields && - combinedQuery.fields.extra && - !combinedQuery.fields.extra.collection - ) { - combinedQuery.fields.extra.collection = collection - } - return await config.api.query.save(combinedQuery) - } +if (descriptions.length) { + describe.each(descriptions)( + "/queries ($dbName)", + ({ config, dsProvider }) => { + let collection: string + let datasource: Datasource - async function withClient( - callback: (client: MongoClient) => Promise - ): Promise { - const client = new MongoClient(datasource.config!.connectionString) - await client.connect() - try { - return await callback(client) - } finally { - await client.close() - } - } - - async function withDb(callback: (db: Db) => Promise): Promise { - return await withClient(async client => { - return await callback(client.db(datasource.config!.db)) - }) - } - - async function withCollection( - callback: (collection: Collection) => Promise - ): Promise { - return await withDb(async db => { - return await callback(db.collection(collection)) - }) - } - - beforeAll(async () => { - const ds = await dsProvider() - datasource = ds.datasource! 
- }) - - beforeEach(async () => { - collection = generator.guid() - await withCollection(async collection => { - await collection.insertMany([ - { name: "one" }, - { name: "two" }, - { name: "three" }, - { name: "four" }, - { name: "five" }, - ]) - }) - }) - - afterEach(async () => { - await withCollection(collection => collection.drop()) - }) - - describe("preview", () => { - it("should generate a nested schema with an empty array", async () => { - const name = generator.guid() - await withCollection( - async collection => await collection.insertOne({ name, nested: [] }) - ) - - const preview = await config.api.query.preview({ + async function createQuery(query: Partial): Promise { + const defaultQuery: Query = { + datasourceId: datasource._id!, name: "New Query", - datasourceId: datasource._id!, - fields: { - json: { - name: { $eq: name }, - }, - extra: { - collection, - actionType: "findOne", - }, - }, + parameters: [], + fields: {}, schema: {}, queryVerb: "read", - parameters: [], transformer: "return data", readable: true, - }) - - expect(preview).toEqual({ - nestedSchemaFields: {}, - rows: [{ _id: expect.any(String), name, nested: [] }], - schema: { - _id: { - type: "string", - name: "_id", - }, - name: { - type: "string", - name: "name", - }, - nested: { - type: "array", - name: "nested", - }, - }, - }) - }) - - it("should update schema when structure changes from object to array", async () => { - const name = generator.guid() - - await withCollection(async collection => { - await collection.insertOne({ name, field: { subfield: "value" } }) - }) - - const firstPreview = await config.api.query.preview({ - name: "Test Query", - datasourceId: datasource._id!, - fields: { - json: { name: { $eq: name } }, - extra: { - collection, - actionType: "findOne", - }, - }, - schema: {}, - queryVerb: "read", - parameters: [], - transformer: "return data", - readable: true, - }) - - expect(firstPreview.schema).toEqual( - expect.objectContaining({ - field: { type: "json", name: "field" }, - }) - ) - - await withCollection(async collection => { - await collection.updateOne( - { name }, - { $set: { field: ["value1", "value2"] } } - ) - }) - - const secondPreview = await config.api.query.preview({ - name: "Test Query", - datasourceId: datasource._id!, - fields: { - json: { name: { $eq: name } }, - extra: { - collection, - actionType: "findOne", - }, - }, - schema: firstPreview.schema, - queryVerb: "read", - parameters: [], - transformer: "return data", - readable: true, - }) - - expect(secondPreview.schema).toEqual( - expect.objectContaining({ - field: { type: "array", name: "field" }, - }) - ) - }) - - it("should generate a nested schema based on all of the nested items", async () => { - const name = generator.guid() - const item = { - name, - contacts: [ - { - address: "123 Lane", - }, - { - address: "456 Drive", - }, - { - postcode: "BT1 12N", - lat: 54.59, - long: -5.92, - }, - { - city: "Belfast", - }, - { - address: "789 Avenue", - phoneNumber: "0800-999-5555", - }, - { - name: "Name", - isActive: false, - }, - ], } + const combinedQuery = { ...defaultQuery, ...query } + if ( + combinedQuery.fields && + combinedQuery.fields.extra && + !combinedQuery.fields.extra.collection + ) { + combinedQuery.fields.extra.collection = collection + } + return await config.api.query.save(combinedQuery) + } - await withCollection(collection => collection.insertOne(item)) + async function withClient( + callback: (client: MongoClient) => Promise + ): Promise { + const client = new 
MongoClient(datasource.config!.connectionString) + await client.connect() + try { + return await callback(client) + } finally { + await client.close() + } + } - const preview = await config.api.query.preview({ - name: "New Query", - datasourceId: datasource._id!, - fields: { - json: { - name: { $eq: name }, - }, - extra: { - collection, - actionType: "findOne", - }, - }, - schema: {}, - queryVerb: "read", - parameters: [], - transformer: "return data", - readable: true, + async function withDb(callback: (db: Db) => Promise): Promise { + return await withClient(async client => { + return await callback(client.db(datasource.config!.db)) }) + } - expect(preview).toEqual({ - nestedSchemaFields: { - contacts: { - address: { + async function withCollection( + callback: (collection: Collection) => Promise + ): Promise { + return await withDb(async db => { + return await callback(db.collection(collection)) + }) + } + + beforeAll(async () => { + const ds = await dsProvider() + datasource = ds.datasource! + }) + + beforeEach(async () => { + collection = generator.guid() + await withCollection(async collection => { + await collection.insertMany([ + { name: "one" }, + { name: "two" }, + { name: "three" }, + { name: "four" }, + { name: "five" }, + ]) + }) + }) + + afterEach(async () => { + await withCollection(collection => collection.drop()) + }) + + describe("preview", () => { + it("should generate a nested schema with an empty array", async () => { + const name = generator.guid() + await withCollection( + async collection => await collection.insertOne({ name, nested: [] }) + ) + + const preview = await config.api.query.preview({ + name: "New Query", + datasourceId: datasource._id!, + fields: { + json: { + name: { $eq: name }, + }, + extra: { + collection, + actionType: "findOne", + }, + }, + schema: {}, + queryVerb: "read", + parameters: [], + transformer: "return data", + readable: true, + }) + + expect(preview).toEqual({ + nestedSchemaFields: {}, + rows: [{ _id: expect.any(String), name, nested: [] }], + schema: { + _id: { type: "string", - name: "address", - }, - postcode: { - type: "string", - name: "postcode", - }, - lat: { - type: "number", - name: "lat", - }, - long: { - type: "number", - name: "long", - }, - city: { - type: "string", - name: "city", - }, - phoneNumber: { - type: "string", - name: "phoneNumber", + name: "_id", }, name: { type: "string", name: "name", }, - isActive: { - type: "boolean", - name: "isActive", + nested: { + type: "array", + name: "nested", }, }, - }, - rows: [{ ...item, _id: expect.any(String) }], - schema: { - _id: { type: "string", name: "_id" }, - name: { type: "string", name: "name" }, - contacts: { type: "json", name: "contacts", subtype: "array" }, - }, - }) - }) - }) - - describe("execute", () => { - it("a count query", async () => { - const query = await createQuery({ - fields: { - json: {}, - extra: { - actionType: "count", - }, - }, + }) }) - const result = await config.api.query.execute(query._id!) 
+ it("should update schema when structure changes from object to array", async () => { + const name = generator.guid() - expect(result.data).toEqual([{ value: 5 }]) - }) + await withCollection(async collection => { + await collection.insertOne({ name, field: { subfield: "value" } }) + }) - it("should be able to updateOne by ObjectId", async () => { - const insertResult = await withCollection(c => - c.insertOne({ name: "one" }) - ) - const query = await createQuery({ - fields: { - json: { - filter: { - _id: { $eq: `ObjectId("${insertResult.insertedId}")` }, + const firstPreview = await config.api.query.preview({ + name: "Test Query", + datasourceId: datasource._id!, + fields: { + json: { name: { $eq: name } }, + extra: { + collection, + actionType: "findOne", }, - update: { $set: { name: "newName" } }, }, - extra: { - actionType: "updateOne", + schema: {}, + queryVerb: "read", + parameters: [], + transformer: "return data", + readable: true, + }) + + expect(firstPreview.schema).toEqual( + expect.objectContaining({ + field: { type: "json", name: "field" }, + }) + ) + + await withCollection(async collection => { + await collection.updateOne( + { name }, + { $set: { field: ["value1", "value2"] } } + ) + }) + + const secondPreview = await config.api.query.preview({ + name: "Test Query", + datasourceId: datasource._id!, + fields: { + json: { name: { $eq: name } }, + extra: { + collection, + actionType: "findOne", + }, }, - }, - queryVerb: "update", + schema: firstPreview.schema, + queryVerb: "read", + parameters: [], + transformer: "return data", + readable: true, + }) + + expect(secondPreview.schema).toEqual( + expect.objectContaining({ + field: { type: "array", name: "field" }, + }) + ) }) - const result = await config.api.query.execute(query._id!) + it("should generate a nested schema based on all of the nested items", async () => { + const name = generator.guid() + const item = { + name, + contacts: [ + { + address: "123 Lane", + }, + { + address: "456 Drive", + }, + { + postcode: "BT1 12N", + lat: 54.59, + long: -5.92, + }, + { + city: "Belfast", + }, + { + address: "789 Avenue", + phoneNumber: "0800-999-5555", + }, + { + name: "Name", + isActive: false, + }, + ], + } - expect(result.data).toEqual([ - { - acknowledged: true, - matchedCount: 1, - modifiedCount: 1, - upsertedCount: 0, - upsertedId: null, - }, - ]) + await withCollection(collection => collection.insertOne(item)) - await withCollection(async collection => { - const doc = await collection.findOne({ name: { $eq: "newName" } }) - expect(doc).toEqual({ - _id: insertResult.insertedId, - name: "newName", + const preview = await config.api.query.preview({ + name: "New Query", + datasourceId: datasource._id!, + fields: { + json: { + name: { $eq: name }, + }, + extra: { + collection, + actionType: "findOne", + }, + }, + schema: {}, + queryVerb: "read", + parameters: [], + transformer: "return data", + readable: true, + }) + + expect(preview).toEqual({ + nestedSchemaFields: { + contacts: { + address: { + type: "string", + name: "address", + }, + postcode: { + type: "string", + name: "postcode", + }, + lat: { + type: "number", + name: "lat", + }, + long: { + type: "number", + name: "long", + }, + city: { + type: "string", + name: "city", + }, + phoneNumber: { + type: "string", + name: "phoneNumber", + }, + name: { + type: "string", + name: "name", + }, + isActive: { + type: "boolean", + name: "isActive", + }, + }, + }, + rows: [{ ...item, _id: expect.any(String) }], + schema: { + _id: { type: "string", name: "_id" }, + name: { type: 
"string", name: "name" }, + contacts: { type: "json", name: "contacts", subtype: "array" }, + }, }) }) }) - it("a count query with a transformer", async () => { - const query = await createQuery({ - fields: { - json: {}, - extra: { - actionType: "count", + describe("execute", () => { + it("a count query", async () => { + const query = await createQuery({ + fields: { + json: {}, + extra: { + actionType: "count", + }, }, - }, - transformer: "return data + 1", + }) + + const result = await config.api.query.execute(query._id!) + + expect(result.data).toEqual([{ value: 5 }]) }) - const result = await config.api.query.execute(query._id!) - - expect(result.data).toEqual([{ value: 6 }]) - }) - - it("a find query", async () => { - const query = await createQuery({ - fields: { - json: {}, - extra: { - actionType: "find", + it("should be able to updateOne by ObjectId", async () => { + const insertResult = await withCollection(c => + c.insertOne({ name: "one" }) + ) + const query = await createQuery({ + fields: { + json: { + filter: { + _id: { $eq: `ObjectId("${insertResult.insertedId}")` }, + }, + update: { $set: { name: "newName" } }, + }, + extra: { + actionType: "updateOne", + }, }, - }, + queryVerb: "update", + }) + + const result = await config.api.query.execute(query._id!) + + expect(result.data).toEqual([ + { + acknowledged: true, + matchedCount: 1, + modifiedCount: 1, + upsertedCount: 0, + upsertedId: null, + }, + ]) + + await withCollection(async collection => { + const doc = await collection.findOne({ name: { $eq: "newName" } }) + expect(doc).toEqual({ + _id: insertResult.insertedId, + name: "newName", + }) + }) }) - const result = await config.api.query.execute(query._id!) - - expect(result.data).toEqual([ - { _id: expectValidId, name: "one" }, - { _id: expectValidId, name: "two" }, - { _id: expectValidId, name: "three" }, - { _id: expectValidId, name: "four" }, - { _id: expectValidId, name: "five" }, - ]) - }) - - it("a findOne query", async () => { - const query = await createQuery({ - fields: { - json: {}, - extra: { - actionType: "findOne", + it("a count query with a transformer", async () => { + const query = await createQuery({ + fields: { + json: {}, + extra: { + actionType: "count", + }, }, - }, + transformer: "return data + 1", + }) + + const result = await config.api.query.execute(query._id!) + + expect(result.data).toEqual([{ value: 6 }]) }) - const result = await config.api.query.execute(query._id!) - - expect(result.data).toEqual([{ _id: expectValidId, name: "one" }]) - }) - - it("a findOneAndUpdate query", async () => { - const query = await createQuery({ - fields: { - json: { - filter: { name: { $eq: "one" } }, - update: { $set: { name: "newName" } }, + it("a find query", async () => { + const query = await createQuery({ + fields: { + json: {}, + extra: { + actionType: "find", + }, }, - extra: { - actionType: "findOneAndUpdate", - }, - }, + }) + + const result = await config.api.query.execute(query._id!) + + expect(result.data).toEqual([ + { _id: expectValidId, name: "one" }, + { _id: expectValidId, name: "two" }, + { _id: expectValidId, name: "three" }, + { _id: expectValidId, name: "four" }, + { _id: expectValidId, name: "five" }, + ]) }) - const result = await config.api.query.execute(query._id!) 
+ it("a findOne query", async () => { + const query = await createQuery({ + fields: { + json: {}, + extra: { + actionType: "findOne", + }, + }, + }) - expect(result.data).toEqual([ - { - lastErrorObject: { n: 1, updatedExisting: true }, - ok: 1, - value: { _id: expectValidId, name: "one" }, - }, - ]) + const result = await config.api.query.execute(query._id!) - await withCollection(async collection => { - expect(await collection.countDocuments()).toBe(5) + expect(result.data).toEqual([{ _id: expectValidId, name: "one" }]) + }) - const doc = await collection.findOne({ name: { $eq: "newName" } }) - expect(doc).toEqual({ - _id: expectValidBsonObjectId, - name: "newName", + it("a findOneAndUpdate query", async () => { + const query = await createQuery({ + fields: { + json: { + filter: { name: { $eq: "one" } }, + update: { $set: { name: "newName" } }, + }, + extra: { + actionType: "findOneAndUpdate", + }, + }, + }) + + const result = await config.api.query.execute(query._id!) + + expect(result.data).toEqual([ + { + lastErrorObject: { n: 1, updatedExisting: true }, + ok: 1, + value: { _id: expectValidId, name: "one" }, + }, + ]) + + await withCollection(async collection => { + expect(await collection.countDocuments()).toBe(5) + + const doc = await collection.findOne({ name: { $eq: "newName" } }) + expect(doc).toEqual({ + _id: expectValidBsonObjectId, + name: "newName", + }) + }) + }) + + it("a distinct query", async () => { + const query = await createQuery({ + fields: { + json: "name", + extra: { + actionType: "distinct", + }, + }, + }) + + const result = await config.api.query.execute(query._id!) + const values = result.data.map(o => o.value).sort() + expect(values).toEqual(["five", "four", "one", "three", "two"]) + }) + + it("a create query with parameters", async () => { + const query = await createQuery({ + fields: { + json: { foo: "{{ foo }}" }, + extra: { + actionType: "insertOne", + }, + }, + queryVerb: "create", + parameters: [ + { + name: "foo", + default: "default", + }, + ], + }) + + const result = await config.api.query.execute(query._id!, { + parameters: { foo: "bar" }, + }) + + expect(result.data).toEqual([ + { + acknowledged: true, + insertedId: expectValidId, + }, + ]) + + await withCollection(async collection => { + const doc = await collection.findOne({ foo: { $eq: "bar" } }) + expect(doc).toEqual({ + _id: expectValidBsonObjectId, + foo: "bar", + }) + }) + }) + + it("a delete query with parameters", async () => { + const query = await createQuery({ + fields: { + json: { name: { $eq: "{{ name }}" } }, + extra: { + actionType: "deleteOne", + }, + }, + queryVerb: "delete", + parameters: [ + { + name: "name", + default: "", + }, + ], + }) + + const result = await config.api.query.execute(query._id!, { + parameters: { name: "one" }, + }) + + expect(result.data).toEqual([ + { + acknowledged: true, + deletedCount: 1, + }, + ]) + + await withCollection(async collection => { + const doc = await collection.findOne({ name: { $eq: "one" } }) + expect(doc).toBeNull() + }) + }) + + it("an update query with parameters", async () => { + const query = await createQuery({ + fields: { + json: { + filter: { name: { $eq: "{{ name }}" } }, + update: { $set: { name: "{{ newName }}" } }, + }, + extra: { + actionType: "updateOne", + }, + }, + queryVerb: "update", + parameters: [ + { + name: "name", + default: "", + }, + { + name: "newName", + default: "", + }, + ], + }) + + const result = await config.api.query.execute(query._id!, { + parameters: { name: "one", newName: "newOne" }, + }) + + 
expect(result.data).toEqual([ + { + acknowledged: true, + matchedCount: 1, + modifiedCount: 1, + upsertedCount: 0, + upsertedId: null, + }, + ]) + + await withCollection(async collection => { + const doc = await collection.findOne({ name: { $eq: "newOne" } }) + expect(doc).toEqual({ + _id: expectValidBsonObjectId, + name: "newOne", + }) + + const oldDoc = await collection.findOne({ name: { $eq: "one" } }) + expect(oldDoc).toBeNull() + }) + }) + + it("should be able to delete all records", async () => { + const query = await createQuery({ + fields: { + json: {}, + extra: { + actionType: "deleteMany", + }, + }, + queryVerb: "delete", + }) + + const result = await config.api.query.execute(query._id!) + + expect(result.data).toEqual([ + { + acknowledged: true, + deletedCount: 5, + }, + ]) + + await withCollection(async collection => { + const docs = await collection.find().toArray() + expect(docs).toHaveLength(0) + }) + }) + + it("should be able to update all documents", async () => { + const query = await createQuery({ + fields: { + json: { + filter: {}, + update: { $set: { name: "newName" } }, + }, + extra: { + actionType: "updateMany", + }, + }, + queryVerb: "update", + }) + + const result = await config.api.query.execute(query._id!) + + expect(result.data).toEqual([ + { + acknowledged: true, + matchedCount: 5, + modifiedCount: 5, + upsertedCount: 0, + upsertedId: null, + }, + ]) + + await withCollection(async collection => { + const docs = await collection.find().toArray() + expect(docs).toHaveLength(5) + for (const doc of docs) { + expect(doc).toEqual({ + _id: expectValidBsonObjectId, + name: "newName", + }) + } }) }) }) - it("a distinct query", async () => { - const query = await createQuery({ - fields: { - json: "name", - extra: { - actionType: "distinct", - }, - }, - }) - - const result = await config.api.query.execute(query._id!) - const values = result.data.map(o => o.value).sort() - expect(values).toEqual(["five", "four", "one", "three", "two"]) + it("should throw an error if the incorrect actionType is specified", async () => { + const verbs = ["read", "create", "update", "delete"] + for (const verb of verbs) { + const query = await createQuery({ + fields: { json: {}, extra: { actionType: "invalid" } }, + queryVerb: verb, + }) + await config.api.query.execute(query._id!, undefined, { status: 400 }) + } }) - it("a create query with parameters", async () => { + it("should ignore extra brackets in query", async () => { const query = await createQuery({ fields: { - json: { foo: "{{ foo }}" }, + json: { foo: "te}st" }, extra: { actionType: "insertOne", }, }, queryVerb: "create", - parameters: [ + }) + + const result = await config.api.query.execute(query._id!) 
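// Editor's note (illustrative sketch): the counts asserted in the deleteMany/updateMany
// tests above come straight from the MongoDB driver's result objects, which the query
// endpoint appears to surface almost verbatim. The collection handle is a placeholder.
import { Collection, Document } from "mongodb"

async function wipeAndRename(collection: Collection<Document>) {
  const updated = await collection.updateMany({}, { $set: { name: "newName" } })
  // 5 and 5 for the five seeded documents.
  console.log(updated.matchedCount, updated.modifiedCount)

  const deleted = await collection.deleteMany({})
  // 5 once every seeded document has been removed.
  console.log(deleted.deletedCount)
}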
+ expect(result.data).toEqual([ + { + acknowledged: true, + insertedId: expectValidId, + }, + ]) + + await withCollection(async collection => { + const doc = await collection.findOne({ foo: { $eq: "te}st" } }) + expect(doc).toEqual({ + _id: expectValidBsonObjectId, + foo: "te}st", + }) + }) + }) + + it("should be able to save deeply nested data", async () => { + const data = { + foo: "bar", + data: [ + { cid: 1 }, + { cid: 2 }, { - name: "foo", - default: "default", + nested: { + name: "test", + ary: [1, 2, 3], + aryOfObjects: [{ a: 1 }, { b: 2 }], + }, }, ], + } + const query = await createQuery({ + fields: { + json: data, + extra: { + actionType: "insertOne", + }, + }, + queryVerb: "create", }) - const result = await config.api.query.execute(query._id!, { - parameters: { foo: "bar" }, - }) - + const result = await config.api.query.execute(query._id!) expect(result.data).toEqual([ { acknowledged: true, @@ -479,239 +712,10 @@ datasourceDescribe( const doc = await collection.findOne({ foo: { $eq: "bar" } }) expect(doc).toEqual({ _id: expectValidBsonObjectId, - foo: "bar", + ...data, }) }) }) - - it("a delete query with parameters", async () => { - const query = await createQuery({ - fields: { - json: { name: { $eq: "{{ name }}" } }, - extra: { - actionType: "deleteOne", - }, - }, - queryVerb: "delete", - parameters: [ - { - name: "name", - default: "", - }, - ], - }) - - const result = await config.api.query.execute(query._id!, { - parameters: { name: "one" }, - }) - - expect(result.data).toEqual([ - { - acknowledged: true, - deletedCount: 1, - }, - ]) - - await withCollection(async collection => { - const doc = await collection.findOne({ name: { $eq: "one" } }) - expect(doc).toBeNull() - }) - }) - - it("an update query with parameters", async () => { - const query = await createQuery({ - fields: { - json: { - filter: { name: { $eq: "{{ name }}" } }, - update: { $set: { name: "{{ newName }}" } }, - }, - extra: { - actionType: "updateOne", - }, - }, - queryVerb: "update", - parameters: [ - { - name: "name", - default: "", - }, - { - name: "newName", - default: "", - }, - ], - }) - - const result = await config.api.query.execute(query._id!, { - parameters: { name: "one", newName: "newOne" }, - }) - - expect(result.data).toEqual([ - { - acknowledged: true, - matchedCount: 1, - modifiedCount: 1, - upsertedCount: 0, - upsertedId: null, - }, - ]) - - await withCollection(async collection => { - const doc = await collection.findOne({ name: { $eq: "newOne" } }) - expect(doc).toEqual({ - _id: expectValidBsonObjectId, - name: "newOne", - }) - - const oldDoc = await collection.findOne({ name: { $eq: "one" } }) - expect(oldDoc).toBeNull() - }) - }) - - it("should be able to delete all records", async () => { - const query = await createQuery({ - fields: { - json: {}, - extra: { - actionType: "deleteMany", - }, - }, - queryVerb: "delete", - }) - - const result = await config.api.query.execute(query._id!) - - expect(result.data).toEqual([ - { - acknowledged: true, - deletedCount: 5, - }, - ]) - - await withCollection(async collection => { - const docs = await collection.find().toArray() - expect(docs).toHaveLength(0) - }) - }) - - it("should be able to update all documents", async () => { - const query = await createQuery({ - fields: { - json: { - filter: {}, - update: { $set: { name: "newName" } }, - }, - extra: { - actionType: "updateMany", - }, - }, - queryVerb: "update", - }) - - const result = await config.api.query.execute(query._id!) 
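// Editor's note (illustrative sketch): the "deeply nested data" test above relies on
// MongoDB storing nested objects and arrays natively, so the document read back equals the
// document written plus the generated _id. Collection handle and shape are placeholders.
import { Collection, Document } from "mongodb"

async function roundTripNested(collection: Collection<Document>) {
  const doc = {
    foo: "bar",
    data: [{ cid: 1 }, { cid: 2 }, { nested: { ary: [1, 2, 3], aryOfObjects: [{ a: 1 }] } }],
  }
  const { insertedId } = await collection.insertOne(doc)
  // The stored document keeps the full nested structure, with no flattening applied.
  return await collection.findOne({ _id: insertedId })
}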
- - expect(result.data).toEqual([ - { - acknowledged: true, - matchedCount: 5, - modifiedCount: 5, - upsertedCount: 0, - upsertedId: null, - }, - ]) - - await withCollection(async collection => { - const docs = await collection.find().toArray() - expect(docs).toHaveLength(5) - for (const doc of docs) { - expect(doc).toEqual({ - _id: expectValidBsonObjectId, - name: "newName", - }) - } - }) - }) - }) - - it("should throw an error if the incorrect actionType is specified", async () => { - const verbs = ["read", "create", "update", "delete"] - for (const verb of verbs) { - const query = await createQuery({ - fields: { json: {}, extra: { actionType: "invalid" } }, - queryVerb: verb, - }) - await config.api.query.execute(query._id!, undefined, { status: 400 }) - } - }) - - it("should ignore extra brackets in query", async () => { - const query = await createQuery({ - fields: { - json: { foo: "te}st" }, - extra: { - actionType: "insertOne", - }, - }, - queryVerb: "create", - }) - - const result = await config.api.query.execute(query._id!) - expect(result.data).toEqual([ - { - acknowledged: true, - insertedId: expectValidId, - }, - ]) - - await withCollection(async collection => { - const doc = await collection.findOne({ foo: { $eq: "te}st" } }) - expect(doc).toEqual({ - _id: expectValidBsonObjectId, - foo: "te}st", - }) - }) - }) - - it("should be able to save deeply nested data", async () => { - const data = { - foo: "bar", - data: [ - { cid: 1 }, - { cid: 2 }, - { - nested: { - name: "test", - ary: [1, 2, 3], - aryOfObjects: [{ a: 1 }, { b: 2 }], - }, - }, - ], - } - const query = await createQuery({ - fields: { - json: data, - extra: { - actionType: "insertOne", - }, - }, - queryVerb: "create", - }) - - const result = await config.api.query.execute(query._id!) - expect(result.data).toEqual([ - { - acknowledged: true, - insertedId: expectValidId, - }, - ]) - - await withCollection(async collection => { - const doc = await collection.findOne({ foo: { $eq: "bar" } }) - expect(doc).toEqual({ - _id: expectValidBsonObjectId, - ...data, - }) - }) - }) - } -) + } + ) +} diff --git a/packages/server/src/api/routes/tests/row.spec.ts b/packages/server/src/api/routes/tests/row.spec.ts index 5bdd341beb..02995e6d0a 100644 --- a/packages/server/src/api/routes/tests/row.spec.ts +++ b/packages/server/src/api/routes/tests/row.spec.ts @@ -85,619 +85,315 @@ function encodeJS(binding: string) { return `{{ js "${Buffer.from(binding).toString("base64")}"}}` } -datasourceDescribe( - { name: "/rows (%s)", exclude: [DatabaseName.MONGODB] }, - ({ config, dsProvider, isInternal, isMSSQL, isOracle }) => { - let table: Table - let datasource: Datasource | undefined - let client: Knex | undefined +const descriptions = datasourceDescribe({ exclude: [DatabaseName.MONGODB] }) - beforeAll(async () => { - const ds = await dsProvider() - datasource = ds.datasource - client = ds.client - }) +if (descriptions.length) { + describe.each(descriptions)( + "/rows ($dbName)", + ({ config, dsProvider, isInternal, isMSSQL, isOracle }) => { + let table: Table + let datasource: Datasource | undefined + let client: Knex | undefined - afterAll(async () => { - setup.afterAll() - }) - - function saveTableRequest( - // We omit the name field here because it's generated in the function with a - // high likelihood to be unique. Tests should not have any reason to control - // the table name they're writing to. 
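// Editor's note (illustrative sketch of the refactor pattern above, assuming
// datasourceDescribe now returns a plain array of per-datasource contexts): guarding with
// `if (descriptions.length)` avoids calling describe.each with an empty table, which Jest
// rejects, and `$dbName` in the title is Jest's object-property title substitution.
// The context shape below is a simplified placeholder.
type DatasourceContext = { dbName: string; isInternal: boolean }

const exampleDescriptions: DatasourceContext[] = [
  { dbName: "postgres", isInternal: false },
  { dbName: "internal", isInternal: true },
]

if (exampleDescriptions.length) {
  describe.each(exampleDescriptions)("/rows ($dbName)", ({ dbName, isInternal }) => {
    it("runs once per datasource", () => {
      expect(typeof dbName).toBe("string")
      expect(typeof isInternal).toBe("boolean")
    })
  })
}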
- ...overrides: Partial>[] - ): SaveTableRequest { - const defaultSchema: TableSchema = { - id: { - type: FieldType.NUMBER, - name: "id", - autocolumn: true, - constraints: { - presence: true, - }, - }, - } - - for (const override of overrides) { - if (override.primary) { - delete defaultSchema.id - } - } - - const req: SaveTableRequest = { - name: uuid.v4().substring(0, 10), - type: "table", - sourceType: datasource - ? TableSourceType.EXTERNAL - : TableSourceType.INTERNAL, - sourceId: datasource ? datasource._id! : INTERNAL_TABLE_SOURCE_ID, - primary: ["id"], - schema: defaultSchema, - } - const merged = merge(req, ...overrides) - return merged - } - - function defaultTable( - // We omit the name field here because it's generated in the function with a - // high likelihood to be unique. Tests should not have any reason to control - // the table name they're writing to. - ...overrides: Partial>[] - ): SaveTableRequest { - return saveTableRequest( - { - primaryDisplay: "name", - schema: { - name: { - type: FieldType.STRING, - name: "name", - constraints: { - type: "string", - }, - }, - description: { - type: FieldType.STRING, - name: "description", - constraints: { - type: "string", - }, - }, - }, - }, - ...overrides - ) - } - - beforeEach(async () => { - mocks.licenses.useCloudFree() - }) - - const getRowUsage = async () => { - const { total } = await config.doInContext(undefined, () => - quotas.getCurrentUsageValues( - QuotaUsageType.STATIC, - StaticQuotaName.ROWS - ) - ) - return total - } - - const assertRowUsage = async (expected: number) => { - const usage = await getRowUsage() - - // Because our quota tracking is not perfect, we allow a 10% margin of - // error. This is to account for the fact that parallel writes can result - // in some quota updates getting lost. We don't have any need to solve this - // right now, so we just allow for some error. - if (expected === 0) { - expect(usage).toEqual(0) - return - } - expect(usage).toBeGreaterThan(expected * 0.9) - expect(usage).toBeLessThan(expected * 1.1) - } - - const defaultRowFields = isInternal - ? { - type: "row", - createdAt: timestamp, - updatedAt: timestamp, - } - : undefined - - beforeAll(async () => { - table = await config.api.table.save(defaultTable()) - }) - - describe("create", () => { - it("creates a new row successfully", async () => { - const rowUsage = await getRowUsage() - const row = await config.api.row.save(table._id!, { - name: "Test Contact", - }) - expect(row.name).toEqual("Test Contact") - expect(row._rev).toBeDefined() - await assertRowUsage(isInternal ? rowUsage + 1 : rowUsage) + beforeAll(async () => { + const ds = await dsProvider() + datasource = ds.datasource + client = ds.client }) - it("fails to create a row for a table that does not exist", async () => { - const rowUsage = await getRowUsage() - await config.api.row.save("1234567", {}, { status: 404 }) - await assertRowUsage(rowUsage) + afterAll(async () => { + setup.afterAll() }) - it("fails to create a row if required fields are missing", async () => { - const rowUsage = await getRowUsage() - const table = await config.api.table.save( - saveTableRequest({ - schema: { - required: { - type: FieldType.STRING, - name: "required", - constraints: { - type: "string", - presence: true, - }, - }, + function saveTableRequest( + // We omit the name field here because it's generated in the function with a + // high likelihood to be unique. Tests should not have any reason to control + // the table name they're writing to. 
+ ...overrides: Partial>[] + ): SaveTableRequest { + const defaultSchema: TableSchema = { + id: { + type: FieldType.NUMBER, + name: "id", + autocolumn: true, + constraints: { + presence: true, }, - }) - ) - await config.api.row.save( - table._id!, - {}, + }, + } + + for (const override of overrides) { + if (override.primary) { + delete defaultSchema.id + } + } + + const req: SaveTableRequest = { + name: uuid.v4().substring(0, 10), + type: "table", + sourceType: datasource + ? TableSourceType.EXTERNAL + : TableSourceType.INTERNAL, + sourceId: datasource ? datasource._id! : INTERNAL_TABLE_SOURCE_ID, + primary: ["id"], + schema: defaultSchema, + } + const merged = merge(req, ...overrides) + return merged + } + + function defaultTable( + // We omit the name field here because it's generated in the function with a + // high likelihood to be unique. Tests should not have any reason to control + // the table name they're writing to. + ...overrides: Partial>[] + ): SaveTableRequest { + return saveTableRequest( { - status: 500, - body: { - validationErrors: { - required: ["can't be blank"], - }, - }, - } - ) - await assertRowUsage(rowUsage) - }) - - isInternal && - it("increment row autoId per create row request", async () => { - const rowUsage = await getRowUsage() - - const newTable = await config.api.table.save( - saveTableRequest({ - schema: { - "Row ID": { - name: "Row ID", - type: FieldType.NUMBER, - subtype: AutoFieldSubType.AUTO_ID, - icon: "ri-magic-line", - autocolumn: true, - constraints: { - type: "number", - presence: true, - numericality: { - greaterThanOrEqualTo: "", - lessThanOrEqualTo: "", - }, - }, - }, - }, - }) - ) - - let previousId = 0 - for (let i = 0; i < 10; i++) { - const row = await config.api.row.save(newTable._id!, {}) - expect(row["Row ID"]).toBeGreaterThan(previousId) - previousId = row["Row ID"] - } - await assertRowUsage(isInternal ? rowUsage + 10 : rowUsage) - }) - - isInternal && - it("should increment auto ID correctly when creating rows in parallel", async () => { - const table = await config.api.table.save( - saveTableRequest({ - schema: { - "Row ID": { - name: "Row ID", - type: FieldType.NUMBER, - subtype: AutoFieldSubType.AUTO_ID, - icon: "ri-magic-line", - autocolumn: true, - constraints: { - type: "number", - presence: true, - numericality: { - greaterThanOrEqualTo: "", - lessThanOrEqualTo: "", - }, - }, - }, - }, - }) - ) - - const sequence = Array(50) - .fill(0) - .map((_, i) => i + 1) - - // This block of code is simulating users creating auto ID rows at the - // same time. It's expected that this operation will sometimes return - // a document conflict error (409), but the idea is to retry in those - // situations. The code below does this a large number of times with - // small, random delays between them to try and get through the list - // as quickly as possible. - await Promise.all( - sequence.map(async () => { - const attempts = 30 - for (let attempt = 0; attempt < attempts; attempt++) { - try { - await config.api.row.save(table._id!, {}) - return - } catch (e) { - await new Promise(r => setTimeout(r, Math.random() * 50)) - } - } - throw new Error(`Failed to create row after ${attempts} attempts`) - }) - ) - - const rows = await config.api.row.fetch(table._id!) - expect(rows).toHaveLength(50) - - // The main purpose of this test is to ensure that even under pressure, - // we maintain data integrity. An auto ID column should hand out - // monotonically increasing unique integers no matter what. 
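// Editor's note (illustrative sketch generalising the tolerance used by assertRowUsage
// above): because parallel writes can drop some quota updates, the tests accept any usage
// within a relative margin rather than an exact count, with zero still required to match
// exactly. The helper name is an assumption.
function expectWithinMargin(actual: number, expected: number, margin = 0.1) {
  if (expected === 0) {
    // Zero has no meaningful relative margin, so require an exact match.
    expect(actual).toEqual(0)
    return
  }
  expect(actual).toBeGreaterThan(expected * (1 - margin))
  expect(actual).toBeLessThan(expected * (1 + margin))
}

// expectWithinMargin(await getRowUsage(), rowUsage + 10) mirrors the auto ID test's intent.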
- const ids = rows.map(r => r["Row ID"]) - expect(ids).toEqual(expect.arrayContaining(sequence)) - }) - - isInternal && - it("doesn't allow creating in user table", async () => { - const response = await config.api.row.save( - InternalTable.USER_METADATA, - { - firstName: "Joe", - lastName: "Joe", - email: "joe@joe.com", - roles: {}, - }, - { status: 400 } - ) - expect(response.message).toBe("Cannot create new user entry.") - }) - - it("should not mis-parse date string out of JSON", async () => { - const table = await config.api.table.save( - saveTableRequest({ + primaryDisplay: "name", schema: { name: { type: FieldType.STRING, name: "name", + constraints: { + type: "string", + }, + }, + description: { + type: FieldType.STRING, + name: "description", + constraints: { + type: "string", + }, }, }, - }) + }, + ...overrides ) + } - const row = await config.api.row.save(table._id!, { - name: `{ "foo": "2023-01-26T11:48:57.000Z" }`, - }) - - expect(row.name).toEqual(`{ "foo": "2023-01-26T11:48:57.000Z" }`) + beforeEach(async () => { + mocks.licenses.useCloudFree() }) - describe("default values", () => { - let table: Table + const getRowUsage = async () => { + const { total } = await config.doInContext(undefined, () => + quotas.getCurrentUsageValues( + QuotaUsageType.STATIC, + StaticQuotaName.ROWS + ) + ) + return total + } - describe("string column", () => { - beforeAll(async () => { - table = await config.api.table.save( - saveTableRequest({ - schema: { - description: { - name: "description", - type: FieldType.STRING, - default: "default description", - }, - }, - }) - ) - }) + const assertRowUsage = async (expected: number) => { + const usage = await getRowUsage() - it("creates a new row with a default value successfully", async () => { - const row = await config.api.row.save(table._id!, {}) - expect(row.description).toEqual("default description") - }) + // Because our quota tracking is not perfect, we allow a 10% margin of + // error. This is to account for the fact that parallel writes can result + // in some quota updates getting lost. We don't have any need to solve this + // right now, so we just allow for some error. + if (expected === 0) { + expect(usage).toEqual(0) + return + } + expect(usage).toBeGreaterThan(expected * 0.9) + expect(usage).toBeLessThan(expected * 1.1) + } - it("does not use default value if value specified", async () => { - const row = await config.api.row.save(table._id!, { - description: "specified description", - }) - expect(row.description).toEqual("specified description") - }) + const defaultRowFields = isInternal + ? { + type: "row", + createdAt: timestamp, + updatedAt: timestamp, + } + : undefined - it("uses the default value if value is null", async () => { - const row = await config.api.row.save(table._id!, { - description: null, - }) - expect(row.description).toEqual("default description") - }) + beforeAll(async () => { + table = await config.api.table.save(defaultTable()) + }) - it("uses the default value if value is undefined", async () => { - const row = await config.api.row.save(table._id!, { - description: undefined, - }) - expect(row.description).toEqual("default description") + describe("create", () => { + it("creates a new row successfully", async () => { + const rowUsage = await getRowUsage() + const row = await config.api.row.save(table._id!, { + name: "Test Contact", }) + expect(row.name).toEqual("Test Contact") + expect(row._rev).toBeDefined() + await assertRowUsage(isInternal ? 
rowUsage + 1 : rowUsage) }) - describe("number column", () => { - beforeAll(async () => { - table = await config.api.table.save( + it("fails to create a row for a table that does not exist", async () => { + const rowUsage = await getRowUsage() + await config.api.row.save("1234567", {}, { status: 404 }) + await assertRowUsage(rowUsage) + }) + + it("fails to create a row if required fields are missing", async () => { + const rowUsage = await getRowUsage() + const table = await config.api.table.save( + saveTableRequest({ + schema: { + required: { + type: FieldType.STRING, + name: "required", + constraints: { + type: "string", + presence: true, + }, + }, + }, + }) + ) + await config.api.row.save( + table._id!, + {}, + { + status: 500, + body: { + validationErrors: { + required: ["can't be blank"], + }, + }, + } + ) + await assertRowUsage(rowUsage) + }) + + isInternal && + it("increment row autoId per create row request", async () => { + const rowUsage = await getRowUsage() + + const newTable = await config.api.table.save( saveTableRequest({ schema: { - age: { - name: "age", + "Row ID": { + name: "Row ID", type: FieldType.NUMBER, - default: "25", + subtype: AutoFieldSubType.AUTO_ID, + icon: "ri-magic-line", + autocolumn: true, + constraints: { + type: "number", + presence: true, + numericality: { + greaterThanOrEqualTo: "", + lessThanOrEqualTo: "", + }, + }, }, }, }) ) + + let previousId = 0 + for (let i = 0; i < 10; i++) { + const row = await config.api.row.save(newTable._id!, {}) + expect(row["Row ID"]).toBeGreaterThan(previousId) + previousId = row["Row ID"] + } + await assertRowUsage(isInternal ? rowUsage + 10 : rowUsage) }) - it("creates a new row with a default value successfully", async () => { - const row = await config.api.row.save(table._id!, {}) - expect(row.age).toEqual(25) - }) - - it("does not use default value if value specified", async () => { - const row = await config.api.row.save(table._id!, { - age: 30, - }) - expect(row.age).toEqual(30) - }) - }) - - describe("date column", () => { - it("creates a row with a default value successfully", async () => { + isInternal && + it("should increment auto ID correctly when creating rows in parallel", async () => { const table = await config.api.table.save( saveTableRequest({ schema: { - date: { - name: "date", - type: FieldType.DATETIME, - default: "2023-01-26T11:48:57.000Z", + "Row ID": { + name: "Row ID", + type: FieldType.NUMBER, + subtype: AutoFieldSubType.AUTO_ID, + icon: "ri-magic-line", + autocolumn: true, + constraints: { + type: "number", + presence: true, + numericality: { + greaterThanOrEqualTo: "", + lessThanOrEqualTo: "", + }, + }, }, }, }) ) - const row = await config.api.row.save(table._id!, {}) - expect(row.date).toEqual("2023-01-26T11:48:57.000Z") + + const sequence = Array(50) + .fill(0) + .map((_, i) => i + 1) + + // This block of code is simulating users creating auto ID rows at the + // same time. It's expected that this operation will sometimes return + // a document conflict error (409), but the idea is to retry in those + // situations. The code below does this a large number of times with + // small, random delays between them to try and get through the list + // as quickly as possible. 
+ await Promise.all( + sequence.map(async () => { + const attempts = 30 + for (let attempt = 0; attempt < attempts; attempt++) { + try { + await config.api.row.save(table._id!, {}) + return + } catch (e) { + await new Promise(r => setTimeout(r, Math.random() * 50)) + } + } + throw new Error( + `Failed to create row after ${attempts} attempts` + ) + }) + ) + + const rows = await config.api.row.fetch(table._id!) + expect(rows).toHaveLength(50) + + // The main purpose of this test is to ensure that even under pressure, + // we maintain data integrity. An auto ID column should hand out + // monotonically increasing unique integers no matter what. + const ids = rows.map(r => r["Row ID"]) + expect(ids).toEqual(expect.arrayContaining(sequence)) }) - it("gives an error if the default value is invalid", async () => { - const table = await config.api.table.save( - saveTableRequest({ - schema: { - date: { - name: "date", - type: FieldType.DATETIME, - default: "invalid", - }, - }, - }) - ) - await config.api.row.save( - table._id!, - {}, + isInternal && + it("doesn't allow creating in user table", async () => { + const response = await config.api.row.save( + InternalTable.USER_METADATA, { - status: 400, - body: { - message: `Invalid default value for field 'date' - Invalid date value: "invalid"`, - }, - } + firstName: "Joe", + lastName: "Joe", + email: "joe@joe.com", + roles: {}, + }, + { status: 400 } ) + expect(response.message).toBe("Cannot create new user entry.") }) + + it("should not mis-parse date string out of JSON", async () => { + const table = await config.api.table.save( + saveTableRequest({ + schema: { + name: { + type: FieldType.STRING, + name: "name", + }, + }, + }) + ) + + const row = await config.api.row.save(table._id!, { + name: `{ "foo": "2023-01-26T11:48:57.000Z" }`, + }) + + expect(row.name).toEqual(`{ "foo": "2023-01-26T11:48:57.000Z" }`) }) - describe("options column", () => { - beforeAll(async () => { - table = await config.api.table.save( - saveTableRequest({ - schema: { - status: { - name: "status", - type: FieldType.OPTIONS, - default: "requested", - constraints: { - inclusion: ["requested", "approved"], - }, - }, - }, - }) - ) - }) + describe("default values", () => { + let table: Table - it("creates a new row with a default value successfully", async () => { - const row = await config.api.row.save(table._id!, {}) - expect(row.status).toEqual("requested") - }) - - it("does not use default value if value specified", async () => { - const row = await config.api.row.save(table._id!, { - status: "approved", - }) - expect(row.status).toEqual("approved") - }) - }) - - describe("array column", () => { - beforeAll(async () => { - table = await config.api.table.save( - saveTableRequest({ - schema: { - food: { - name: "food", - type: FieldType.ARRAY, - default: ["apple", "orange"], - constraints: { - type: JsonFieldSubType.ARRAY, - inclusion: ["apple", "orange", "banana"], - }, - }, - }, - }) - ) - }) - - it("creates a new row with a default value successfully", async () => { - const row = await config.api.row.save(table._id!, {}) - expect(row.food).toEqual(["apple", "orange"]) - }) - - it("creates a new row with a default value when given an empty list", async () => { - const row = await config.api.row.save(table._id!, { food: [] }) - expect(row.food).toEqual(["apple", "orange"]) - }) - - it("does not use default value if value specified", async () => { - const row = await config.api.row.save(table._id!, { - food: ["orange"], - }) - expect(row.food).toEqual(["orange"]) - }) - - 
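// Editor's note (illustrative sketch of the retry-with-random-backoff strategy the comment
// and loop above describe): concurrent auto ID writes can hit document conflicts, so each
// writer retries a bounded number of times with a small random delay. The operation type
// and helper name are placeholders.
async function retryOnConflict<T>(
  operation: () => Promise<T>,
  attempts = 30,
  maxDelayMs = 50
): Promise<T> {
  let lastError: unknown
  for (let attempt = 0; attempt < attempts; attempt++) {
    try {
      return await operation()
    } catch (e) {
      lastError = e
      // Random jitter spreads retries out so the writers stop colliding.
      await new Promise(resolve => setTimeout(resolve, Math.random() * maxDelayMs))
    }
  }
  throw new Error(`Operation failed after ${attempts} attempts: ${lastError}`)
}

// Usage mirroring the test:
//   await Promise.all(sequence.map(() => retryOnConflict(() => config.api.row.save(tableId, {}))))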
it("resets back to its default value when empty", async () => { - let row = await config.api.row.save(table._id!, { - food: ["orange"], - }) - row = await config.api.row.save(table._id!, { ...row, food: [] }) - expect(row.food).toEqual(["apple", "orange"]) - }) - }) - - describe("user column", () => { - beforeAll(async () => { - table = await config.api.table.save( - saveTableRequest({ - schema: { - user: { - name: "user", - type: FieldType.BB_REFERENCE_SINGLE, - subtype: BBReferenceFieldSubType.USER, - default: "{{ [Current User]._id }}", - }, - }, - }) - ) - }) - - it("creates a new row with a default value successfully", async () => { - const row = await config.api.row.save(table._id!, {}) - expect(row.user._id).toEqual(config.getUser()._id) - }) - - it("does not use default value if value specified", async () => { - const id = `us_${utils.newid()}` - await config.createUser({ _id: id }) - const row = await config.api.row.save(table._id!, { - user: id, - }) - expect(row.user._id).toEqual(id) - }) - }) - - describe("multi-user column", () => { - beforeAll(async () => { - table = await config.api.table.save( - saveTableRequest({ - schema: { - users: { - name: "users", - type: FieldType.BB_REFERENCE, - subtype: BBReferenceFieldSubType.USER, - default: ["{{ [Current User]._id }}"], - }, - }, - }) - ) - }) - - it("creates a new row with a default value successfully", async () => { - const row = await config.api.row.save(table._id!, {}) - expect(row.users).toHaveLength(1) - expect(row.users[0]._id).toEqual(config.getUser()._id) - }) - - it("does not use default value if value specified", async () => { - const id = `us_${utils.newid()}` - await config.createUser({ _id: id }) - const row = await config.api.row.save(table._id!, { - users: [id], - }) - expect(row.users).toHaveLength(1) - expect(row.users[0]._id).toEqual(id) - }) - }) - - describe("boolean column", () => { - beforeAll(async () => { - table = await config.api.table.save( - saveTableRequest({ - schema: { - active: { - name: "active", - type: FieldType.BOOLEAN, - default: "true", - }, - }, - }) - ) - }) - - it("creates a new row with a default value successfully", async () => { - const row = await config.api.row.save(table._id!, {}) - expect(row.active).toEqual(true) - }) - - it("does not use default value if value specified", async () => { - const row = await config.api.row.save(table._id!, { - active: false, - }) - expect(row.active).toEqual(false) - }) - }) - - describe("bigint column", () => { - beforeAll(async () => { - table = await config.api.table.save( - saveTableRequest({ - schema: { - bigNumber: { - name: "bigNumber", - type: FieldType.BIGINT, - default: "1234567890", - }, - }, - }) - ) - }) - - it("creates a new row with a default value successfully", async () => { - const row = await config.api.row.save(table._id!, {}) - expect(row.bigNumber).toEqual("1234567890") - }) - - it("does not use default value if value specified", async () => { - const row = await config.api.row.save(table._id!, { - bigNumber: "9876543210", - }) - expect(row.bigNumber).toEqual("9876543210") - }) - }) - - describe("bindings", () => { describe("string column", () => { beforeAll(async () => { table = await config.api.table.save( @@ -706,18 +402,16 @@ datasourceDescribe( description: { name: "description", type: FieldType.STRING, - default: `{{ date now "YYYY-MM-DDTHH:mm:ss" }}`, + default: "default description", }, }, }) ) }) - it("can use bindings in default values", async () => { + it("creates a new row with a default value successfully", async 
() => { const row = await config.api.row.save(table._id!, {}) - expect(row.description).toMatch( - /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}/ - ) + expect(row.description).toEqual("default description") }) it("does not use default value if value specified", async () => { @@ -727,38 +421,18 @@ datasourceDescribe( expect(row.description).toEqual("specified description") }) - it("can bind the current user", async () => { - const table = await config.api.table.save( - saveTableRequest({ - schema: { - user: { - name: "user", - type: FieldType.STRING, - default: `{{ [Current User]._id }}`, - }, - }, - }) - ) - const row = await config.api.row.save(table._id!, {}) - expect(row.user).toEqual(config.getUser()._id) + it("uses the default value if value is null", async () => { + const row = await config.api.row.save(table._id!, { + description: null, + }) + expect(row.description).toEqual("default description") }) - it("cannot access current user password", async () => { - const table = await config.api.table.save( - saveTableRequest({ - schema: { - user: { - name: "user", - type: FieldType.STRING, - default: `{{ user.password }}`, - }, - }, - }) - ) - const row = await config.api.row.save(table._id!, {}) - // For some reason it's null for internal tables, and undefined for - // external. - expect(row.user == null).toBe(true) + it("uses the default value if value is undefined", async () => { + const row = await config.api.row.save(table._id!, { + description: undefined, + }) + expect(row.description).toEqual("default description") }) }) @@ -770,19 +444,330 @@ datasourceDescribe( age: { name: "age", type: FieldType.NUMBER, - default: `{{ sum 10 10 5 }}`, + default: "25", }, }, }) ) }) - it("can use bindings in default values", async () => { + it("creates a new row with a default value successfully", async () => { const row = await config.api.row.save(table._id!, {}) expect(row.age).toEqual(25) }) - describe("invalid default value", () => { + it("does not use default value if value specified", async () => { + const row = await config.api.row.save(table._id!, { + age: 30, + }) + expect(row.age).toEqual(30) + }) + }) + + describe("date column", () => { + it("creates a row with a default value successfully", async () => { + const table = await config.api.table.save( + saveTableRequest({ + schema: { + date: { + name: "date", + type: FieldType.DATETIME, + default: "2023-01-26T11:48:57.000Z", + }, + }, + }) + ) + const row = await config.api.row.save(table._id!, {}) + expect(row.date).toEqual("2023-01-26T11:48:57.000Z") + }) + + it("gives an error if the default value is invalid", async () => { + const table = await config.api.table.save( + saveTableRequest({ + schema: { + date: { + name: "date", + type: FieldType.DATETIME, + default: "invalid", + }, + }, + }) + ) + await config.api.row.save( + table._id!, + {}, + { + status: 400, + body: { + message: `Invalid default value for field 'date' - Invalid date value: "invalid"`, + }, + } + ) + }) + }) + + describe("options column", () => { + beforeAll(async () => { + table = await config.api.table.save( + saveTableRequest({ + schema: { + status: { + name: "status", + type: FieldType.OPTIONS, + default: "requested", + constraints: { + inclusion: ["requested", "approved"], + }, + }, + }, + }) + ) + }) + + it("creates a new row with a default value successfully", async () => { + const row = await config.api.row.save(table._id!, {}) + expect(row.status).toEqual("requested") + }) + + it("does not use default value if value specified", async () => { + const row = await 
config.api.row.save(table._id!, { + status: "approved", + }) + expect(row.status).toEqual("approved") + }) + }) + + describe("array column", () => { + beforeAll(async () => { + table = await config.api.table.save( + saveTableRequest({ + schema: { + food: { + name: "food", + type: FieldType.ARRAY, + default: ["apple", "orange"], + constraints: { + type: JsonFieldSubType.ARRAY, + inclusion: ["apple", "orange", "banana"], + }, + }, + }, + }) + ) + }) + + it("creates a new row with a default value successfully", async () => { + const row = await config.api.row.save(table._id!, {}) + expect(row.food).toEqual(["apple", "orange"]) + }) + + it("creates a new row with a default value when given an empty list", async () => { + const row = await config.api.row.save(table._id!, { food: [] }) + expect(row.food).toEqual(["apple", "orange"]) + }) + + it("does not use default value if value specified", async () => { + const row = await config.api.row.save(table._id!, { + food: ["orange"], + }) + expect(row.food).toEqual(["orange"]) + }) + + it("resets back to its default value when empty", async () => { + let row = await config.api.row.save(table._id!, { + food: ["orange"], + }) + row = await config.api.row.save(table._id!, { ...row, food: [] }) + expect(row.food).toEqual(["apple", "orange"]) + }) + }) + + describe("user column", () => { + beforeAll(async () => { + table = await config.api.table.save( + saveTableRequest({ + schema: { + user: { + name: "user", + type: FieldType.BB_REFERENCE_SINGLE, + subtype: BBReferenceFieldSubType.USER, + default: "{{ [Current User]._id }}", + }, + }, + }) + ) + }) + + it("creates a new row with a default value successfully", async () => { + const row = await config.api.row.save(table._id!, {}) + expect(row.user._id).toEqual(config.getUser()._id) + }) + + it("does not use default value if value specified", async () => { + const id = `us_${utils.newid()}` + await config.createUser({ _id: id }) + const row = await config.api.row.save(table._id!, { + user: id, + }) + expect(row.user._id).toEqual(id) + }) + }) + + describe("multi-user column", () => { + beforeAll(async () => { + table = await config.api.table.save( + saveTableRequest({ + schema: { + users: { + name: "users", + type: FieldType.BB_REFERENCE, + subtype: BBReferenceFieldSubType.USER, + default: ["{{ [Current User]._id }}"], + }, + }, + }) + ) + }) + + it("creates a new row with a default value successfully", async () => { + const row = await config.api.row.save(table._id!, {}) + expect(row.users).toHaveLength(1) + expect(row.users[0]._id).toEqual(config.getUser()._id) + }) + + it("does not use default value if value specified", async () => { + const id = `us_${utils.newid()}` + await config.createUser({ _id: id }) + const row = await config.api.row.save(table._id!, { + users: [id], + }) + expect(row.users).toHaveLength(1) + expect(row.users[0]._id).toEqual(id) + }) + }) + + describe("boolean column", () => { + beforeAll(async () => { + table = await config.api.table.save( + saveTableRequest({ + schema: { + active: { + name: "active", + type: FieldType.BOOLEAN, + default: "true", + }, + }, + }) + ) + }) + + it("creates a new row with a default value successfully", async () => { + const row = await config.api.row.save(table._id!, {}) + expect(row.active).toEqual(true) + }) + + it("does not use default value if value specified", async () => { + const row = await config.api.row.save(table._id!, { + active: false, + }) + expect(row.active).toEqual(false) + }) + }) + + describe("bigint column", () => { + 
beforeAll(async () => { + table = await config.api.table.save( + saveTableRequest({ + schema: { + bigNumber: { + name: "bigNumber", + type: FieldType.BIGINT, + default: "1234567890", + }, + }, + }) + ) + }) + + it("creates a new row with a default value successfully", async () => { + const row = await config.api.row.save(table._id!, {}) + expect(row.bigNumber).toEqual("1234567890") + }) + + it("does not use default value if value specified", async () => { + const row = await config.api.row.save(table._id!, { + bigNumber: "9876543210", + }) + expect(row.bigNumber).toEqual("9876543210") + }) + }) + + describe("bindings", () => { + describe("string column", () => { + beforeAll(async () => { + table = await config.api.table.save( + saveTableRequest({ + schema: { + description: { + name: "description", + type: FieldType.STRING, + default: `{{ date now "YYYY-MM-DDTHH:mm:ss" }}`, + }, + }, + }) + ) + }) + + it("can use bindings in default values", async () => { + const row = await config.api.row.save(table._id!, {}) + expect(row.description).toMatch( + /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}/ + ) + }) + + it("does not use default value if value specified", async () => { + const row = await config.api.row.save(table._id!, { + description: "specified description", + }) + expect(row.description).toEqual("specified description") + }) + + it("can bind the current user", async () => { + const table = await config.api.table.save( + saveTableRequest({ + schema: { + user: { + name: "user", + type: FieldType.STRING, + default: `{{ [Current User]._id }}`, + }, + }, + }) + ) + const row = await config.api.row.save(table._id!, {}) + expect(row.user).toEqual(config.getUser()._id) + }) + + it("cannot access current user password", async () => { + const table = await config.api.table.save( + saveTableRequest({ + schema: { + user: { + name: "user", + type: FieldType.STRING, + default: `{{ user.password }}`, + }, + }, + }) + ) + const row = await config.api.row.save(table._id!, {}) + // For some reason it's null for internal tables, and undefined for + // external. 
+ expect(row.user == null).toBe(true) + }) + }) + + describe("number column", () => { beforeAll(async () => { table = await config.api.table.save( saveTableRequest({ @@ -790,1030 +775,890 @@ datasourceDescribe( age: { name: "age", type: FieldType.NUMBER, - default: `{{ capitalize "invalid" }}`, + default: `{{ sum 10 10 5 }}`, }, }, }) ) }) - it("throws an error when invalid default value", async () => { - await config.api.row.save( - table._id!, - {}, - { - status: 400, - body: { - message: - "Invalid default value for field 'age' - Invalid number value \"Invalid\"", - }, - } - ) + it("can use bindings in default values", async () => { + const row = await config.api.row.save(table._id!, {}) + expect(row.age).toEqual(25) + }) + + describe("invalid default value", () => { + beforeAll(async () => { + table = await config.api.table.save( + saveTableRequest({ + schema: { + age: { + name: "age", + type: FieldType.NUMBER, + default: `{{ capitalize "invalid" }}`, + }, + }, + }) + ) + }) + + it("throws an error when invalid default value", async () => { + await config.api.row.save( + table._id!, + {}, + { + status: 400, + body: { + message: + "Invalid default value for field 'age' - Invalid number value \"Invalid\"", + }, + } + ) + }) }) }) }) }) - }) - describe("relations to same table", () => { - let relatedRows: Row[] + describe("relations to same table", () => { + let relatedRows: Row[] - beforeAll(async () => { - const relatedTable = await config.api.table.save( - defaultTable({ - schema: { - name: { name: "name", type: FieldType.STRING }, - }, - }) - ) - const relatedTableId = relatedTable._id! - table = await config.api.table.save( - defaultTable({ - schema: { - name: { name: "name", type: FieldType.STRING }, - related1: { - type: FieldType.LINK, - name: "related1", - fieldName: "main1", - tableId: relatedTableId, - relationshipType: RelationshipType.MANY_TO_MANY, + beforeAll(async () => { + const relatedTable = await config.api.table.save( + defaultTable({ + schema: { + name: { name: "name", type: FieldType.STRING }, }, - related2: { - type: FieldType.LINK, - name: "related2", - fieldName: "main2", - tableId: relatedTableId, - relationshipType: RelationshipType.MANY_TO_MANY, - }, - }, - }) - ) - relatedRows = await Promise.all([ - config.api.row.save(relatedTableId, { name: "foo" }), - config.api.row.save(relatedTableId, { name: "bar" }), - config.api.row.save(relatedTableId, { name: "baz" }), - config.api.row.save(relatedTableId, { name: "boo" }), - ]) - }) - - it("can create rows with both relationships", async () => { - const row = await config.api.row.save(table._id!, { - name: "test", - related1: [relatedRows[0]._id!], - related2: [relatedRows[1]._id!], - }) - - expect(row).toEqual( - expect.objectContaining({ - name: "test", - related1: [ - { - _id: relatedRows[0]._id, - primaryDisplay: relatedRows[0].name, - }, - ], - related2: [ - { - _id: relatedRows[1]._id, - primaryDisplay: relatedRows[1].name, - }, - ], - }) - ) - }) - - it("can create rows with no relationships", async () => { - const row = await config.api.row.save(table._id!, { - name: "test", - }) - - expect(row.related1).toBeUndefined() - expect(row.related2).toBeUndefined() - }) - - it("can create rows with only one relationships field", async () => { - const row = await config.api.row.save(table._id!, { - name: "test", - related1: [], - related2: [relatedRows[1]._id!], - }) - - expect(row).toEqual( - expect.objectContaining({ - name: "test", - related2: [ - { - _id: relatedRows[1]._id, - primaryDisplay: 
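// Editor's note (illustrative sketch, not Budibase's binding engine, which this diff does
// not show): the binding tests above assert a contract rather than an implementation -
// defaults such as {{ sum 10 10 5 }} are evaluated when the row is saved, and a result that
// cannot be coerced to the column type is rejected with the 400 error text checked in the
// test. The helper below only illustrates that final coercion step; its name is an
// assumption.
function coerceNumberDefault(evaluated: string, fieldName: string): number {
  const value = Number(evaluated)
  if (Number.isNaN(value)) {
    // Mirrors the asserted message:
    // Invalid default value for field 'age' - Invalid number value "Invalid"
    throw new Error(
      `Invalid default value for field '${fieldName}' - Invalid number value "${evaluated}"`
    )
  }
  return value
}

// coerceNumberDefault("25", "age") -> 25; coerceNumberDefault("Invalid", "age") throws.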
relatedRows[1].name, - }, - ], - }) - ) - expect(row.related1).toBeUndefined() - }) - }) - }) - - describe("get", () => { - it("reads an existing row successfully", async () => { - const existing = await config.api.row.save(table._id!, {}) - - const res = await config.api.row.get(table._id!, existing._id!) - - expect(res).toEqual({ - ...existing, - ...defaultRowFields, - }) - }) - - it("returns 404 when row does not exist", async () => { - const table = await config.api.table.save(defaultTable()) - await config.api.row.save(table._id!, {}) - await config.api.row.get(table._id!, "1234567", { - status: 404, - }) - }) - - isInternal && - it("can search row from user table", async () => { - const res = await config.api.row.get( - InternalTables.USER_METADATA, - config.userMetadataId! - ) - - expect(res).toEqual({ - ...config.getUser(), - _id: config.userMetadataId!, - _rev: expect.any(String), - roles: undefined, - roleId: "ADMIN", - tableId: InternalTables.USER_METADATA, - }) - }) - }) - - describe("fetch", () => { - it("fetches all rows for given tableId", async () => { - const table = await config.api.table.save(defaultTable()) - const rows = await Promise.all([ - config.api.row.save(table._id!, {}), - config.api.row.save(table._id!, {}), - ]) - - const res = await config.api.row.fetch(table._id!) - expect(res.map(r => r._id)).toEqual( - expect.arrayContaining(rows.map(r => r._id)) - ) - }) - - it("returns 404 when table does not exist", async () => { - await config.api.row.fetch("1234567", { status: 404 }) - }) - }) - - describe("update", () => { - it("updates an existing row successfully", async () => { - const existing = await config.api.row.save(table._id!, {}) - const rowUsage = await getRowUsage() - - const res = await config.api.row.save(table._id!, { - _id: existing._id, - _rev: existing._rev, - name: "Updated Name", - }) - - expect(res.name).toEqual("Updated Name") - await assertRowUsage(rowUsage) - }) - - !isInternal && - it("can update a row on an external table with a primary key", async () => { - const tableName = uuid.v4().substring(0, 10) - await client!.schema.createTable(tableName, table => { - table.increments("id").primary() - table.string("name") - }) - - const res = await config.api.datasource.fetchSchema({ - datasourceId: datasource!._id!, - }) - const table = res.datasource.entities![tableName] - - const row = await config.api.row.save(table._id!, { - id: 1, - name: "Row 1", - }) - - const updatedRow = await config.api.row.save(table._id!, { - _id: row._id!, - name: "Row 1 Updated", - }) - - expect(updatedRow.name).toEqual("Row 1 Updated") - - const rows = await config.api.row.fetch(table._id!) - expect(rows).toHaveLength(1) - }) - - describe("relations to same table", () => { - let relatedRows: Row[] - - beforeAll(async () => { - const relatedTable = await config.api.table.save( - defaultTable({ - schema: { - name: { name: "name", type: FieldType.STRING }, - }, - }) - ) - const relatedTableId = relatedTable._id! 
- table = await config.api.table.save( - defaultTable({ - schema: { - name: { name: "name", type: FieldType.STRING }, - related1: { - type: FieldType.LINK, - name: "related1", - fieldName: "main1", - tableId: relatedTableId, - relationshipType: RelationshipType.MANY_TO_MANY, - }, - related2: { - type: FieldType.LINK, - name: "related2", - fieldName: "main2", - tableId: relatedTableId, - relationshipType: RelationshipType.MANY_TO_MANY, - }, - }, - }) - ) - relatedRows = await Promise.all([ - config.api.row.save(relatedTableId, { name: "foo" }), - config.api.row.save(relatedTableId, { name: "bar" }), - config.api.row.save(relatedTableId, { name: "baz" }), - config.api.row.save(relatedTableId, { name: "boo" }), - ]) - }) - - it("can edit rows with both relationships", async () => { - let row = await config.api.row.save(table._id!, { - name: "test", - related1: [relatedRows[0]._id!], - related2: [relatedRows[1]._id!], - }) - - row = await config.api.row.save(table._id!, { - ...row, - related1: [relatedRows[0]._id!, relatedRows[1]._id!], - related2: [relatedRows[2]._id!], - }) - - expect(row).toEqual( - expect.objectContaining({ - name: "test", - related1: expect.arrayContaining([ - { - _id: relatedRows[0]._id, - primaryDisplay: relatedRows[0].name, - }, - { - _id: relatedRows[1]._id, - primaryDisplay: relatedRows[1].name, - }, - ]), - related2: [ - { - _id: relatedRows[2]._id, - primaryDisplay: relatedRows[2].name, - }, - ], - }) - ) - }) - - it("can drop existing relationship", async () => { - let row = await config.api.row.save(table._id!, { - name: "test", - related1: [relatedRows[0]._id!], - related2: [relatedRows[1]._id!], - }) - - row = await config.api.row.save(table._id!, { - ...row, - related1: [], - related2: [relatedRows[2]._id!], - }) - - expect(row).toEqual( - expect.objectContaining({ - name: "test", - related2: [ - { - _id: relatedRows[2]._id, - primaryDisplay: relatedRows[2].name, - }, - ], - }) - ) - expect(row.related1).toBeUndefined() - }) - - it("can drop both relationships", async () => { - let row = await config.api.row.save(table._id!, { - name: "test", - related1: [relatedRows[0]._id!], - related2: [relatedRows[1]._id!], - }) - - row = await config.api.row.save(table._id!, { - ...row, - related1: [], - related2: [], - }) - - expect(row).toEqual( - expect.objectContaining({ - name: "test", - }) - ) - expect(row.related1).toBeUndefined() - expect(row.related2).toBeUndefined() - }) - }) - }) - - describe("patch", () => { - let otherTable: Table - - beforeAll(async () => { - table = await config.api.table.save(defaultTable()) - otherTable = await config.api.table.save( - defaultTable({ - schema: { - relationship: { - name: "relationship", - relationshipType: RelationshipType.ONE_TO_MANY, - type: FieldType.LINK, - tableId: table._id!, - fieldName: "relationship", - }, - }, - }) - ) - }) - - it("should update only the fields that are supplied", async () => { - const existing = await config.api.row.save(table._id!, {}) - - const rowUsage = await getRowUsage() - - const row = await config.api.row.patch(table._id!, { - _id: existing._id!, - _rev: existing._rev!, - tableId: table._id!, - name: "Updated Name", - }) - - expect(row.name).toEqual("Updated Name") - expect(row.description).toEqual(existing.description) - - const savedRow = await config.api.row.get(table._id!, row._id!) 
- - expect(savedRow.description).toEqual(existing.description) - expect(savedRow.name).toEqual("Updated Name") - await assertRowUsage(rowUsage) - }) - - it("should update only the fields that are supplied and emit the correct oldRow", async () => { - let beforeRow = await config.api.row.save(table._id!, { - name: "test", - description: "test", - }) - const opts = { - name: "row:update", - matchFn: (event: UpdatedRowEventEmitter) => - event.row._id === beforeRow._id, - } - const event = await waitForEvent(opts, async () => { - await config.api.row.patch(table._id!, { - _id: beforeRow._id!, - _rev: beforeRow._rev!, - tableId: table._id!, - name: "Updated Name", - }) - }) - - expect(event.oldRow).toBeDefined() - expect(event.oldRow.name).toEqual("test") - expect(event.row.name).toEqual("Updated Name") - expect(event.oldRow.description).toEqual(beforeRow.description) - expect(event.row.description).toEqual(beforeRow.description) - }) - - it("should throw an error when given improper types", async () => { - const existing = await config.api.row.save(table._id!, {}) - const rowUsage = await getRowUsage() - - await config.api.row.patch( - table._id!, - { - _id: existing._id!, - _rev: existing._rev!, - tableId: table._id!, - name: 1, - }, - { status: 400 } - ) - - await assertRowUsage(rowUsage) - }) - - it("should not overwrite links if those links are not set", async () => { - let linkField: FieldSchema = { - type: FieldType.LINK, - name: "", - fieldName: "", - constraints: { - type: "array", - presence: false, - }, - relationshipType: RelationshipType.ONE_TO_MANY, - tableId: InternalTable.USER_METADATA, - } - - let table = await config.api.table.save({ - name: "TestTable", - type: "table", - sourceType: TableSourceType.INTERNAL, - sourceId: INTERNAL_TABLE_SOURCE_ID, - schema: { - user1: { ...linkField, name: "user1", fieldName: "user1" }, - user2: { ...linkField, name: "user2", fieldName: "user2" }, - }, - }) - - let user1 = await config.createUser() - let user2 = await config.createUser() - - let row = await config.api.row.save(table._id!, { - user1: [{ _id: user1._id }], - user2: [{ _id: user2._id }], - }) - - let getResp = await config.api.row.get(table._id!, row._id!) - expect(getResp.user1[0]._id).toEqual(user1._id) - expect(getResp.user2[0]._id).toEqual(user2._id) - - let patchResp = await config.api.row.patch(table._id!, { - _id: row._id!, - _rev: row._rev!, - tableId: table._id!, - user1: [{ _id: user2._id }], - }) - expect(patchResp.user1[0]._id).toEqual(user2._id) - expect(patchResp.user2[0]._id).toEqual(user2._id) - - getResp = await config.api.row.get(table._id!, row._id!) 
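// Editor's note (illustrative sketch, not the server implementation): it expresses the
// patch contract the tests above verify - only the supplied keys change, and everything
// else on the stored row, including link columns that were not sent, is preserved.
type StoredRow = Record<string, unknown> & { _id: string; _rev: string }

function applyPatch(existing: StoredRow, patch: Partial<StoredRow>): StoredRow {
  // Keys absent from the patch keep their stored value; explicitly supplied values win.
  return { ...existing, ...patch }
}

// applyPatch({ _id, _rev, name: "test", description: "test" }, { name: "Updated Name" })
//   -> description stays "test", name becomes "Updated Name"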
- expect(getResp.user1[0]._id).toEqual(user2._id) - expect(getResp.user2[0]._id).toEqual(user2._id) - }) - - it("should be able to remove a relationship from many side", async () => { - const row = await config.api.row.save(otherTable._id!, { - name: "test", - description: "test", - }) - const row2 = await config.api.row.save(otherTable._id!, { - name: "test", - description: "test", - }) - const { _id } = await config.api.row.save(table._id!, { - relationship: [{ _id: row._id }, { _id: row2._id }], - }) - const relatedRow = await config.api.row.get(table._id!, _id!, { - status: 200, - }) - expect(relatedRow.relationship.length).toEqual(2) - await config.api.row.save(table._id!, { - ...relatedRow, - relationship: [{ _id: row._id }], - }) - const afterRelatedRow = await config.api.row.get(table._id!, _id!, { - status: 200, - }) - expect(afterRelatedRow.relationship.length).toEqual(1) - expect(afterRelatedRow.relationship[0]._id).toEqual(row._id) - }) - - it("should be able to update relationships when both columns are same name", async () => { - let row = await config.api.row.save(table._id!, { - name: "test", - description: "test", - }) - let row2 = await config.api.row.save(otherTable._id!, { - name: "test", - description: "test", - relationship: [row._id], - }) - row = await config.api.row.get(table._id!, row._id!) - expect(row.relationship.length).toBe(1) - const resp = await config.api.row.patch(table._id!, { - _id: row._id!, - _rev: row._rev!, - tableId: row.tableId!, - name: "test2", - relationship: [row2._id], - }) - expect(resp.relationship.length).toBe(1) - }) - - !isInternal && - // MSSQL needs a setting called IDENTITY_INSERT to be set to ON to allow writing - // to identity columns. This is not something Budibase does currently. - !isMSSQL && - it("should support updating fields that are part of a composite key", async () => { - const tableRequest = saveTableRequest({ - primary: ["number", "string"], - schema: { - string: { - type: FieldType.STRING, - name: "string", - }, - number: { - type: FieldType.NUMBER, - name: "number", - }, - }, - }) - - delete tableRequest.schema.id - - const table = await config.api.table.save(tableRequest) - - const stringValue = generator.word() - - // MySQL and MariaDB auto-increment fields have a minimum value of 1. If - // you try to save a row with a value of 0 it will use 1 instead. - const naturalValue = generator.integer({ min: 1, max: 1000 }) - - const existing = await config.api.row.save(table._id!, { - string: stringValue, - number: naturalValue, - }) - - expect(existing._id).toEqual( - `%5B${naturalValue}%2C'${stringValue}'%5D` - ) - - const row = await config.api.row.patch(table._id!, { - _id: existing._id!, - _rev: existing._rev!, - tableId: table._id!, - string: stringValue, - number: 1500, - }) - - expect(row._id).toEqual(`%5B${"1500"}%2C'${stringValue}'%5D`) - }) - }) - - describe("destroy", () => { - beforeAll(async () => { - table = await config.api.table.save(defaultTable()) - }) - - it("should be able to delete a row", async () => { - const createdRow = await config.api.row.save(table._id!, {}) - const rowUsage = await getRowUsage() - - const res = await config.api.row.bulkDelete(table._id!, { - rows: [createdRow], - }) - expect(res[0]._id).toEqual(createdRow._id) - await assertRowUsage(isInternal ? 
rowUsage - 1 : rowUsage) - }) - - it("should be able to delete a row with ID only", async () => { - const createdRow = await config.api.row.save(table._id!, {}) - const rowUsage = await getRowUsage() - - const res = await config.api.row.bulkDelete(table._id!, { - rows: [createdRow._id!], - }) - expect(res[0]._id).toEqual(createdRow._id) - expect(res[0].tableId).toEqual(table._id!) - await assertRowUsage(isInternal ? rowUsage - 1 : rowUsage) - }) - - it("should be able to bulk delete rows, including a row that doesn't exist", async () => { - const createdRow = await config.api.row.save(table._id!, {}) - const createdRow2 = await config.api.row.save(table._id!, {}) - - const res = await config.api.row.bulkDelete(table._id!, { - rows: [createdRow, createdRow2, { _id: "9999999" }], - }) - - expect(res.map(r => r._id)).toEqual( - expect.arrayContaining([createdRow._id, createdRow2._id]) - ) - expect(res.length).toEqual(2) - }) - - describe("relations to same table", () => { - let relatedRows: Row[] - - beforeAll(async () => { - const relatedTable = await config.api.table.save( - defaultTable({ - schema: { - name: { name: "name", type: FieldType.STRING }, - }, - }) - ) - const relatedTableId = relatedTable._id! - table = await config.api.table.save( - defaultTable({ - schema: { - name: { name: "name", type: FieldType.STRING }, - related1: { - type: FieldType.LINK, - name: "related1", - fieldName: "main1", - tableId: relatedTableId, - relationshipType: RelationshipType.MANY_TO_MANY, - }, - related2: { - type: FieldType.LINK, - name: "related2", - fieldName: "main2", - tableId: relatedTableId, - relationshipType: RelationshipType.MANY_TO_MANY, - }, - }, - }) - ) - relatedRows = await Promise.all([ - config.api.row.save(relatedTableId, { name: "foo" }), - config.api.row.save(relatedTableId, { name: "bar" }), - config.api.row.save(relatedTableId, { name: "baz" }), - config.api.row.save(relatedTableId, { name: "boo" }), - ]) - }) - - it("can delete rows with both relationships", async () => { - const row = await config.api.row.save(table._id!, { - name: "test", - related1: [relatedRows[0]._id!], - related2: [relatedRows[1]._id!], - }) - - await config.api.row.delete(table._id!, { _id: row._id! }) - - await config.api.row.get(table._id!, row._id!, { status: 404 }) - }) - - it("can delete rows with empty relationships", async () => { - const row = await config.api.row.save(table._id!, { - name: "test", - related1: [], - related2: [], - }) - - await config.api.row.delete(table._id!, { _id: row._id! 
}) - - await config.api.row.get(table._id!, row._id!, { status: 404 }) - }) - }) - }) - - describe("validate", () => { - beforeAll(async () => { - table = await config.api.table.save(defaultTable()) - }) - - it("should return no errors on valid row", async () => { - const rowUsage = await getRowUsage() - - const res = await config.api.row.validate(table._id!, { name: "ivan" }) - - expect(res.valid).toBe(true) - expect(Object.keys(res.errors)).toEqual([]) - await assertRowUsage(rowUsage) - }) - - it("should errors on invalid row", async () => { - const rowUsage = await getRowUsage() - - const res = await config.api.row.validate(table._id!, { name: 1 }) - - if (isInternal) { - expect(res.valid).toBe(false) - expect(Object.keys(res.errors)).toEqual(["name"]) - } else { - // Validation for external is not implemented, so it will always return valid - expect(res.valid).toBe(true) - expect(Object.keys(res.errors)).toEqual([]) - } - await assertRowUsage(rowUsage) - }) - }) - - describe("bulkDelete", () => { - beforeAll(async () => { - table = await config.api.table.save(defaultTable()) - }) - - it("should be able to delete a bulk set of rows", async () => { - const row1 = await config.api.row.save(table._id!, {}) - const row2 = await config.api.row.save(table._id!, {}) - const rowUsage = await getRowUsage() - - const res = await config.api.row.bulkDelete(table._id!, { - rows: [row1, row2], - }) - - expect(res.length).toEqual(2) - await config.api.row.get(table._id!, row1._id!, { status: 404 }) - await assertRowUsage(isInternal ? rowUsage - 2 : rowUsage) - }) - - it("should be able to delete a variety of row set types", async () => { - const [row1, row2, row3] = await Promise.all([ - config.api.row.save(table._id!, {}), - config.api.row.save(table._id!, {}), - config.api.row.save(table._id!, {}), - ]) - const rowUsage = await getRowUsage() - - const res = await config.api.row.bulkDelete(table._id!, { - rows: [row1, row2._id!, { _id: row3._id }], - }) - - expect(res.length).toEqual(3) - await config.api.row.get(table._id!, row1._id!, { status: 404 }) - await assertRowUsage(isInternal ? rowUsage - 3 : rowUsage) - }) - - it("should accept a valid row object and delete the row", async () => { - const row1 = await config.api.row.save(table._id!, {}) - const rowUsage = await getRowUsage() - - const res = await config.api.row.delete(table._id!, row1 as DeleteRow) - - expect(res.id).toEqual(row1._id) - await config.api.row.get(table._id!, row1._id!, { status: 404 }) - await assertRowUsage(isInternal ? rowUsage - 1 : rowUsage) - }) - - it.each([{ not: "valid" }, { rows: 123 }, "invalid"])( - "should ignore malformed/invalid delete request: %s", - async (request: any) => { - const rowUsage = await getRowUsage() - - await config.api.row.delete(table._id!, request, { - status: 400, - body: { - message: "Invalid delete rows request", - }, - }) - - await assertRowUsage(rowUsage) - } - ) - }) - - describe("bulkImport", () => { - isInternal && - it("should update Auto ID field after bulk import", async () => { - const table = await config.api.table.save( - saveTableRequest({ - primary: ["autoId"], - schema: { - autoId: { - name: "autoId", - type: FieldType.NUMBER, - subtype: AutoFieldSubType.AUTO_ID, - autocolumn: true, - constraints: { - type: "number", - presence: false, + }) + ) + const relatedTableId = relatedTable._id! 
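+ // the main table below defines two separate many-to-many link fields (related1 and related2) that both point at the same related table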
+ table = await config.api.table.save( + defaultTable({ + schema: { + name: { name: "name", type: FieldType.STRING }, + related1: { + type: FieldType.LINK, + name: "related1", + fieldName: "main1", + tableId: relatedTableId, + relationshipType: RelationshipType.MANY_TO_MANY, + }, + related2: { + type: FieldType.LINK, + name: "related2", + fieldName: "main2", + tableId: relatedTableId, + relationshipType: RelationshipType.MANY_TO_MANY, }, }, - }, - }) - ) - - let row = await config.api.row.save(table._id!, {}) - expect(row.autoId).toEqual(1) - - await config.api.row.bulkImport(table._id!, { - rows: [{ autoId: 2 }], + }) + ) + relatedRows = await Promise.all([ + config.api.row.save(relatedTableId, { name: "foo" }), + config.api.row.save(relatedTableId, { name: "bar" }), + config.api.row.save(relatedTableId, { name: "baz" }), + config.api.row.save(relatedTableId, { name: "boo" }), + ]) }) - row = await config.api.row.save(table._id!, {}) - expect(row.autoId).toEqual(3) + it("can create rows with both relationships", async () => { + const row = await config.api.row.save(table._id!, { + name: "test", + related1: [relatedRows[0]._id!], + related2: [relatedRows[1]._id!], + }) + + expect(row).toEqual( + expect.objectContaining({ + name: "test", + related1: [ + { + _id: relatedRows[0]._id, + primaryDisplay: relatedRows[0].name, + }, + ], + related2: [ + { + _id: relatedRows[1]._id, + primaryDisplay: relatedRows[1].name, + }, + ], + }) + ) + }) + + it("can create rows with no relationships", async () => { + const row = await config.api.row.save(table._id!, { + name: "test", + }) + + expect(row.related1).toBeUndefined() + expect(row.related2).toBeUndefined() + }) + + it("can create rows with only one relationships field", async () => { + const row = await config.api.row.save(table._id!, { + name: "test", + related1: [], + related2: [relatedRows[1]._id!], + }) + + expect(row).toEqual( + expect.objectContaining({ + name: "test", + related2: [ + { + _id: relatedRows[1]._id, + primaryDisplay: relatedRows[1].name, + }, + ], + }) + ) + expect(row.related1).toBeUndefined() + }) + }) + }) + + describe("get", () => { + it("reads an existing row successfully", async () => { + const existing = await config.api.row.save(table._id!, {}) + + const res = await config.api.row.get(table._id!, existing._id!) + + expect(res).toEqual({ + ...existing, + ...defaultRowFields, + }) }) - isInternal && - it("should reject bulkImporting relationship fields", async () => { - const table1 = await config.api.table.save(saveTableRequest()) - const table2 = await config.api.table.save( - saveTableRequest({ + it("returns 404 when row does not exist", async () => { + const table = await config.api.table.save(defaultTable()) + await config.api.row.save(table._id!, {}) + await config.api.row.get(table._id!, "1234567", { + status: 404, + }) + }) + + isInternal && + it("can search row from user table", async () => { + const res = await config.api.row.get( + InternalTables.USER_METADATA, + config.userMetadataId! + ) + + expect(res).toEqual({ + ...config.getUser(), + _id: config.userMetadataId!, + _rev: expect.any(String), + roles: undefined, + roleId: "ADMIN", + tableId: InternalTables.USER_METADATA, + }) + }) + }) + + describe("fetch", () => { + it("fetches all rows for given tableId", async () => { + const table = await config.api.table.save(defaultTable()) + const rows = await Promise.all([ + config.api.row.save(table._id!, {}), + config.api.row.save(table._id!, {}), + ]) + + const res = await config.api.row.fetch(table._id!) 
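+ // fetch should return every saved row; arrayContaining is used below because no ordering is guaranteed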
+ expect(res.map(r => r._id)).toEqual( + expect.arrayContaining(rows.map(r => r._id)) + ) + }) + + it("returns 404 when table does not exist", async () => { + await config.api.row.fetch("1234567", { status: 404 }) + }) + }) + + describe("update", () => { + it("updates an existing row successfully", async () => { + const existing = await config.api.row.save(table._id!, {}) + const rowUsage = await getRowUsage() + + const res = await config.api.row.save(table._id!, { + _id: existing._id, + _rev: existing._rev, + name: "Updated Name", + }) + + expect(res.name).toEqual("Updated Name") + await assertRowUsage(rowUsage) + }) + + !isInternal && + it("can update a row on an external table with a primary key", async () => { + const tableName = uuid.v4().substring(0, 10) + await client!.schema.createTable(tableName, table => { + table.increments("id").primary() + table.string("name") + }) + + const res = await config.api.datasource.fetchSchema({ + datasourceId: datasource!._id!, + }) + const table = res.datasource.entities![tableName] + + const row = await config.api.row.save(table._id!, { + id: 1, + name: "Row 1", + }) + + const updatedRow = await config.api.row.save(table._id!, { + _id: row._id!, + name: "Row 1 Updated", + }) + + expect(updatedRow.name).toEqual("Row 1 Updated") + + const rows = await config.api.row.fetch(table._id!) + expect(rows).toHaveLength(1) + }) + + describe("relations to same table", () => { + let relatedRows: Row[] + + beforeAll(async () => { + const relatedTable = await config.api.table.save( + defaultTable({ + schema: { + name: { name: "name", type: FieldType.STRING }, + }, + }) + ) + const relatedTableId = relatedTable._id! + table = await config.api.table.save( + defaultTable({ + schema: { + name: { name: "name", type: FieldType.STRING }, + related1: { + type: FieldType.LINK, + name: "related1", + fieldName: "main1", + tableId: relatedTableId, + relationshipType: RelationshipType.MANY_TO_MANY, + }, + related2: { + type: FieldType.LINK, + name: "related2", + fieldName: "main2", + tableId: relatedTableId, + relationshipType: RelationshipType.MANY_TO_MANY, + }, + }, + }) + ) + relatedRows = await Promise.all([ + config.api.row.save(relatedTableId, { name: "foo" }), + config.api.row.save(relatedTableId, { name: "bar" }), + config.api.row.save(relatedTableId, { name: "baz" }), + config.api.row.save(relatedTableId, { name: "boo" }), + ]) + }) + + it("can edit rows with both relationships", async () => { + let row = await config.api.row.save(table._id!, { + name: "test", + related1: [relatedRows[0]._id!], + related2: [relatedRows[1]._id!], + }) + + row = await config.api.row.save(table._id!, { + ...row, + related1: [relatedRows[0]._id!, relatedRows[1]._id!], + related2: [relatedRows[2]._id!], + }) + + expect(row).toEqual( + expect.objectContaining({ + name: "test", + related1: expect.arrayContaining([ + { + _id: relatedRows[0]._id, + primaryDisplay: relatedRows[0].name, + }, + { + _id: relatedRows[1]._id, + primaryDisplay: relatedRows[1].name, + }, + ]), + related2: [ + { + _id: relatedRows[2]._id, + primaryDisplay: relatedRows[2].name, + }, + ], + }) + ) + }) + + it("can drop existing relationship", async () => { + let row = await config.api.row.save(table._id!, { + name: "test", + related1: [relatedRows[0]._id!], + related2: [relatedRows[1]._id!], + }) + + row = await config.api.row.save(table._id!, { + ...row, + related1: [], + related2: [relatedRows[2]._id!], + }) + + expect(row).toEqual( + expect.objectContaining({ + name: "test", + related2: [ + { + _id: 
relatedRows[2]._id, + primaryDisplay: relatedRows[2].name, + }, + ], + }) + ) + expect(row.related1).toBeUndefined() + }) + + it("can drop both relationships", async () => { + let row = await config.api.row.save(table._id!, { + name: "test", + related1: [relatedRows[0]._id!], + related2: [relatedRows[1]._id!], + }) + + row = await config.api.row.save(table._id!, { + ...row, + related1: [], + related2: [], + }) + + expect(row).toEqual( + expect.objectContaining({ + name: "test", + }) + ) + expect(row.related1).toBeUndefined() + expect(row.related2).toBeUndefined() + }) + }) + }) + + describe("patch", () => { + let otherTable: Table + + beforeAll(async () => { + table = await config.api.table.save(defaultTable()) + otherTable = await config.api.table.save( + defaultTable({ schema: { relationship: { name: "relationship", - type: FieldType.LINK, - tableId: table1._id!, relationshipType: RelationshipType.ONE_TO_MANY, + type: FieldType.LINK, + tableId: table._id!, fieldName: "relationship", }, }, }) ) - - const table1Row1 = await config.api.row.save(table1._id!, {}) - await config.api.row.bulkImport( - table2._id!, - { - rows: [{ relationship: [table1Row1._id!] }], - }, - { - status: 400, - body: { - message: - 'Can\'t bulk import relationship fields for internal databases, found value in field "relationship"', - }, - } - ) }) - it("should be able to bulkImport rows", async () => { - const table = await config.api.table.save( - saveTableRequest({ + it("should update only the fields that are supplied", async () => { + const existing = await config.api.row.save(table._id!, {}) + + const rowUsage = await getRowUsage() + + const row = await config.api.row.patch(table._id!, { + _id: existing._id!, + _rev: existing._rev!, + tableId: table._id!, + name: "Updated Name", + }) + + expect(row.name).toEqual("Updated Name") + expect(row.description).toEqual(existing.description) + + const savedRow = await config.api.row.get(table._id!, row._id!) 
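+ // re-read the row to confirm the patch persisted and that the unspecified description field was left untouched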
+ + expect(savedRow.description).toEqual(existing.description) + expect(savedRow.name).toEqual("Updated Name") + await assertRowUsage(rowUsage) + }) + + it("should update only the fields that are supplied and emit the correct oldRow", async () => { + let beforeRow = await config.api.row.save(table._id!, { + name: "test", + description: "test", + }) + const opts = { + name: "row:update", + matchFn: (event: UpdatedRowEventEmitter) => + event.row._id === beforeRow._id, + } + const event = await waitForEvent(opts, async () => { + await config.api.row.patch(table._id!, { + _id: beforeRow._id!, + _rev: beforeRow._rev!, + tableId: table._id!, + name: "Updated Name", + }) + }) + + expect(event.oldRow).toBeDefined() + expect(event.oldRow.name).toEqual("test") + expect(event.row.name).toEqual("Updated Name") + expect(event.oldRow.description).toEqual(beforeRow.description) + expect(event.row.description).toEqual(beforeRow.description) + }) + + it("should throw an error when given improper types", async () => { + const existing = await config.api.row.save(table._id!, {}) + const rowUsage = await getRowUsage() + + await config.api.row.patch( + table._id!, + { + _id: existing._id!, + _rev: existing._rev!, + tableId: table._id!, + name: 1, + }, + { status: 400 } + ) + + await assertRowUsage(rowUsage) + }) + + it("should not overwrite links if those links are not set", async () => { + let linkField: FieldSchema = { + type: FieldType.LINK, + name: "", + fieldName: "", + constraints: { + type: "array", + presence: false, + }, + relationshipType: RelationshipType.ONE_TO_MANY, + tableId: InternalTable.USER_METADATA, + } + + let table = await config.api.table.save({ + name: "TestTable", + type: "table", + sourceType: TableSourceType.INTERNAL, + sourceId: INTERNAL_TABLE_SOURCE_ID, schema: { - name: { - type: FieldType.STRING, - name: "name", - }, - description: { - type: FieldType.STRING, - name: "description", - }, + user1: { ...linkField, name: "user1", fieldName: "user1" }, + user2: { ...linkField, name: "user2", fieldName: "user2" }, }, }) - ) - const rowUsage = await getRowUsage() + let user1 = await config.createUser() + let user2 = await config.createUser() - await config.api.row.bulkImport(table._id!, { - rows: [ - { - name: "Row 1", - description: "Row 1 description", - }, - { - name: "Row 2", - description: "Row 2 description", - }, - ], + let row = await config.api.row.save(table._id!, { + user1: [{ _id: user1._id }], + user2: [{ _id: user2._id }], + }) + + let getResp = await config.api.row.get(table._id!, row._id!) + expect(getResp.user1[0]._id).toEqual(user1._id) + expect(getResp.user2[0]._id).toEqual(user2._id) + + let patchResp = await config.api.row.patch(table._id!, { + _id: row._id!, + _rev: row._rev!, + tableId: table._id!, + user1: [{ _id: user2._id }], + }) + expect(patchResp.user1[0]._id).toEqual(user2._id) + expect(patchResp.user2[0]._id).toEqual(user2._id) + + getResp = await config.api.row.get(table._id!, row._id!) + expect(getResp.user1[0]._id).toEqual(user2._id) + expect(getResp.user2[0]._id).toEqual(user2._id) }) - const rows = await config.api.row.fetch(table._id!) 
- expect(rows.length).toEqual(2) + it("should be able to remove a relationship from many side", async () => { + const row = await config.api.row.save(otherTable._id!, { + name: "test", + description: "test", + }) + const row2 = await config.api.row.save(otherTable._id!, { + name: "test", + description: "test", + }) + const { _id } = await config.api.row.save(table._id!, { + relationship: [{ _id: row._id }, { _id: row2._id }], + }) + const relatedRow = await config.api.row.get(table._id!, _id!, { + status: 200, + }) + expect(relatedRow.relationship.length).toEqual(2) + await config.api.row.save(table._id!, { + ...relatedRow, + relationship: [{ _id: row._id }], + }) + const afterRelatedRow = await config.api.row.get(table._id!, _id!, { + status: 200, + }) + expect(afterRelatedRow.relationship.length).toEqual(1) + expect(afterRelatedRow.relationship[0]._id).toEqual(row._id) + }) - rows.sort((a, b) => a.name.localeCompare(b.name)) - expect(rows[0].name).toEqual("Row 1") - expect(rows[0].description).toEqual("Row 1 description") - expect(rows[1].name).toEqual("Row 2") - expect(rows[1].description).toEqual("Row 2 description") + it("should be able to update relationships when both columns are same name", async () => { + let row = await config.api.row.save(table._id!, { + name: "test", + description: "test", + }) + let row2 = await config.api.row.save(otherTable._id!, { + name: "test", + description: "test", + relationship: [row._id], + }) + row = await config.api.row.get(table._id!, row._id!) + expect(row.relationship.length).toBe(1) + const resp = await config.api.row.patch(table._id!, { + _id: row._id!, + _rev: row._rev!, + tableId: row.tableId!, + name: "test2", + relationship: [row2._id], + }) + expect(resp.relationship.length).toBe(1) + }) - await assertRowUsage(isInternal ? rowUsage + 2 : rowUsage) + !isInternal && + // MSSQL needs a setting called IDENTITY_INSERT to be set to ON to allow writing + // to identity columns. This is not something Budibase does currently. + !isMSSQL && + it("should support updating fields that are part of a composite key", async () => { + const tableRequest = saveTableRequest({ + primary: ["number", "string"], + schema: { + string: { + type: FieldType.STRING, + name: "string", + }, + number: { + type: FieldType.NUMBER, + name: "number", + }, + }, + }) + + delete tableRequest.schema.id + + const table = await config.api.table.save(tableRequest) + + const stringValue = generator.word() + + // MySQL and MariaDB auto-increment fields have a minimum value of 1. If + // you try to save a row with a value of 0 it will use 1 instead. 
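+ // the save below is expected to produce an _id that is the URL-encoded JSON array of the composite key values, as asserted further down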
+ const naturalValue = generator.integer({ min: 1, max: 1000 }) + + const existing = await config.api.row.save(table._id!, { + string: stringValue, + number: naturalValue, + }) + + expect(existing._id).toEqual( + `%5B${naturalValue}%2C'${stringValue}'%5D` + ) + + const row = await config.api.row.patch(table._id!, { + _id: existing._id!, + _rev: existing._rev!, + tableId: table._id!, + string: stringValue, + number: 1500, + }) + + expect(row._id).toEqual(`%5B${"1500"}%2C'${stringValue}'%5D`) + }) }) - isInternal && - it("should be able to update existing rows on bulkImport", async () => { - const table = await config.api.table.save( - saveTableRequest({ - schema: { - name: { - type: FieldType.STRING, - name: "name", - }, - description: { - type: FieldType.STRING, - name: "description", - }, - }, - }) - ) - - const existingRow = await config.api.row.save(table._id!, { - name: "Existing row", - description: "Existing description", - }) - - const rowUsage = await getRowUsage() - - await config.api.row.bulkImport(table._id!, { - rows: [ - { - name: "Row 1", - description: "Row 1 description", - }, - { ...existingRow, name: "Updated existing row" }, - { - name: "Row 2", - description: "Row 2 description", - }, - ], - identifierFields: ["_id"], - }) - - const rows = await config.api.row.fetch(table._id!) - expect(rows.length).toEqual(3) - - rows.sort((a, b) => a.name.localeCompare(b.name)) - expect(rows[0].name).toEqual("Row 1") - expect(rows[0].description).toEqual("Row 1 description") - expect(rows[1].name).toEqual("Row 2") - expect(rows[1].description).toEqual("Row 2 description") - expect(rows[2].name).toEqual("Updated existing row") - expect(rows[2].description).toEqual("Existing description") - - await assertRowUsage(rowUsage + 2) + describe("destroy", () => { + beforeAll(async () => { + table = await config.api.table.save(defaultTable()) }) - isInternal && - it("should create new rows if not identifierFields are provided", async () => { - const table = await config.api.table.save( - saveTableRequest({ - schema: { - name: { - type: FieldType.STRING, - name: "name", - }, - description: { - type: FieldType.STRING, - name: "description", - }, - }, - }) - ) - - const existingRow = await config.api.row.save(table._id!, { - name: "Existing row", - description: "Existing description", - }) - + it("should be able to delete a row", async () => { + const createdRow = await config.api.row.save(table._id!, {}) const rowUsage = await getRowUsage() - await config.api.row.bulkImport(table._id!, { - rows: [ - { - name: "Row 1", - description: "Row 1 description", - }, - { ...existingRow, name: "Updated existing row" }, - { - name: "Row 2", - description: "Row 2 description", - }, - ], + const res = await config.api.row.bulkDelete(table._id!, { + rows: [createdRow], }) - - const rows = await config.api.row.fetch(table._id!) - expect(rows.length).toEqual(4) - - rows.sort((a, b) => a.name.localeCompare(b.name)) - expect(rows[0].name).toEqual("Existing row") - expect(rows[0].description).toEqual("Existing description") - expect(rows[1].name).toEqual("Row 1") - expect(rows[1].description).toEqual("Row 1 description") - expect(rows[2].name).toEqual("Row 2") - expect(rows[2].description).toEqual("Row 2 description") - expect(rows[3].name).toEqual("Updated existing row") - expect(rows[3].description).toEqual("Existing description") - - await assertRowUsage(rowUsage + 3) + expect(res[0]._id).toEqual(createdRow._id) + await assertRowUsage(isInternal ? 
rowUsage - 1 : rowUsage) }) - // Upserting isn't yet supported in MSSQL / Oracle, see: - // https://github.com/knex/knex/pull/6050 - !isMSSQL && - !isOracle && - it("should be able to update existing rows with bulkImport", async () => { - const table = await config.api.table.save( - saveTableRequest({ - primary: ["userId"], - schema: { - userId: { - type: FieldType.NUMBER, - name: "userId", - constraints: { - presence: true, + it("should be able to delete a row with ID only", async () => { + const createdRow = await config.api.row.save(table._id!, {}) + const rowUsage = await getRowUsage() + + const res = await config.api.row.bulkDelete(table._id!, { + rows: [createdRow._id!], + }) + expect(res[0]._id).toEqual(createdRow._id) + expect(res[0].tableId).toEqual(table._id!) + await assertRowUsage(isInternal ? rowUsage - 1 : rowUsage) + }) + + it("should be able to bulk delete rows, including a row that doesn't exist", async () => { + const createdRow = await config.api.row.save(table._id!, {}) + const createdRow2 = await config.api.row.save(table._id!, {}) + + const res = await config.api.row.bulkDelete(table._id!, { + rows: [createdRow, createdRow2, { _id: "9999999" }], + }) + + expect(res.map(r => r._id)).toEqual( + expect.arrayContaining([createdRow._id, createdRow2._id]) + ) + expect(res.length).toEqual(2) + }) + + describe("relations to same table", () => { + let relatedRows: Row[] + + beforeAll(async () => { + const relatedTable = await config.api.table.save( + defaultTable({ + schema: { + name: { name: "name", type: FieldType.STRING }, + }, + }) + ) + const relatedTableId = relatedTable._id! + table = await config.api.table.save( + defaultTable({ + schema: { + name: { name: "name", type: FieldType.STRING }, + related1: { + type: FieldType.LINK, + name: "related1", + fieldName: "main1", + tableId: relatedTableId, + relationshipType: RelationshipType.MANY_TO_MANY, + }, + related2: { + type: FieldType.LINK, + name: "related2", + fieldName: "main2", + tableId: relatedTableId, + relationshipType: RelationshipType.MANY_TO_MANY, }, }, + }) + ) + relatedRows = await Promise.all([ + config.api.row.save(relatedTableId, { name: "foo" }), + config.api.row.save(relatedTableId, { name: "bar" }), + config.api.row.save(relatedTableId, { name: "baz" }), + config.api.row.save(relatedTableId, { name: "boo" }), + ]) + }) + + it("can delete rows with both relationships", async () => { + const row = await config.api.row.save(table._id!, { + name: "test", + related1: [relatedRows[0]._id!], + related2: [relatedRows[1]._id!], + }) + + await config.api.row.delete(table._id!, { _id: row._id! }) + + await config.api.row.get(table._id!, row._id!, { status: 404 }) + }) + + it("can delete rows with empty relationships", async () => { + const row = await config.api.row.save(table._id!, { + name: "test", + related1: [], + related2: [], + }) + + await config.api.row.delete(table._id!, { _id: row._id! 
}) + + await config.api.row.get(table._id!, row._id!, { status: 404 }) + }) + }) + }) + + describe("validate", () => { + beforeAll(async () => { + table = await config.api.table.save(defaultTable()) + }) + + it("should return no errors on valid row", async () => { + const rowUsage = await getRowUsage() + + const res = await config.api.row.validate(table._id!, { + name: "ivan", + }) + + expect(res.valid).toBe(true) + expect(Object.keys(res.errors)).toEqual([]) + await assertRowUsage(rowUsage) + }) + + it("should error on invalid row", async () => { + const rowUsage = await getRowUsage() + + const res = await config.api.row.validate(table._id!, { name: 1 }) + + if (isInternal) { + expect(res.valid).toBe(false) + expect(Object.keys(res.errors)).toEqual(["name"]) + } else { + // Validation for external is not implemented, so it will always return valid + expect(res.valid).toBe(true) + expect(Object.keys(res.errors)).toEqual([]) + } + await assertRowUsage(rowUsage) + }) + }) + + describe("bulkDelete", () => { + beforeAll(async () => { + table = await config.api.table.save(defaultTable()) + }) + + it("should be able to delete a bulk set of rows", async () => { + const row1 = await config.api.row.save(table._id!, {}) + const row2 = await config.api.row.save(table._id!, {}) + const rowUsage = await getRowUsage() + + const res = await config.api.row.bulkDelete(table._id!, { + rows: [row1, row2], + }) + + expect(res.length).toEqual(2) + await config.api.row.get(table._id!, row1._id!, { status: 404 }) + await assertRowUsage(isInternal ? rowUsage - 2 : rowUsage) + }) + + it("should be able to delete a variety of row set types", async () => { + const [row1, row2, row3] = await Promise.all([ + config.api.row.save(table._id!, {}), + config.api.row.save(table._id!, {}), + config.api.row.save(table._id!, {}), + ]) + const rowUsage = await getRowUsage() + + const res = await config.api.row.bulkDelete(table._id!, { + rows: [row1, row2._id!, { _id: row3._id }], + }) + + expect(res.length).toEqual(3) + await config.api.row.get(table._id!, row1._id!, { status: 404 }) + await assertRowUsage(isInternal ? rowUsage - 3 : rowUsage) + }) + + it("should accept a valid row object and delete the row", async () => { + const row1 = await config.api.row.save(table._id!, {}) + const rowUsage = await getRowUsage() + + const res = await config.api.row.delete(table._id!, row1 as DeleteRow) + + expect(res.id).toEqual(row1._id) + await config.api.row.get(table._id!, row1._id!, { status: 404 }) + await assertRowUsage(isInternal ?
rowUsage - 1 : rowUsage) + }) + + it.each([{ not: "valid" }, { rows: 123 }, "invalid"])( + "should ignore malformed/invalid delete request: %s", + async (request: any) => { + const rowUsage = await getRowUsage() + + await config.api.row.delete(table._id!, request, { + status: 400, + body: { + message: "Invalid delete rows request", + }, + }) + + await assertRowUsage(rowUsage) + } + ) + }) + + describe("bulkImport", () => { + isInternal && + it("should update Auto ID field after bulk import", async () => { + const table = await config.api.table.save( + saveTableRequest({ + primary: ["autoId"], + schema: { + autoId: { + name: "autoId", + type: FieldType.NUMBER, + subtype: AutoFieldSubType.AUTO_ID, + autocolumn: true, + constraints: { + type: "number", + presence: false, + }, + }, + }, + }) + ) + + let row = await config.api.row.save(table._id!, {}) + expect(row.autoId).toEqual(1) + + await config.api.row.bulkImport(table._id!, { + rows: [{ autoId: 2 }], + }) + + row = await config.api.row.save(table._id!, {}) + expect(row.autoId).toEqual(3) + }) + + isInternal && + it("should reject bulkImporting relationship fields", async () => { + const table1 = await config.api.table.save(saveTableRequest()) + const table2 = await config.api.table.save( + saveTableRequest({ + schema: { + relationship: { + name: "relationship", + type: FieldType.LINK, + tableId: table1._id!, + relationshipType: RelationshipType.ONE_TO_MANY, + fieldName: "relationship", + }, + }, + }) + ) + + const table1Row1 = await config.api.row.save(table1._id!, {}) + await config.api.row.bulkImport( + table2._id!, + { + rows: [{ relationship: [table1Row1._id!] }], + }, + { + status: 400, + body: { + message: + 'Can\'t bulk import relationship fields for internal databases, found value in field "relationship"', + }, + } + ) + }) + + it("should be able to bulkImport rows", async () => { + const table = await config.api.table.save( + saveTableRequest({ + schema: { name: { type: FieldType.STRING, name: "name", @@ -1826,156 +1671,17 @@ datasourceDescribe( }) ) - const row1 = await config.api.row.save(table._id!, { - userId: 1, - name: "Row 1", - description: "Row 1 description", - }) - - const row2 = await config.api.row.save(table._id!, { - userId: 2, - name: "Row 2", - description: "Row 2 description", - }) + const rowUsage = await getRowUsage() await config.api.row.bulkImport(table._id!, { - identifierFields: ["userId"], rows: [ { - userId: row1.userId, - name: "Row 1 updated", - description: "Row 1 description updated", + name: "Row 1", + description: "Row 1 description", }, { - userId: row2.userId, - name: "Row 2 updated", - description: "Row 2 description updated", - }, - { - userId: 3, - name: "Row 3", - description: "Row 3 description", - }, - ], - }) - - const rows = await config.api.row.fetch(table._id!) 
- expect(rows.length).toEqual(3) - - rows.sort((a, b) => a.name.localeCompare(b.name)) - expect(rows[0].name).toEqual("Row 1 updated") - expect(rows[0].description).toEqual("Row 1 description updated") - expect(rows[1].name).toEqual("Row 2 updated") - expect(rows[1].description).toEqual("Row 2 description updated") - expect(rows[2].name).toEqual("Row 3") - expect(rows[2].description).toEqual("Row 3 description") - }) - - // Upserting isn't yet supported in MSSQL or Oracle, see: - // https://github.com/knex/knex/pull/6050 - !isMSSQL && - !isOracle && - !isInternal && - it("should be able to update existing rows with composite primary keys with bulkImport", async () => { - const tableName = uuid.v4() - await client?.schema.createTable(tableName, table => { - table.integer("companyId") - table.integer("userId") - table.string("name") - table.string("description") - table.primary(["companyId", "userId"]) - }) - - const resp = await config.api.datasource.fetchSchema({ - datasourceId: datasource!._id!, - }) - const table = resp.datasource.entities![tableName] - - const row1 = await config.api.row.save(table._id!, { - companyId: 1, - userId: 1, - name: "Row 1", - description: "Row 1 description", - }) - - const row2 = await config.api.row.save(table._id!, { - companyId: 1, - userId: 2, - name: "Row 2", - description: "Row 2 description", - }) - - await config.api.row.bulkImport(table._id!, { - identifierFields: ["companyId", "userId"], - rows: [ - { - companyId: 1, - userId: row1.userId, - name: "Row 1 updated", - description: "Row 1 description updated", - }, - { - companyId: 1, - userId: row2.userId, - name: "Row 2 updated", - description: "Row 2 description updated", - }, - { - companyId: 1, - userId: 3, - name: "Row 3", - description: "Row 3 description", - }, - ], - }) - - const rows = await config.api.row.fetch(table._id!) 
- expect(rows.length).toEqual(3) - - rows.sort((a, b) => a.name.localeCompare(b.name)) - expect(rows[0].name).toEqual("Row 1 updated") - expect(rows[0].description).toEqual("Row 1 description updated") - expect(rows[1].name).toEqual("Row 2 updated") - expect(rows[1].description).toEqual("Row 2 description updated") - expect(rows[2].name).toEqual("Row 3") - expect(rows[2].description).toEqual("Row 3 description") - }) - - // Upserting isn't yet supported in MSSQL/Oracle, see: - // https://github.com/knex/knex/pull/6050 - !isMSSQL && - !isOracle && - !isInternal && - it("should be able to update existing rows an autoID primary key", async () => { - const tableName = uuid.v4() - await client!.schema.createTable(tableName, table => { - table.increments("userId").primary() - table.string("name") - }) - - const resp = await config.api.datasource.fetchSchema({ - datasourceId: datasource!._id!, - }) - const table = resp.datasource.entities![tableName] - - const row1 = await config.api.row.save(table._id!, { - name: "Clare", - }) - - const row2 = await config.api.row.save(table._id!, { - name: "Jeff", - }) - - await config.api.row.bulkImport(table._id!, { - identifierFields: ["userId"], - rows: [ - { - userId: row1.userId, - name: "Clare updated", - }, - { - userId: row2.userId, - name: "Jeff updated", + name: "Row 2", + description: "Row 2 description", }, ], }) @@ -1984,1397 +1690,1664 @@ datasourceDescribe( expect(rows.length).toEqual(2) rows.sort((a, b) => a.name.localeCompare(b.name)) - expect(rows[0].name).toEqual("Clare updated") - expect(rows[1].name).toEqual("Jeff updated") + expect(rows[0].name).toEqual("Row 1") + expect(rows[0].description).toEqual("Row 1 description") + expect(rows[1].name).toEqual("Row 2") + expect(rows[1].description).toEqual("Row 2 description") + + await assertRowUsage(isInternal ? 
rowUsage + 2 : rowUsage) }) - }) - describe("enrich", () => { - beforeAll(async () => { - table = await config.api.table.save(defaultTable()) - }) - - it("should allow enriching some linked rows", async () => { - const { linkedTable, firstRow, secondRow } = await tenancy.doInTenant( - config.getTenantId(), - async () => { - const linkedTable = await config.api.table.save( - defaultTable({ + isInternal && + it("should be able to update existing rows on bulkImport", async () => { + const table = await config.api.table.save( + saveTableRequest({ schema: { - link: { - name: "link", - fieldName: "link", - type: FieldType.LINK, - relationshipType: RelationshipType.ONE_TO_MANY, - tableId: table._id!, + name: { + type: FieldType.STRING, + name: "name", + }, + description: { + type: FieldType.STRING, + name: "description", }, }, }) ) - const firstRow = await config.api.row.save(table._id!, { - name: "Test Contact", - description: "original description", + + const existingRow = await config.api.row.save(table._id!, { + name: "Existing row", + description: "Existing description", }) - const secondRow = await config.api.row.save(linkedTable._id!, { - name: "Test 2", - description: "og desc", - link: [{ _id: firstRow._id }], + + const rowUsage = await getRowUsage() + + await config.api.row.bulkImport(table._id!, { + rows: [ + { + name: "Row 1", + description: "Row 1 description", + }, + { ...existingRow, name: "Updated existing row" }, + { + name: "Row 2", + description: "Row 2 description", + }, + ], + identifierFields: ["_id"], }) - return { linkedTable, firstRow, secondRow } - } - ) - const rowUsage = await getRowUsage() - // test basic enrichment - const resBasic = await config.api.row.get( - linkedTable._id!, - secondRow._id! - ) - expect(resBasic.link.length).toBe(1) - expect(resBasic.link[0]).toEqual({ - _id: firstRow._id, - primaryDisplay: firstRow.name, - }) + const rows = await config.api.row.fetch(table._id!) + expect(rows.length).toEqual(3) - // test full enrichment - const resEnriched = await config.api.row.getEnriched( - linkedTable._id!, - secondRow._id! - ) - expect(resEnriched.link.length).toBe(1) - expect(resEnriched.link[0]._id).toBe(firstRow._id) - expect(resEnriched.link[0].name).toBe("Test Contact") - expect(resEnriched.link[0].description).toBe("original description") - await assertRowUsage(rowUsage) - }) - }) + rows.sort((a, b) => a.name.localeCompare(b.name)) + expect(rows[0].name).toEqual("Row 1") + expect(rows[0].description).toEqual("Row 1 description") + expect(rows[1].name).toEqual("Row 2") + expect(rows[1].description).toEqual("Row 2 description") + expect(rows[2].name).toEqual("Updated existing row") + expect(rows[2].description).toEqual("Existing description") - isInternal && - describe("attachments and signatures", () => { - const coreAttachmentEnrichment = async ( - schema: TableSchema, - field: string, - attachmentCfg: string | string[] - ) => { - const testTable = await config.api.table.save( - defaultTable({ - schema, - }) - ) - const attachmentToStoreKey = (attachmentId: string) => { - return { - key: `${config.getAppId()}/attachments/${attachmentId}`, - } - } - const draftRow = { - name: "test", - description: "test", - [field]: - typeof attachmentCfg === "string" - ? 
attachmentToStoreKey(attachmentCfg) - : attachmentCfg.map(attachmentToStoreKey), - tableId: testTable._id, - } - const row = await config.api.row.save(testTable._id!, draftRow) - - await withEnv({ SELF_HOSTED: "true" }, async () => { - return context.doInAppContext(config.getAppId(), async () => { - const enriched: Row[] = await outputProcessing(testTable, [row]) - const [targetRow] = enriched - const attachmentEntries = Array.isArray(targetRow[field]) - ? targetRow[field] - : [targetRow[field]] - - for (const entry of attachmentEntries) { - const attachmentId = entry.key.split("/").pop() - expect(entry.url.split("?")[0]).toBe( - `/files/signed/prod-budi-app-assets/${config.getProdAppId()}/attachments/${attachmentId}` - ) - } - }) + await assertRowUsage(rowUsage + 2) }) - } - it("should allow enriching single attachment rows", async () => { - await coreAttachmentEnrichment( - { - attachment: { - type: FieldType.ATTACHMENT_SINGLE, - name: "attachment", - constraints: { presence: false }, - }, - }, - "attachment", - `${uuid.v4()}.csv` - ) - }) + isInternal && + it("should create new rows if not identifierFields are provided", async () => { + const table = await config.api.table.save( + saveTableRequest({ + schema: { + name: { + type: FieldType.STRING, + name: "name", + }, + description: { + type: FieldType.STRING, + name: "description", + }, + }, + }) + ) - it("should allow enriching attachment list rows", async () => { - await coreAttachmentEnrichment( - { - attachments: { - type: FieldType.ATTACHMENTS, - name: "attachments", - constraints: { type: "array", presence: false }, - }, - }, - "attachments", - [`${uuid.v4()}.csv`] - ) - }) + const existingRow = await config.api.row.save(table._id!, { + name: "Existing row", + description: "Existing description", + }) - it("should allow enriching signature rows", async () => { - await coreAttachmentEnrichment( - { - signature: { - type: FieldType.SIGNATURE_SINGLE, - name: "signature", - constraints: { presence: false }, - }, - }, - "signature", - `${uuid.v4()}.png` - ) - }) + const rowUsage = await getRowUsage() + + await config.api.row.bulkImport(table._id!, { + rows: [ + { + name: "Row 1", + description: "Row 1 description", + }, + { ...existingRow, name: "Updated existing row" }, + { + name: "Row 2", + description: "Row 2 description", + }, + ], + }) + + const rows = await config.api.row.fetch(table._id!) 
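+ // with no identifierFields the import does not upsert, so the three imported rows are appended alongside the existing row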
+ expect(rows.length).toEqual(4) + + rows.sort((a, b) => a.name.localeCompare(b.name)) + expect(rows[0].name).toEqual("Existing row") + expect(rows[0].description).toEqual("Existing description") + expect(rows[1].name).toEqual("Row 1") + expect(rows[1].description).toEqual("Row 1 description") + expect(rows[2].name).toEqual("Row 2") + expect(rows[2].description).toEqual("Row 2 description") + expect(rows[3].name).toEqual("Updated existing row") + expect(rows[3].description).toEqual("Existing description") + + await assertRowUsage(rowUsage + 3) + }) + + // Upserting isn't yet supported in MSSQL / Oracle, see: + // https://github.com/knex/knex/pull/6050 + !isMSSQL && + !isOracle && + it("should be able to update existing rows with bulkImport", async () => { + const table = await config.api.table.save( + saveTableRequest({ + primary: ["userId"], + schema: { + userId: { + type: FieldType.NUMBER, + name: "userId", + constraints: { + presence: true, + }, + }, + name: { + type: FieldType.STRING, + name: "name", + }, + description: { + type: FieldType.STRING, + name: "description", + }, + }, + }) + ) + + const row1 = await config.api.row.save(table._id!, { + userId: 1, + name: "Row 1", + description: "Row 1 description", + }) + + const row2 = await config.api.row.save(table._id!, { + userId: 2, + name: "Row 2", + description: "Row 2 description", + }) + + await config.api.row.bulkImport(table._id!, { + identifierFields: ["userId"], + rows: [ + { + userId: row1.userId, + name: "Row 1 updated", + description: "Row 1 description updated", + }, + { + userId: row2.userId, + name: "Row 2 updated", + description: "Row 2 description updated", + }, + { + userId: 3, + name: "Row 3", + description: "Row 3 description", + }, + ], + }) + + const rows = await config.api.row.fetch(table._id!) 
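+ // the two existing rows should have been upserted via userId and a single new row created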
+ expect(rows.length).toEqual(3) + + rows.sort((a, b) => a.name.localeCompare(b.name)) + expect(rows[0].name).toEqual("Row 1 updated") + expect(rows[0].description).toEqual("Row 1 description updated") + expect(rows[1].name).toEqual("Row 2 updated") + expect(rows[1].description).toEqual("Row 2 description updated") + expect(rows[2].name).toEqual("Row 3") + expect(rows[2].description).toEqual("Row 3 description") + }) + + // Upserting isn't yet supported in MSSQL or Oracle, see: + // https://github.com/knex/knex/pull/6050 + !isMSSQL && + !isOracle && + !isInternal && + it("should be able to update existing rows with composite primary keys with bulkImport", async () => { + const tableName = uuid.v4() + await client?.schema.createTable(tableName, table => { + table.integer("companyId") + table.integer("userId") + table.string("name") + table.string("description") + table.primary(["companyId", "userId"]) + }) + + const resp = await config.api.datasource.fetchSchema({ + datasourceId: datasource!._id!, + }) + const table = resp.datasource.entities![tableName] + + const row1 = await config.api.row.save(table._id!, { + companyId: 1, + userId: 1, + name: "Row 1", + description: "Row 1 description", + }) + + const row2 = await config.api.row.save(table._id!, { + companyId: 1, + userId: 2, + name: "Row 2", + description: "Row 2 description", + }) + + await config.api.row.bulkImport(table._id!, { + identifierFields: ["companyId", "userId"], + rows: [ + { + companyId: 1, + userId: row1.userId, + name: "Row 1 updated", + description: "Row 1 description updated", + }, + { + companyId: 1, + userId: row2.userId, + name: "Row 2 updated", + description: "Row 2 description updated", + }, + { + companyId: 1, + userId: 3, + name: "Row 3", + description: "Row 3 description", + }, + ], + }) + + const rows = await config.api.row.fetch(table._id!) + expect(rows.length).toEqual(3) + + rows.sort((a, b) => a.name.localeCompare(b.name)) + expect(rows[0].name).toEqual("Row 1 updated") + expect(rows[0].description).toEqual("Row 1 description updated") + expect(rows[1].name).toEqual("Row 2 updated") + expect(rows[1].description).toEqual("Row 2 description updated") + expect(rows[2].name).toEqual("Row 3") + expect(rows[2].description).toEqual("Row 3 description") + }) + + // Upserting isn't yet supported in MSSQL/Oracle, see: + // https://github.com/knex/knex/pull/6050 + !isMSSQL && + !isOracle && + !isInternal && + it("should be able to update existing rows with an autoID primary key", async () => { + const tableName = uuid.v4() + await client!.schema.createTable(tableName, table => { + table.increments("userId").primary() + table.string("name") + }) + + const resp = await config.api.datasource.fetchSchema({ + datasourceId: datasource!._id!, + }) + const table = resp.datasource.entities![tableName] + + const row1 = await config.api.row.save(table._id!, { + name: "Clare", + }) + + const row2 = await config.api.row.save(table._id!, { + name: "Jeff", + }) + + await config.api.row.bulkImport(table._id!, { + identifierFields: ["userId"], + rows: [ + { + userId: row1.userId, + name: "Clare updated", + }, + { + userId: row2.userId, + name: "Jeff updated", + }, + ], + }) + + const rows = await config.api.row.fetch(table._id!)
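+ // both existing rows should have been updated in place via the auto-generated userId key, with no new rows created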
+ expect(rows.length).toEqual(2) + + rows.sort((a, b) => a.name.localeCompare(b.name)) + expect(rows[0].name).toEqual("Clare updated") + expect(rows[1].name).toEqual("Jeff updated") + }) }) - describe("exportRows", () => { - beforeEach(async () => { - table = await config.api.table.save(defaultTable()) + describe("enrich", () => { + beforeAll(async () => { + table = await config.api.table.save(defaultTable()) + }) + + it("should allow enriching some linked rows", async () => { + const { linkedTable, firstRow, secondRow } = await tenancy.doInTenant( + config.getTenantId(), + async () => { + const linkedTable = await config.api.table.save( + defaultTable({ + schema: { + link: { + name: "link", + fieldName: "link", + type: FieldType.LINK, + relationshipType: RelationshipType.ONE_TO_MANY, + tableId: table._id!, + }, + }, + }) + ) + const firstRow = await config.api.row.save(table._id!, { + name: "Test Contact", + description: "original description", + }) + const secondRow = await config.api.row.save(linkedTable._id!, { + name: "Test 2", + description: "og desc", + link: [{ _id: firstRow._id }], + }) + return { linkedTable, firstRow, secondRow } + } + ) + const rowUsage = await getRowUsage() + + // test basic enrichment + const resBasic = await config.api.row.get( + linkedTable._id!, + secondRow._id! + ) + expect(resBasic.link.length).toBe(1) + expect(resBasic.link[0]).toEqual({ + _id: firstRow._id, + primaryDisplay: firstRow.name, + }) + + // test full enrichment + const resEnriched = await config.api.row.getEnriched( + linkedTable._id!, + secondRow._id! + ) + expect(resEnriched.link.length).toBe(1) + expect(resEnriched.link[0]._id).toBe(firstRow._id) + expect(resEnriched.link[0].name).toBe("Test Contact") + expect(resEnriched.link[0].description).toBe("original description") + await assertRowUsage(rowUsage) + }) }) isInternal && - it("should not export internal couchdb fields", async () => { - const existing = await config.api.row.save(table._id!, { - name: generator.guid(), - description: generator.paragraph(), + describe("attachments and signatures", () => { + const coreAttachmentEnrichment = async ( + schema: TableSchema, + field: string, + attachmentCfg: string | string[] + ) => { + const testTable = await config.api.table.save( + defaultTable({ + schema, + }) + ) + const attachmentToStoreKey = (attachmentId: string) => { + return { + key: `${config.getAppId()}/attachments/${attachmentId}`, + } + } + const draftRow = { + name: "test", + description: "test", + [field]: + typeof attachmentCfg === "string" + ? attachmentToStoreKey(attachmentCfg) + : attachmentCfg.map(attachmentToStoreKey), + tableId: testTable._id, + } + const row = await config.api.row.save(testTable._id!, draftRow) + + await withEnv({ SELF_HOSTED: "true" }, async () => { + return context.doInAppContext(config.getAppId(), async () => { + const enriched: Row[] = await outputProcessing(testTable, [row]) + const [targetRow] = enriched + const attachmentEntries = Array.isArray(targetRow[field]) + ? 
targetRow[field] + : [targetRow[field]] + + for (const entry of attachmentEntries) { + const attachmentId = entry.key.split("/").pop() + expect(entry.url.split("?")[0]).toBe( + `/files/signed/prod-budi-app-assets/${config.getProdAppId()}/attachments/${attachmentId}` + ) + } + }) + }) + } + + it("should allow enriching single attachment rows", async () => { + await coreAttachmentEnrichment( + { + attachment: { + type: FieldType.ATTACHMENT_SINGLE, + name: "attachment", + constraints: { presence: false }, + }, + }, + "attachment", + `${uuid.v4()}.csv` + ) }) - const res = await config.api.row.exportRows(table._id!, { - rows: [existing._id!], + + it("should allow enriching attachment list rows", async () => { + await coreAttachmentEnrichment( + { + attachments: { + type: FieldType.ATTACHMENTS, + name: "attachments", + constraints: { type: "array", presence: false }, + }, + }, + "attachments", + [`${uuid.v4()}.csv`] + ) }) + + it("should allow enriching signature rows", async () => { + await coreAttachmentEnrichment( + { + signature: { + type: FieldType.SIGNATURE_SINGLE, + name: "signature", + constraints: { presence: false }, + }, + }, + "signature", + `${uuid.v4()}.png` + ) + }) + }) + + describe("exportRows", () => { + beforeEach(async () => { + table = await config.api.table.save(defaultTable()) + }) + + isInternal && + it("should not export internal couchdb fields", async () => { + const existing = await config.api.row.save(table._id!, { + name: generator.guid(), + description: generator.paragraph(), + }) + const res = await config.api.row.exportRows(table._id!, { + rows: [existing._id!], + }) + const results = JSON.parse(res) + expect(results.length).toEqual(1) + const row = results[0] + + expect(Object.keys(row)).toEqual(["_id", "name", "description"]) + }) + + !isInternal && + it("should allow exporting all columns", async () => { + const existing = await config.api.row.save(table._id!, {}) + const res = await config.api.row.exportRows(table._id!, { + rows: [existing._id!], + }) + const results = JSON.parse(res) + expect(results.length).toEqual(1) + const row = results[0] + + // Ensure all original columns were exported + expect(Object.keys(row).length).toBe(Object.keys(existing).length) + Object.keys(existing).forEach(key => { + expect(row[key]).toEqual(existing[key]) + }) + }) + + it("should allow exporting without filtering", async () => { + const existing = await config.api.row.save(table._id!, {}) + const res = await config.api.row.exportRows(table._id!) 
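+ // called without a filter body, exportRows should export every row in the table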
const results = JSON.parse(res) expect(results.length).toEqual(1) const row = results[0] - expect(Object.keys(row)).toEqual(["_id", "name", "description"]) + expect(row._id).toEqual(existing._id) }) - !isInternal && - it("should allow exporting all columns", async () => { + it("should allow exporting only certain columns", async () => { const existing = await config.api.row.save(table._id!, {}) const res = await config.api.row.exportRows(table._id!, { rows: [existing._id!], + columns: ["_id"], }) const results = JSON.parse(res) expect(results.length).toEqual(1) const row = results[0] - // Ensure all original columns were exported - expect(Object.keys(row).length).toBe(Object.keys(existing).length) - Object.keys(existing).forEach(key => { - expect(row[key]).toEqual(existing[key]) - }) + // Ensure only the _id column was exported + expect(Object.keys(row).length).toEqual(1) + expect(row._id).toEqual(existing._id) }) - it("should allow exporting without filtering", async () => { - const existing = await config.api.row.save(table._id!, {}) - const res = await config.api.row.exportRows(table._id!) - const results = JSON.parse(res) - expect(results.length).toEqual(1) - const row = results[0] - - expect(row._id).toEqual(existing._id) - }) - - it("should allow exporting only certain columns", async () => { - const existing = await config.api.row.save(table._id!, {}) - const res = await config.api.row.exportRows(table._id!, { - rows: [existing._id!], - columns: ["_id"], - }) - const results = JSON.parse(res) - expect(results.length).toEqual(1) - const row = results[0] - - // Ensure only the _id column was exported - expect(Object.keys(row).length).toEqual(1) - expect(row._id).toEqual(existing._id) - }) - - it("should handle single quotes in row filtering", async () => { - const existing = await config.api.row.save(table._id!, {}) - const res = await config.api.row.exportRows(table._id!, { - rows: [`['${existing._id!}']`], - }) - const results = JSON.parse(res) - expect(results.length).toEqual(1) - const row = results[0] - expect(row._id).toEqual(existing._id) - }) - - it("should return an error if no table is found", async () => { - const existing = await config.api.row.save(table._id!, {}) - await config.api.row.exportRows( - "1234567", - { rows: [existing._id!] }, - RowExportFormat.JSON, - { status: 404 } - ) - }) - - // MSSQL needs a setting called IDENTITY_INSERT to be set to ON to allow writing - // to identity columns. This is not something Budibase does currently. 
- !isMSSQL && - it("should handle filtering by composite primary keys", async () => { - const tableRequest = saveTableRequest({ - primary: ["number", "string"], - schema: { - string: { - type: FieldType.STRING, - name: "string", - }, - number: { - type: FieldType.NUMBER, - name: "number", - }, - }, - }) - delete tableRequest.schema.id - - const table = await config.api.table.save(tableRequest) - const toCreate = generator - .unique(() => generator.integer({ min: 0, max: 10000 }), 10) - .map(number => ({ number, string: generator.word({ length: 30 }) })) - - const rows = await Promise.all( - toCreate.map(d => config.api.row.save(table._id!, d)) - ) - + it("should handle single quotes in row filtering", async () => { + const existing = await config.api.row.save(table._id!, {}) const res = await config.api.row.exportRows(table._id!, { - rows: _.sampleSize(rows, 3).map(r => r._id!), + rows: [`['${existing._id!}']`], }) const results = JSON.parse(res) - expect(results.length).toEqual(3) + expect(results.length).toEqual(1) + const row = results[0] + expect(row._id).toEqual(existing._id) }) - describe("should allow exporting all column types", () => { - let tableId: string - let expectedRowData: Row - - beforeAll(async () => { - const fullSchema = setup.structures.fullSchemaWithoutLinks({ - allRequired: true, - }) - - const table = await config.api.table.save( - saveTableRequest({ - ...setup.structures.basicTable(), - schema: fullSchema, - primary: ["string"], - }) + it("should return an error if no table is found", async () => { + const existing = await config.api.row.save(table._id!, {}) + await config.api.row.exportRows( + "1234567", + { rows: [existing._id!] }, + RowExportFormat.JSON, + { status: 404 } ) - tableId = table._id! - - const rowValues: Record = { - [FieldType.STRING]: generator.guid(), - [FieldType.LONGFORM]: generator.paragraph(), - [FieldType.OPTIONS]: "option 2", - [FieldType.ARRAY]: ["options 2", "options 4"], - [FieldType.NUMBER]: generator.natural(), - [FieldType.BOOLEAN]: generator.bool(), - [FieldType.DATETIME]: generator.date().toISOString(), - [FieldType.ATTACHMENTS]: [setup.structures.basicAttachment()], - [FieldType.ATTACHMENT_SINGLE]: setup.structures.basicAttachment(), - [FieldType.FORMULA]: undefined, // generated field - [FieldType.AUTO]: undefined, // generated field - [FieldType.AI]: "LLM Output", - [FieldType.JSON]: { name: generator.guid() }, - [FieldType.INTERNAL]: generator.guid(), - [FieldType.BARCODEQR]: generator.guid(), - [FieldType.SIGNATURE_SINGLE]: setup.structures.basicAttachment(), - [FieldType.BIGINT]: generator.integer().toString(), - [FieldType.BB_REFERENCE]: [{ _id: config.getUser()._id }], - [FieldType.BB_REFERENCE_SINGLE]: { _id: config.getUser()._id }, - } - const row = await config.api.row.save(table._id!, rowValues) - expectedRowData = { - _id: row._id, - [FieldType.STRING]: rowValues[FieldType.STRING], - [FieldType.LONGFORM]: rowValues[FieldType.LONGFORM], - [FieldType.OPTIONS]: rowValues[FieldType.OPTIONS], - [FieldType.ARRAY]: rowValues[FieldType.ARRAY], - [FieldType.NUMBER]: rowValues[FieldType.NUMBER], - [FieldType.BOOLEAN]: rowValues[FieldType.BOOLEAN], - [FieldType.DATETIME]: rowValues[FieldType.DATETIME], - [FieldType.ATTACHMENTS]: rowValues[FieldType.ATTACHMENTS].map( - (a: any) => - expect.objectContaining({ - ...a, - url: expect.any(String), - }) - ), - [FieldType.ATTACHMENT_SINGLE]: expect.objectContaining({ - ...rowValues[FieldType.ATTACHMENT_SINGLE], - url: expect.any(String), - }), - [FieldType.FORMULA]: 
fullSchema[FieldType.FORMULA].formula, - [FieldType.AUTO]: expect.any(Number), - [FieldType.AI]: expect.any(String), - [FieldType.JSON]: rowValues[FieldType.JSON], - [FieldType.INTERNAL]: rowValues[FieldType.INTERNAL], - [FieldType.BARCODEQR]: rowValues[FieldType.BARCODEQR], - [FieldType.SIGNATURE_SINGLE]: expect.objectContaining({ - ...rowValues[FieldType.SIGNATURE_SINGLE], - url: expect.any(String), - }), - [FieldType.BIGINT]: rowValues[FieldType.BIGINT], - [FieldType.BB_REFERENCE]: rowValues[FieldType.BB_REFERENCE].map( - expect.objectContaining - ), - [FieldType.BB_REFERENCE_SINGLE]: expect.objectContaining( - rowValues[FieldType.BB_REFERENCE_SINGLE] - ), - } }) - it("as csv", async () => { - const exportedValue = await config.api.row.exportRows( - tableId, - { query: {} }, - RowExportFormat.CSV - ) - - const jsonResult = await config.api.table.csvToJson({ - csvString: exportedValue, - }) - - const stringified = (value: string) => - JSON.stringify(value).replace(/"/g, "'") - - const matchingObject = ( - key: string, - value: any, - isArray: boolean - ) => { - const objectMatcher = `{'${key}':'${value[key]}'.*?}` - if (isArray) { - return expect.stringMatching( - new RegExp(`^\\[${objectMatcher}\\]$`) - ) - } - return expect.stringMatching(new RegExp(`^${objectMatcher}$`)) - } - - expect(jsonResult).toEqual([ - { - ...expectedRowData, - auto: expect.any(String), - array: stringified(expectedRowData["array"]), - attachment: matchingObject( - "key", - expectedRowData["attachment"][0].sample, - true - ), - attachment_single: matchingObject( - "key", - expectedRowData["attachment_single"].sample, - false - ), - boolean: stringified(expectedRowData["boolean"]), - json: stringified(expectedRowData["json"]), - number: stringified(expectedRowData["number"]), - signature_single: matchingObject( - "key", - expectedRowData["signature_single"].sample, - false - ), - bb_reference: matchingObject( - "_id", - expectedRowData["bb_reference"][0].sample, - true - ), - bb_reference_single: matchingObject( - "_id", - expectedRowData["bb_reference_single"].sample, - false - ), - ai: "LLM Output", - }, - ]) - }) - - it("as json", async () => { - const exportedValue = await config.api.row.exportRows( - tableId, - { query: {} }, - RowExportFormat.JSON - ) - - const json = JSON.parse(exportedValue) - expect(json).toEqual([expectedRowData]) - }) - - it("as json with schema", async () => { - const exportedValue = await config.api.row.exportRows( - tableId, - { query: {} }, - RowExportFormat.JSON_WITH_SCHEMA - ) - - const json = JSON.parse(exportedValue) - expect(json).toEqual({ - schema: expect.any(Object), - rows: [expectedRowData], - }) - }) - - it("can handle csv-special characters in strings", async () => { - const badString = 'test":, wow", "test": "wow"' - const table = await config.api.table.save( - saveTableRequest({ + // MSSQL needs a setting called IDENTITY_INSERT to be set to ON to allow writing + // to identity columns. This is not something Budibase does currently. 
+ !isMSSQL && + it("should handle filtering by composite primary keys", async () => { + const tableRequest = saveTableRequest({ + primary: ["number", "string"], schema: { string: { type: FieldType.STRING, name: "string", }, + number: { + type: FieldType.NUMBER, + name: "number", + }, }, }) - ) + delete tableRequest.schema.id - await config.api.row.save(table._id!, { string: badString }) + const table = await config.api.table.save(tableRequest) + const toCreate = generator + .unique(() => generator.integer({ min: 0, max: 10000 }), 10) + .map(number => ({ + number, + string: generator.word({ length: 30 }), + })) - const exportedValue = await config.api.row.exportRows( - table._id!, - { query: {} }, - RowExportFormat.CSV - ) + const rows = await Promise.all( + toCreate.map(d => config.api.row.save(table._id!, d)) + ) - const json = await config.api.table.csvToJson( - { + const res = await config.api.row.exportRows(table._id!, { + rows: _.sampleSize(rows, 3).map(r => r._id!), + }) + const results = JSON.parse(res) + expect(results.length).toEqual(3) + }) + + describe("should allow exporting all column types", () => { + let tableId: string + let expectedRowData: Row + + beforeAll(async () => { + const fullSchema = setup.structures.fullSchemaWithoutLinks({ + allRequired: true, + }) + + const table = await config.api.table.save( + saveTableRequest({ + ...setup.structures.basicTable(), + schema: fullSchema, + primary: ["string"], + }) + ) + tableId = table._id! + + const rowValues: Record = { + [FieldType.STRING]: generator.guid(), + [FieldType.LONGFORM]: generator.paragraph(), + [FieldType.OPTIONS]: "option 2", + [FieldType.ARRAY]: ["options 2", "options 4"], + [FieldType.NUMBER]: generator.natural(), + [FieldType.BOOLEAN]: generator.bool(), + [FieldType.DATETIME]: generator.date().toISOString(), + [FieldType.ATTACHMENTS]: [setup.structures.basicAttachment()], + [FieldType.ATTACHMENT_SINGLE]: setup.structures.basicAttachment(), + [FieldType.FORMULA]: undefined, // generated field + [FieldType.AUTO]: undefined, // generated field + [FieldType.AI]: "LLM Output", + [FieldType.JSON]: { name: generator.guid() }, + [FieldType.INTERNAL]: generator.guid(), + [FieldType.BARCODEQR]: generator.guid(), + [FieldType.SIGNATURE_SINGLE]: setup.structures.basicAttachment(), + [FieldType.BIGINT]: generator.integer().toString(), + [FieldType.BB_REFERENCE]: [{ _id: config.getUser()._id }], + [FieldType.BB_REFERENCE_SINGLE]: { _id: config.getUser()._id }, + } + const row = await config.api.row.save(table._id!, rowValues) + expectedRowData = { + _id: row._id, + [FieldType.STRING]: rowValues[FieldType.STRING], + [FieldType.LONGFORM]: rowValues[FieldType.LONGFORM], + [FieldType.OPTIONS]: rowValues[FieldType.OPTIONS], + [FieldType.ARRAY]: rowValues[FieldType.ARRAY], + [FieldType.NUMBER]: rowValues[FieldType.NUMBER], + [FieldType.BOOLEAN]: rowValues[FieldType.BOOLEAN], + [FieldType.DATETIME]: rowValues[FieldType.DATETIME], + [FieldType.ATTACHMENTS]: rowValues[FieldType.ATTACHMENTS].map( + (a: any) => + expect.objectContaining({ + ...a, + url: expect.any(String), + }) + ), + [FieldType.ATTACHMENT_SINGLE]: expect.objectContaining({ + ...rowValues[FieldType.ATTACHMENT_SINGLE], + url: expect.any(String), + }), + [FieldType.FORMULA]: fullSchema[FieldType.FORMULA].formula, + [FieldType.AUTO]: expect.any(Number), + [FieldType.AI]: expect.any(String), + [FieldType.JSON]: rowValues[FieldType.JSON], + [FieldType.INTERNAL]: rowValues[FieldType.INTERNAL], + [FieldType.BARCODEQR]: rowValues[FieldType.BARCODEQR], + 
[FieldType.SIGNATURE_SINGLE]: expect.objectContaining({ + ...rowValues[FieldType.SIGNATURE_SINGLE], + url: expect.any(String), + }), + [FieldType.BIGINT]: rowValues[FieldType.BIGINT], + [FieldType.BB_REFERENCE]: rowValues[FieldType.BB_REFERENCE].map( + expect.objectContaining + ), + [FieldType.BB_REFERENCE_SINGLE]: expect.objectContaining( + rowValues[FieldType.BB_REFERENCE_SINGLE] + ), + } + }) + + it("as csv", async () => { + const exportedValue = await config.api.row.exportRows( + tableId, + { query: {} }, + RowExportFormat.CSV + ) + + const jsonResult = await config.api.table.csvToJson({ csvString: exportedValue, + }) + + const stringified = (value: string) => + JSON.stringify(value).replace(/"/g, "'") + + const matchingObject = ( + key: string, + value: any, + isArray: boolean + ) => { + const objectMatcher = `{'${key}':'${value[key]}'.*?}` + if (isArray) { + return expect.stringMatching( + new RegExp(`^\\[${objectMatcher}\\]$`) + ) + } + return expect.stringMatching(new RegExp(`^${objectMatcher}$`)) + } + + expect(jsonResult).toEqual([ + { + ...expectedRowData, + auto: expect.any(String), + array: stringified(expectedRowData["array"]), + attachment: matchingObject( + "key", + expectedRowData["attachment"][0].sample, + true + ), + attachment_single: matchingObject( + "key", + expectedRowData["attachment_single"].sample, + false + ), + boolean: stringified(expectedRowData["boolean"]), + json: stringified(expectedRowData["json"]), + number: stringified(expectedRowData["number"]), + signature_single: matchingObject( + "key", + expectedRowData["signature_single"].sample, + false + ), + bb_reference: matchingObject( + "_id", + expectedRowData["bb_reference"][0].sample, + true + ), + bb_reference_single: matchingObject( + "_id", + expectedRowData["bb_reference_single"].sample, + false + ), + ai: "LLM Output", + }, + ]) + }) + + it("as json", async () => { + const exportedValue = await config.api.row.exportRows( + tableId, + { query: {} }, + RowExportFormat.JSON + ) + + const json = JSON.parse(exportedValue) + expect(json).toEqual([expectedRowData]) + }) + + it("as json with schema", async () => { + const exportedValue = await config.api.row.exportRows( + tableId, + { query: {} }, + RowExportFormat.JSON_WITH_SCHEMA + ) + + const json = JSON.parse(exportedValue) + expect(json).toEqual({ + schema: expect.any(Object), + rows: [expectedRowData], + }) + }) + + it("can handle csv-special characters in strings", async () => { + const badString = 'test":, wow", "test": "wow"' + const table = await config.api.table.save( + saveTableRequest({ + schema: { + string: { + type: FieldType.STRING, + name: "string", + }, + }, + }) + ) + + await config.api.row.save(table._id!, { string: badString }) + + const exportedValue = await config.api.row.exportRows( + table._id!, + { query: {} }, + RowExportFormat.CSV + ) + + const json = await config.api.table.csvToJson( + { + csvString: exportedValue, + }, + { + status: 200, + } + ) + + expect(json).toHaveLength(1) + expect(json[0].string).toEqual(badString) + }) + + it("exported data can be re-imported", async () => { + // export all + const exportedValue = await config.api.row.exportRows( + tableId, + { query: {} }, + RowExportFormat.CSV + ) + + // import all twice + const rows = await config.api.table.csvToJson({ + csvString: exportedValue, + }) + await config.api.row.bulkImport(tableId, { + rows, + }) + await config.api.row.bulkImport(tableId, { + rows, + }) + + const { rows: allRows } = await config.api.row.search(tableId) + + const expectedRow = { + 
...expectedRowData, + _id: expect.any(String), + _rev: expect.any(String), + type: "row", + tableId: tableId, + createdAt: new Date().toISOString(), + updatedAt: new Date().toISOString(), + } + expect(allRows).toEqual([expectedRow, expectedRow, expectedRow]) + }) + }) + }) + + let o2mTable: Table + let m2mTable: Table + beforeAll(async () => { + o2mTable = await config.api.table.save(defaultTable()) + m2mTable = await config.api.table.save(defaultTable()) + }) + + describe.each([ + [ + "relationship fields", + (): Record => ({ + user: { + name: "user", + relationshipType: RelationshipType.ONE_TO_MANY, + type: FieldType.LINK, + tableId: o2mTable._id!, + fieldName: "fk_o2m", + }, + users: { + name: "users", + relationshipType: RelationshipType.MANY_TO_MANY, + type: FieldType.LINK, + tableId: m2mTable._id!, + fieldName: "fk_m2m", + }, + }), + (tableId: string) => + config.api.row.save(tableId, { + name: uuid.v4(), + description: generator.paragraph(), + tableId, + }), + (row: Row) => ({ + _id: row._id, + primaryDisplay: row.name, + }), + ], + [ + "bb reference fields", + (): Record => ({ + user: { + name: "user", + type: FieldType.BB_REFERENCE, + subtype: BBReferenceFieldSubType.USER, + }, + users: { + name: "users", + type: FieldType.BB_REFERENCE, + subtype: BBReferenceFieldSubType.USERS, + }, + }), + () => config.createUser(), + (row: Row) => ({ + _id: row._id, + primaryDisplay: row.email, + email: row.email, + firstName: row.firstName, + lastName: row.lastName, + }), + ], + ])("links - %s", (__, relSchema, dataGenerator, resultMapper) => { + let tableId: string + let o2mData: Row[] + let m2mData: Row[] + + beforeAll(async () => { + const table = await config.api.table.save( + defaultTable({ schema: relSchema() }) + ) + tableId = table._id! + + o2mData = [ + await dataGenerator(o2mTable._id!), + await dataGenerator(o2mTable._id!), + await dataGenerator(o2mTable._id!), + await dataGenerator(o2mTable._id!), + ] + + m2mData = [ + await dataGenerator(m2mTable._id!), + await dataGenerator(m2mTable._id!), + await dataGenerator(m2mTable._id!), + await dataGenerator(m2mTable._id!), + ] + }) + + it("can save a row when relationship fields are empty", async () => { + const row = await config.api.row.save(tableId, { + name: "foo", + description: "bar", + }) + + expect(row).toEqual({ + _id: expect.any(String), + _rev: expect.any(String), + id: isInternal ? undefined : expect.any(Number), + type: isInternal ? "row" : undefined, + name: "foo", + description: "bar", + tableId, + }) + }) + + it("can save a row with a single relationship field", async () => { + const user = _.sample(o2mData)! + const row = await config.api.row.save(tableId, { + name: "foo", + description: "bar", + user: [user], + }) + + expect(row).toEqual({ + name: "foo", + description: "bar", + tableId, + user: [user].map(u => resultMapper(u)), + _id: expect.any(String), + _rev: expect.any(String), + id: isInternal ? undefined : expect.any(Number), + type: isInternal ? "row" : undefined, + [`fk_${o2mTable.name}_fk_o2m`]: isInternal ? undefined : user.id, + }) + }) + + it("can save a row with a multiple relationship field", async () => { + const selectedUsers = _.sampleSize(m2mData, 2) + const row = await config.api.row.save(tableId, { + name: "foo", + description: "bar", + users: selectedUsers, + }) + + expect(row).toEqual({ + name: "foo", + description: "bar", + tableId, + users: expect.arrayContaining( + selectedUsers.map(u => resultMapper(u)) + ), + _id: expect.any(String), + _rev: expect.any(String), + id: isInternal ? 
undefined : expect.any(Number), + type: isInternal ? "row" : undefined, + }) + }) + + it("can retrieve rows with no populated relationships", async () => { + const row = await config.api.row.save(tableId, { + name: "foo", + description: "bar", + }) + + const retrieved = await config.api.row.get(tableId, row._id!) + expect(retrieved).toEqual({ + name: "foo", + description: "bar", + tableId, + user: undefined, + users: undefined, + _id: row._id, + _rev: expect.any(String), + id: isInternal ? undefined : expect.any(Number), + ...defaultRowFields, + }) + }) + + it("can retrieve rows with populated relationships", async () => { + const user1 = _.sample(o2mData)! + const [user2, user3] = _.sampleSize(m2mData, 2) + + const row = await config.api.row.save(tableId, { + name: "foo", + description: "bar", + users: [user2, user3], + user: [user1], + }) + + const retrieved = await config.api.row.get(tableId, row._id!) + expect(retrieved).toEqual({ + name: "foo", + description: "bar", + tableId, + user: expect.arrayContaining([user1].map(u => resultMapper(u))), + users: expect.arrayContaining( + [user2, user3].map(u => resultMapper(u)) + ), + _id: row._id, + _rev: expect.any(String), + id: isInternal ? undefined : expect.any(Number), + [`fk_${o2mTable.name}_fk_o2m`]: isInternal ? undefined : user1.id, + ...defaultRowFields, + }) + }) + + it("can update an existing populated row", async () => { + const user = _.sample(o2mData)! + const [users1, users2, users3] = _.sampleSize(m2mData, 3) + + const row = await config.api.row.save(tableId, { + name: "foo", + description: "bar", + users: [users1, users2], + }) + + const updatedRow = await config.api.row.save(tableId, { + ...row, + user: [user], + users: [users3, users1], + }) + expect(updatedRow).toEqual({ + name: "foo", + description: "bar", + tableId, + user: expect.arrayContaining([user].map(u => resultMapper(u))), + users: expect.arrayContaining( + [users3, users1].map(u => resultMapper(u)) + ), + _id: row._id, + _rev: expect.any(String), + id: isInternal ? undefined : expect.any(Number), + type: isInternal ? "row" : undefined, + [`fk_${o2mTable.name}_fk_o2m`]: isInternal ? undefined : user.id, + }) + }) + + it("can wipe an existing populated relationships in row", async () => { + const [user1, user2] = _.sampleSize(m2mData, 2) + const row = await config.api.row.save(tableId, { + name: "foo", + description: "bar", + users: [user1, user2], + }) + + const updatedRow = await config.api.row.save(tableId, { + ...row, + user: null, + users: null, + }) + expect(updatedRow).toEqual({ + name: "foo", + description: "bar", + tableId, + _id: row._id, + _rev: expect.any(String), + id: isInternal ? undefined : expect.any(Number), + type: isInternal ? 
"row" : undefined, + }) + }) + + it("fetch all will populate the relationships", async () => { + const [user1] = _.sampleSize(o2mData, 1) + const [users1, users2, users3] = _.sampleSize(m2mData, 3) + + const rows = [ + { + name: generator.name(), + description: generator.name(), + users: [users1, users2], }, { - status: 200, + name: generator.name(), + description: generator.name(), + user: [user1], + users: [users1, users3], + }, + { + name: generator.name(), + description: generator.name(), + users: [users3], + }, + ] + + await config.api.row.save(tableId, rows[0]) + await config.api.row.save(tableId, rows[1]) + await config.api.row.save(tableId, rows[2]) + + const res = await config.api.row.fetch(tableId) + + expect(res).toEqual( + expect.arrayContaining( + rows.map(r => ({ + name: r.name, + description: r.description, + tableId, + user: r.user?.map(u => resultMapper(u)), + users: r.users?.length + ? expect.arrayContaining(r.users?.map(u => resultMapper(u))) + : undefined, + _id: expect.any(String), + _rev: expect.any(String), + id: isInternal ? undefined : expect.any(Number), + [`fk_${o2mTable.name}_fk_o2m`]: + isInternal || !r.user?.length ? undefined : r.user[0].id, + ...defaultRowFields, + })) + ) + ) + }) + + it("search all will populate the relationships", async () => { + const [user1] = _.sampleSize(o2mData, 1) + const [users1, users2, users3] = _.sampleSize(m2mData, 3) + + const rows = [ + { + name: generator.name(), + description: generator.name(), + users: [users1, users2], + }, + { + name: generator.name(), + description: generator.name(), + user: [user1], + users: [users1, users3], + }, + { + name: generator.name(), + description: generator.name(), + users: [users3], + }, + ] + + await config.api.row.save(tableId, rows[0]) + await config.api.row.save(tableId, rows[1]) + await config.api.row.save(tableId, rows[2]) + + const res = await config.api.row.search(tableId) + + expect(res).toEqual({ + rows: expect.arrayContaining( + rows.map(r => ({ + name: r.name, + description: r.description, + tableId, + user: r.user?.map(u => resultMapper(u)), + users: r.users?.length + ? expect.arrayContaining(r.users?.map(u => resultMapper(u))) + : undefined, + _id: expect.any(String), + _rev: expect.any(String), + id: isInternal ? undefined : expect.any(Number), + [`fk_${o2mTable.name}_fk_o2m`]: + isInternal || !r.user?.length ? undefined : r.user[0].id, + ...defaultRowFields, + })) + ), + ...(isInternal + ? 
{} + : { + hasNextPage: false, + }), + }) + }) + }) + + // Upserting isn't yet supported in MSSQL or Oracle, see: + // https://github.com/knex/knex/pull/6050 + !isMSSQL && + !isOracle && + describe("relationships", () => { + let tableId: string + let viewId: string + + let auxData: Row[] = [] + + beforeAll(async () => { + const aux2Table = await config.api.table.save(saveTableRequest()) + const aux2Data = await config.api.row.save(aux2Table._id!, {}) + + const auxTable = await config.api.table.save( + saveTableRequest({ + primaryDisplay: "name", + schema: { + name: { + name: "name", + type: FieldType.STRING, + constraints: { presence: true }, + }, + age: { + name: "age", + type: FieldType.NUMBER, + constraints: { presence: true }, + }, + address: { + name: "address", + type: FieldType.STRING, + constraints: { presence: true }, + visible: false, + }, + link: { + name: "link", + type: FieldType.LINK, + tableId: aux2Table._id!, + relationshipType: RelationshipType.MANY_TO_MANY, + fieldName: "fk_aux", + constraints: { presence: true }, + }, + formula: { + name: "formula", + type: FieldType.FORMULA, + formula: "{{ any }}", + constraints: { presence: true }, + }, + }, + }) + ) + const auxTableId = auxTable._id! + + for (const name of generator.unique(() => generator.name(), 10)) { + auxData.push( + await config.api.row.save(auxTableId, { + name, + age: generator.age(), + address: generator.address(), + link: [aux2Data], + }) + ) + } + + const table = await config.api.table.save( + saveTableRequest({ + schema: { + title: { + name: "title", + type: FieldType.STRING, + constraints: { presence: true }, + }, + relWithNoSchema: { + name: "relWithNoSchema", + relationshipType: RelationshipType.ONE_TO_MANY, + type: FieldType.LINK, + tableId: auxTableId, + fieldName: "fk_relWithNoSchema", + constraints: { presence: true }, + }, + relWithEmptySchema: { + name: "relWithEmptySchema", + relationshipType: RelationshipType.ONE_TO_MANY, + type: FieldType.LINK, + tableId: auxTableId, + fieldName: "fk_relWithEmptySchema", + constraints: { presence: true }, + }, + relWithFullSchema: { + name: "relWithFullSchema", + relationshipType: RelationshipType.ONE_TO_MANY, + type: FieldType.LINK, + tableId: auxTableId, + fieldName: "fk_relWithFullSchema", + constraints: { presence: true }, + }, + relWithHalfSchema: { + name: "relWithHalfSchema", + relationshipType: RelationshipType.ONE_TO_MANY, + type: FieldType.LINK, + tableId: auxTableId, + fieldName: "fk_relWithHalfSchema", + constraints: { presence: true }, + }, + relWithIllegalSchema: { + name: "relWithIllegalSchema", + relationshipType: RelationshipType.ONE_TO_MANY, + type: FieldType.LINK, + tableId: auxTableId, + fieldName: "fk_relWithIllegalSchema", + constraints: { presence: true }, + }, + }, + }) + ) + tableId = table._id! 
+ const view = await config.api.viewV2.create({ + name: generator.guid(), + tableId, + schema: { + title: { + visible: true, + }, + relWithNoSchema: { + visible: true, + }, + relWithEmptySchema: { + visible: true, + columns: {}, + }, + relWithFullSchema: { + visible: true, + columns: Object.keys(auxTable.schema).reduce< + Record + >((acc, c) => ({ ...acc, [c]: { visible: true } }), {}), + }, + relWithHalfSchema: { + visible: true, + columns: { + name: { visible: true }, + age: { visible: false, readonly: true }, + }, + }, + relWithIllegalSchema: { + visible: true, + columns: { + name: { visible: true }, + address: { visible: true }, + unexisting: { visible: true }, + }, + }, + }, + }) + + viewId = view.id + }) + + const testScenarios: [string, (row: Row) => Promise | Row][] = [ + ["get row", (row: Row) => config.api.row.get(viewId, row._id!)], + [ + "from view search", + async (row: Row) => { + const { rows } = await config.api.viewV2.search(viewId) + return rows.find(r => r._id === row._id!) + }, + ], + ["from original saved row", (row: Row) => row], + [ + "from updated row", + (row: Row) => config.api.row.save(viewId, row), + ], + ] + + it.each(testScenarios)( + "can retrieve rows with populated relationships (via %s)", + async (__, retrieveDelegate) => { + const otherRows = _.sampleSize(auxData, 5) + + const row = await config.api.row.save(viewId, { + title: generator.word(), + relWithNoSchema: [otherRows[0]], + relWithEmptySchema: [otherRows[1]], + relWithFullSchema: [otherRows[2]], + relWithHalfSchema: [otherRows[3]], + relWithIllegalSchema: [otherRows[4]], + }) + + const retrieved = await retrieveDelegate(row) + + expect(retrieved).toEqual( + expect.objectContaining({ + title: row.title, + relWithNoSchema: [ + { + _id: otherRows[0]._id, + primaryDisplay: otherRows[0].name, + }, + ], + relWithEmptySchema: [ + { + _id: otherRows[1]._id, + primaryDisplay: otherRows[1].name, + }, + ], + relWithFullSchema: [ + { + _id: otherRows[2]._id, + primaryDisplay: otherRows[2].name, + name: otherRows[2].name, + age: otherRows[2].age, + id: otherRows[2].id, + }, + ], + relWithHalfSchema: [ + { + _id: otherRows[3]._id, + primaryDisplay: otherRows[3].name, + name: otherRows[3].name, + }, + ], + relWithIllegalSchema: [ + { + _id: otherRows[4]._id, + primaryDisplay: otherRows[4].name, + name: otherRows[4].name, + }, + ], + }) + ) } ) - expect(json).toHaveLength(1) - expect(json[0].string).toEqual(badString) - }) - - it("exported data can be re-imported", async () => { - // export all - const exportedValue = await config.api.row.exportRows( - tableId, - { query: {} }, - RowExportFormat.CSV - ) - - // import all twice - const rows = await config.api.table.csvToJson({ - csvString: exportedValue, - }) - await config.api.row.bulkImport(tableId, { - rows, - }) - await config.api.row.bulkImport(tableId, { - rows, - }) - - const { rows: allRows } = await config.api.row.search(tableId) - - const expectedRow = { - ...expectedRowData, - _id: expect.any(String), - _rev: expect.any(String), - type: "row", - tableId: tableId, - createdAt: new Date().toISOString(), - updatedAt: new Date().toISOString(), - } - expect(allRows).toEqual([expectedRow, expectedRow, expectedRow]) - }) - }) - }) - - let o2mTable: Table - let m2mTable: Table - beforeAll(async () => { - o2mTable = await config.api.table.save(defaultTable()) - m2mTable = await config.api.table.save(defaultTable()) - }) - - describe.each([ - [ - "relationship fields", - (): Record => ({ - user: { - name: "user", - relationshipType: RelationshipType.ONE_TO_MANY, - 
type: FieldType.LINK, - tableId: o2mTable._id!, - fieldName: "fk_o2m", - }, - users: { - name: "users", - relationshipType: RelationshipType.MANY_TO_MANY, - type: FieldType.LINK, - tableId: m2mTable._id!, - fieldName: "fk_m2m", - }, - }), - (tableId: string) => - config.api.row.save(tableId, { - name: uuid.v4(), - description: generator.paragraph(), - tableId, - }), - (row: Row) => ({ - _id: row._id, - primaryDisplay: row.name, - }), - ], - [ - "bb reference fields", - (): Record => ({ - user: { - name: "user", - type: FieldType.BB_REFERENCE, - subtype: BBReferenceFieldSubType.USER, - }, - users: { - name: "users", - type: FieldType.BB_REFERENCE, - subtype: BBReferenceFieldSubType.USERS, - }, - }), - () => config.createUser(), - (row: Row) => ({ - _id: row._id, - primaryDisplay: row.email, - email: row.email, - firstName: row.firstName, - lastName: row.lastName, - }), - ], - ])("links - %s", (__, relSchema, dataGenerator, resultMapper) => { - let tableId: string - let o2mData: Row[] - let m2mData: Row[] - - beforeAll(async () => { - const table = await config.api.table.save( - defaultTable({ schema: relSchema() }) - ) - tableId = table._id! - - o2mData = [ - await dataGenerator(o2mTable._id!), - await dataGenerator(o2mTable._id!), - await dataGenerator(o2mTable._id!), - await dataGenerator(o2mTable._id!), - ] - - m2mData = [ - await dataGenerator(m2mTable._id!), - await dataGenerator(m2mTable._id!), - await dataGenerator(m2mTable._id!), - await dataGenerator(m2mTable._id!), - ] - }) - - it("can save a row when relationship fields are empty", async () => { - const row = await config.api.row.save(tableId, { - name: "foo", - description: "bar", - }) - - expect(row).toEqual({ - _id: expect.any(String), - _rev: expect.any(String), - id: isInternal ? undefined : expect.any(Number), - type: isInternal ? "row" : undefined, - name: "foo", - description: "bar", - tableId, - }) - }) - - it("can save a row with a single relationship field", async () => { - const user = _.sample(o2mData)! - const row = await config.api.row.save(tableId, { - name: "foo", - description: "bar", - user: [user], - }) - - expect(row).toEqual({ - name: "foo", - description: "bar", - tableId, - user: [user].map(u => resultMapper(u)), - _id: expect.any(String), - _rev: expect.any(String), - id: isInternal ? undefined : expect.any(Number), - type: isInternal ? "row" : undefined, - [`fk_${o2mTable.name}_fk_o2m`]: isInternal ? undefined : user.id, - }) - }) - - it("can save a row with a multiple relationship field", async () => { - const selectedUsers = _.sampleSize(m2mData, 2) - const row = await config.api.row.save(tableId, { - name: "foo", - description: "bar", - users: selectedUsers, - }) - - expect(row).toEqual({ - name: "foo", - description: "bar", - tableId, - users: expect.arrayContaining( - selectedUsers.map(u => resultMapper(u)) - ), - _id: expect.any(String), - _rev: expect.any(String), - id: isInternal ? undefined : expect.any(Number), - type: isInternal ? "row" : undefined, - }) - }) - - it("can retrieve rows with no populated relationships", async () => { - const row = await config.api.row.save(tableId, { - name: "foo", - description: "bar", - }) - - const retrieved = await config.api.row.get(tableId, row._id!) - expect(retrieved).toEqual({ - name: "foo", - description: "bar", - tableId, - user: undefined, - users: undefined, - _id: row._id, - _rev: expect.any(String), - id: isInternal ? 
undefined : expect.any(Number), - ...defaultRowFields, - }) - }) - - it("can retrieve rows with populated relationships", async () => { - const user1 = _.sample(o2mData)! - const [user2, user3] = _.sampleSize(m2mData, 2) - - const row = await config.api.row.save(tableId, { - name: "foo", - description: "bar", - users: [user2, user3], - user: [user1], - }) - - const retrieved = await config.api.row.get(tableId, row._id!) - expect(retrieved).toEqual({ - name: "foo", - description: "bar", - tableId, - user: expect.arrayContaining([user1].map(u => resultMapper(u))), - users: expect.arrayContaining( - [user2, user3].map(u => resultMapper(u)) - ), - _id: row._id, - _rev: expect.any(String), - id: isInternal ? undefined : expect.any(Number), - [`fk_${o2mTable.name}_fk_o2m`]: isInternal ? undefined : user1.id, - ...defaultRowFields, - }) - }) - - it("can update an existing populated row", async () => { - const user = _.sample(o2mData)! - const [users1, users2, users3] = _.sampleSize(m2mData, 3) - - const row = await config.api.row.save(tableId, { - name: "foo", - description: "bar", - users: [users1, users2], - }) - - const updatedRow = await config.api.row.save(tableId, { - ...row, - user: [user], - users: [users3, users1], - }) - expect(updatedRow).toEqual({ - name: "foo", - description: "bar", - tableId, - user: expect.arrayContaining([user].map(u => resultMapper(u))), - users: expect.arrayContaining( - [users3, users1].map(u => resultMapper(u)) - ), - _id: row._id, - _rev: expect.any(String), - id: isInternal ? undefined : expect.any(Number), - type: isInternal ? "row" : undefined, - [`fk_${o2mTable.name}_fk_o2m`]: isInternal ? undefined : user.id, - }) - }) - - it("can wipe an existing populated relationships in row", async () => { - const [user1, user2] = _.sampleSize(m2mData, 2) - const row = await config.api.row.save(tableId, { - name: "foo", - description: "bar", - users: [user1, user2], - }) - - const updatedRow = await config.api.row.save(tableId, { - ...row, - user: null, - users: null, - }) - expect(updatedRow).toEqual({ - name: "foo", - description: "bar", - tableId, - _id: row._id, - _rev: expect.any(String), - id: isInternal ? undefined : expect.any(Number), - type: isInternal ? "row" : undefined, - }) - }) - - it("fetch all will populate the relationships", async () => { - const [user1] = _.sampleSize(o2mData, 1) - const [users1, users2, users3] = _.sampleSize(m2mData, 3) - - const rows = [ - { - name: generator.name(), - description: generator.name(), - users: [users1, users2], - }, - { - name: generator.name(), - description: generator.name(), - user: [user1], - users: [users1, users3], - }, - { - name: generator.name(), - description: generator.name(), - users: [users3], - }, - ] - - await config.api.row.save(tableId, rows[0]) - await config.api.row.save(tableId, rows[1]) - await config.api.row.save(tableId, rows[2]) - - const res = await config.api.row.fetch(tableId) - - expect(res).toEqual( - expect.arrayContaining( - rows.map(r => ({ - name: r.name, - description: r.description, - tableId, - user: r.user?.map(u => resultMapper(u)), - users: r.users?.length - ? expect.arrayContaining(r.users?.map(u => resultMapper(u))) - : undefined, - _id: expect.any(String), - _rev: expect.any(String), - id: isInternal ? undefined : expect.any(Number), - [`fk_${o2mTable.name}_fk_o2m`]: - isInternal || !r.user?.length ? 
undefined : r.user[0].id, - ...defaultRowFields, - })) - ) - ) - }) - - it("search all will populate the relationships", async () => { - const [user1] = _.sampleSize(o2mData, 1) - const [users1, users2, users3] = _.sampleSize(m2mData, 3) - - const rows = [ - { - name: generator.name(), - description: generator.name(), - users: [users1, users2], - }, - { - name: generator.name(), - description: generator.name(), - user: [user1], - users: [users1, users3], - }, - { - name: generator.name(), - description: generator.name(), - users: [users3], - }, - ] - - await config.api.row.save(tableId, rows[0]) - await config.api.row.save(tableId, rows[1]) - await config.api.row.save(tableId, rows[2]) - - const res = await config.api.row.search(tableId) - - expect(res).toEqual({ - rows: expect.arrayContaining( - rows.map(r => ({ - name: r.name, - description: r.description, - tableId, - user: r.user?.map(u => resultMapper(u)), - users: r.users?.length - ? expect.arrayContaining(r.users?.map(u => resultMapper(u))) - : undefined, - _id: expect.any(String), - _rev: expect.any(String), - id: isInternal ? undefined : expect.any(Number), - [`fk_${o2mTable.name}_fk_o2m`]: - isInternal || !r.user?.length ? undefined : r.user[0].id, - ...defaultRowFields, - })) - ), - ...(isInternal - ? {} - : { - hasNextPage: false, - }), - }) - }) - }) - - // Upserting isn't yet supported in MSSQL or Oracle, see: - // https://github.com/knex/knex/pull/6050 - !isMSSQL && - !isOracle && - describe("relationships", () => { - let tableId: string - let viewId: string - - let auxData: Row[] = [] - - beforeAll(async () => { - const aux2Table = await config.api.table.save(saveTableRequest()) - const aux2Data = await config.api.row.save(aux2Table._id!, {}) - - const auxTable = await config.api.table.save( - saveTableRequest({ - primaryDisplay: "name", - schema: { - name: { - name: "name", - type: FieldType.STRING, - constraints: { presence: true }, - }, - age: { - name: "age", - type: FieldType.NUMBER, - constraints: { presence: true }, - }, - address: { - name: "address", - type: FieldType.STRING, - constraints: { presence: true }, - visible: false, - }, - link: { - name: "link", - type: FieldType.LINK, - tableId: aux2Table._id!, - relationshipType: RelationshipType.MANY_TO_MANY, - fieldName: "fk_aux", - constraints: { presence: true }, - }, - formula: { - name: "formula", - type: FieldType.FORMULA, - formula: "{{ any }}", - constraints: { presence: true }, - }, + it.each([ + [ + "from table fetch", + async (row: Row) => { + const rows = await config.api.row.fetch(tableId) + return rows.find(r => r._id === row._id!) }, - }) - ) - const auxTableId = auxTable._id! + ], + [ + "from table search", + async (row: Row) => { + const { rows } = await config.api.row.search(tableId) + return rows.find(r => r._id === row._id!) 
+ }, + ], + ])( + "does not enrich when fetching from the table (via %s)", + async (__, retrieveDelegate) => { + const otherRows = _.sampleSize(auxData, 5) - for (const name of generator.unique(() => generator.name(), 10)) { - auxData.push( - await config.api.row.save(auxTableId, { - name, - age: generator.age(), - address: generator.address(), - link: [aux2Data], + const row = await config.api.row.save(viewId, { + title: generator.word(), + relWithNoSchema: [otherRows[0]], + relWithEmptySchema: [otherRows[1]], + relWithFullSchema: [otherRows[2]], + relWithHalfSchema: [otherRows[3]], + relWithIllegalSchema: [otherRows[4]], + }) + + const retrieved = await retrieveDelegate(row) + + expect(retrieved).toEqual( + expect.objectContaining({ + title: row.title, + relWithNoSchema: [ + { + _id: otherRows[0]._id, + primaryDisplay: otherRows[0].name, + }, + ], + relWithEmptySchema: [ + { + _id: otherRows[1]._id, + primaryDisplay: otherRows[1].name, + }, + ], + relWithFullSchema: [ + { + _id: otherRows[2]._id, + primaryDisplay: otherRows[2].name, + }, + ], + relWithHalfSchema: [ + { + _id: otherRows[3]._id, + primaryDisplay: otherRows[3].name, + }, + ], + relWithIllegalSchema: [ + { + _id: otherRows[4]._id, + primaryDisplay: otherRows[4].name, + }, + ], + }) + ) + } + ) + }) + + isInternal && + describe("AI fields", () => { + let table: Table + + beforeAll(async () => { + mocks.licenses.useBudibaseAI() + mocks.licenses.useAICustomConfigs() + table = await config.api.table.save( + saveTableRequest({ + schema: { + ai: { + name: "ai", + type: FieldType.AI, + operation: AIOperationEnum.PROMPT, + prompt: "Convert the following to German: '{{ product }}'", + }, + product: { + name: "product", + type: FieldType.STRING, + }, + }, }) ) - } - const table = await config.api.table.save( - saveTableRequest({ - schema: { - title: { - name: "title", - type: FieldType.STRING, - constraints: { presence: true }, - }, - relWithNoSchema: { - name: "relWithNoSchema", - relationshipType: RelationshipType.ONE_TO_MANY, - type: FieldType.LINK, - tableId: auxTableId, - fieldName: "fk_relWithNoSchema", - constraints: { presence: true }, - }, - relWithEmptySchema: { - name: "relWithEmptySchema", - relationshipType: RelationshipType.ONE_TO_MANY, - type: FieldType.LINK, - tableId: auxTableId, - fieldName: "fk_relWithEmptySchema", - constraints: { presence: true }, - }, - relWithFullSchema: { - name: "relWithFullSchema", - relationshipType: RelationshipType.ONE_TO_MANY, - type: FieldType.LINK, - tableId: auxTableId, - fieldName: "fk_relWithFullSchema", - constraints: { presence: true }, - }, - relWithHalfSchema: { - name: "relWithHalfSchema", - relationshipType: RelationshipType.ONE_TO_MANY, - type: FieldType.LINK, - tableId: auxTableId, - fieldName: "fk_relWithHalfSchema", - constraints: { presence: true }, - }, - relWithIllegalSchema: { - name: "relWithIllegalSchema", - relationshipType: RelationshipType.ONE_TO_MANY, - type: FieldType.LINK, - tableId: auxTableId, - fieldName: "fk_relWithIllegalSchema", - constraints: { presence: true }, - }, - }, + await config.api.row.save(table._id!, { + product: generator.word(), }) - ) - tableId = table._id! 
- const view = await config.api.viewV2.create({ - name: generator.guid(), - tableId, - schema: { - title: { - visible: true, - }, - relWithNoSchema: { - visible: true, - }, - relWithEmptySchema: { - visible: true, - columns: {}, - }, - relWithFullSchema: { - visible: true, - columns: Object.keys(auxTable.schema).reduce< - Record - >((acc, c) => ({ ...acc, [c]: { visible: true } }), {}), - }, - relWithHalfSchema: { - visible: true, - columns: { - name: { visible: true }, - age: { visible: false, readonly: true }, - }, - }, - relWithIllegalSchema: { - visible: true, - columns: { - name: { visible: true }, - address: { visible: true }, - unexisting: { visible: true }, - }, - }, - }, }) - viewId = view.id + afterAll(() => { + jest.unmock("@budibase/pro") + }) + + it("should be able to save a row with an AI column", async () => { + const { rows } = await config.api.row.search(table._id!) + expect(rows.length).toBe(1) + expect(rows[0].ai).toEqual("Mock LLM Response") + }) + + it("should be able to update a row with an AI column", async () => { + const { rows } = await config.api.row.search(table._id!) + expect(rows.length).toBe(1) + await config.api.row.save(table._id!, { + product: generator.word(), + ...rows[0], + }) + expect(rows.length).toBe(1) + expect(rows[0].ai).toEqual("Mock LLM Response") + }) }) - const testScenarios: [string, (row: Row) => Promise | Row][] = [ - ["get row", (row: Row) => config.api.row.get(viewId, row._id!)], - [ - "from view search", - async (row: Row) => { - const { rows } = await config.api.viewV2.search(viewId) - return rows.find(r => r._id === row._id!) - }, - ], - ["from original saved row", (row: Row) => row], - ["from updated row", (row: Row) => config.api.row.save(viewId, row)], - ] - - it.each(testScenarios)( - "can retrieve rows with populated relationships (via %s)", - async (__, retrieveDelegate) => { - const otherRows = _.sampleSize(auxData, 5) - - const row = await config.api.row.save(viewId, { - title: generator.word(), - relWithNoSchema: [otherRows[0]], - relWithEmptySchema: [otherRows[1]], - relWithFullSchema: [otherRows[2]], - relWithHalfSchema: [otherRows[3]], - relWithIllegalSchema: [otherRows[4]], - }) - - const retrieved = await retrieveDelegate(row) - - expect(retrieved).toEqual( - expect.objectContaining({ - title: row.title, - relWithNoSchema: [ - { - _id: otherRows[0]._id, - primaryDisplay: otherRows[0].name, - }, - ], - relWithEmptySchema: [ - { - _id: otherRows[1]._id, - primaryDisplay: otherRows[1].name, - }, - ], - relWithFullSchema: [ - { - _id: otherRows[2]._id, - primaryDisplay: otherRows[2].name, - name: otherRows[2].name, - age: otherRows[2].age, - id: otherRows[2].id, - }, - ], - relWithHalfSchema: [ - { - _id: otherRows[3]._id, - primaryDisplay: otherRows[3].name, - name: otherRows[3].name, - }, - ], - relWithIllegalSchema: [ - { - _id: otherRows[4]._id, - primaryDisplay: otherRows[4].name, - name: otherRows[4].name, - }, - ], - }) - ) - } - ) - - it.each([ - [ - "from table fetch", - async (row: Row) => { - const rows = await config.api.row.fetch(tableId) - return rows.find(r => r._id === row._id!) - }, - ], - [ - "from table search", - async (row: Row) => { - const { rows } = await config.api.row.search(tableId) - return rows.find(r => r._id === row._id!) 
- }, - ], - ])( - "does not enrich when fetching from the table (via %s)", - async (__, retrieveDelegate) => { - const otherRows = _.sampleSize(auxData, 5) - - const row = await config.api.row.save(viewId, { - title: generator.word(), - relWithNoSchema: [otherRows[0]], - relWithEmptySchema: [otherRows[1]], - relWithFullSchema: [otherRows[2]], - relWithHalfSchema: [otherRows[3]], - relWithIllegalSchema: [otherRows[4]], - }) - - const retrieved = await retrieveDelegate(row) - - expect(retrieved).toEqual( - expect.objectContaining({ - title: row.title, - relWithNoSchema: [ - { - _id: otherRows[0]._id, - primaryDisplay: otherRows[0].name, - }, - ], - relWithEmptySchema: [ - { - _id: otherRows[1]._id, - primaryDisplay: otherRows[1].name, - }, - ], - relWithFullSchema: [ - { - _id: otherRows[2]._id, - primaryDisplay: otherRows[2].name, - }, - ], - relWithHalfSchema: [ - { - _id: otherRows[3]._id, - primaryDisplay: otherRows[3].name, - }, - ], - relWithIllegalSchema: [ - { - _id: otherRows[4]._id, - primaryDisplay: otherRows[4].name, - }, - ], - }) - ) - } - ) - }) - - isInternal && - describe("AI fields", () => { + describe("Formula fields", () => { let table: Table + let otherTable: Table + let relatedRow: Row, mainRow: Row beforeAll(async () => { - mocks.licenses.useBudibaseAI() - mocks.licenses.useAICustomConfigs() + otherTable = await config.api.table.save(defaultTable()) table = await config.api.table.save( saveTableRequest({ schema: { - ai: { - name: "ai", - type: FieldType.AI, - operation: AIOperationEnum.PROMPT, - prompt: "Convert the following to German: '{{ product }}'", - }, - product: { - name: "product", - type: FieldType.STRING, - }, - }, - }) - ) - - await config.api.row.save(table._id!, { - product: generator.word(), - }) - }) - - afterAll(() => { - jest.unmock("@budibase/pro") - }) - - it("should be able to save a row with an AI column", async () => { - const { rows } = await config.api.row.search(table._id!) - expect(rows.length).toBe(1) - expect(rows[0].ai).toEqual("Mock LLM Response") - }) - - it("should be able to update a row with an AI column", async () => { - const { rows } = await config.api.row.search(table._id!) 
- expect(rows.length).toBe(1) - await config.api.row.save(table._id!, { - product: generator.word(), - ...rows[0], - }) - expect(rows.length).toBe(1) - expect(rows[0].ai).toEqual("Mock LLM Response") - }) - }) - - describe("Formula fields", () => { - let table: Table - let otherTable: Table - let relatedRow: Row, mainRow: Row - - beforeAll(async () => { - otherTable = await config.api.table.save(defaultTable()) - table = await config.api.table.save( - saveTableRequest({ - schema: { - links: { - name: "links", - fieldName: "links", - type: FieldType.LINK, - tableId: otherTable._id!, - relationshipType: RelationshipType.ONE_TO_MANY, - }, - formula: { - name: "formula", - type: FieldType.FORMULA, - formula: "{{ links.0.name }}", - formulaType: FormulaType.DYNAMIC, - }, - }, - }) - ) - - relatedRow = await config.api.row.save(otherTable._id!, { - name: generator.word(), - description: generator.paragraph(), - }) - mainRow = await config.api.row.save(table._id!, { - name: generator.word(), - description: generator.paragraph(), - tableId: table._id!, - links: [relatedRow._id], - }) - }) - - async function updateFormulaColumn( - formula: string, - opts?: { responseType?: FormulaResponseType; formulaType?: FormulaType } - ) { - table = await config.api.table.save({ - ...table, - schema: { - ...table.schema, - formula: { - name: "formula", - type: FieldType.FORMULA, - formula: formula, - responseType: opts?.responseType, - formulaType: opts?.formulaType || FormulaType.DYNAMIC, - }, - }, - }) - } - - it("should be able to search for rows containing formulas", async () => { - const { rows } = await config.api.row.search(table._id!) - expect(rows.length).toBe(1) - expect(rows[0].links.length).toBe(1) - const row = rows[0] - expect(row.formula).toBe(relatedRow.name) - }) - - it("should coerce - number response type", async () => { - await updateFormulaColumn(encodeJS("return 1"), { - responseType: FieldType.NUMBER, - }) - const { rows } = await config.api.row.search(table._id!) - expect(rows[0].formula).toBe(1) - }) - - it("should coerce - boolean response type", async () => { - await updateFormulaColumn(encodeJS("return true"), { - responseType: FieldType.BOOLEAN, - }) - const { rows } = await config.api.row.search(table._id!) - expect(rows[0].formula).toBe(true) - }) - - it("should coerce - datetime response type", async () => { - await updateFormulaColumn(encodeJS("return new Date()"), { - responseType: FieldType.DATETIME, - }) - const { rows } = await config.api.row.search(table._id!) - expect(isDate(rows[0].formula)).toBe(true) - }) - - it("should coerce - datetime with invalid value", async () => { - await updateFormulaColumn(encodeJS("return 'a'"), { - responseType: FieldType.DATETIME, - }) - const { rows } = await config.api.row.search(table._id!) - expect(rows[0].formula).toBeUndefined() - }) - - it("should coerce handlebars", async () => { - await updateFormulaColumn("{{ add 1 1 }}", { - responseType: FieldType.NUMBER, - }) - const { rows } = await config.api.row.search(table._id!) - expect(rows[0].formula).toBe(2) - }) - - it("should coerce handlebars to string (default)", async () => { - await updateFormulaColumn("{{ add 1 1 }}", { - responseType: FieldType.STRING, - }) - const { rows } = await config.api.row.search(table._id!) 
- expect(rows[0].formula).toBe("2") - }) - - isInternal && - it("should coerce a static handlebars formula", async () => { - await updateFormulaColumn(encodeJS("return 1"), { - responseType: FieldType.NUMBER, - formulaType: FormulaType.STATIC, - }) - // save the row to store the static value - await config.api.row.save(table._id!, mainRow) - const { rows } = await config.api.row.search(table._id!) - expect(rows[0].formula).toBe(1) - }) - }) - - describe("Formula JS protection", () => { - it("should time out JS execution if a single cell takes too long", async () => { - await withEnv({ JS_PER_INVOCATION_TIMEOUT_MS: 40 }, async () => { - const js = encodeJS( - ` - let i = 0; - while (true) { - i++; - } - return i; - ` - ) - - const table = await config.api.table.save( - saveTableRequest({ - schema: { - text: { - name: "text", - type: FieldType.STRING, + links: { + name: "links", + fieldName: "links", + type: FieldType.LINK, + tableId: otherTable._id!, + relationshipType: RelationshipType.ONE_TO_MANY, }, formula: { name: "formula", type: FieldType.FORMULA, - formula: js, + formula: "{{ links.0.name }}", formulaType: FormulaType.DYNAMIC, }, }, }) ) - await config.api.row.save(table._id!, { text: "foo" }) - const { rows } = await config.api.row.search(table._id!) - expect(rows).toHaveLength(1) - const row = rows[0] - expect(row.text).toBe("foo") - expect(row.formula).toBe("Timed out while executing JS") + relatedRow = await config.api.row.save(otherTable._id!, { + name: generator.word(), + description: generator.paragraph(), + }) + mainRow = await config.api.row.save(table._id!, { + name: generator.word(), + description: generator.paragraph(), + tableId: table._id!, + links: [relatedRow._id], + }) }) + + async function updateFormulaColumn( + formula: string, + opts?: { + responseType?: FormulaResponseType + formulaType?: FormulaType + } + ) { + table = await config.api.table.save({ + ...table, + schema: { + ...table.schema, + formula: { + name: "formula", + type: FieldType.FORMULA, + formula: formula, + responseType: opts?.responseType, + formulaType: opts?.formulaType || FormulaType.DYNAMIC, + }, + }, + }) + } + + it("should be able to search for rows containing formulas", async () => { + const { rows } = await config.api.row.search(table._id!) + expect(rows.length).toBe(1) + expect(rows[0].links.length).toBe(1) + const row = rows[0] + expect(row.formula).toBe(relatedRow.name) + }) + + it("should coerce - number response type", async () => { + await updateFormulaColumn(encodeJS("return 1"), { + responseType: FieldType.NUMBER, + }) + const { rows } = await config.api.row.search(table._id!) + expect(rows[0].formula).toBe(1) + }) + + it("should coerce - boolean response type", async () => { + await updateFormulaColumn(encodeJS("return true"), { + responseType: FieldType.BOOLEAN, + }) + const { rows } = await config.api.row.search(table._id!) + expect(rows[0].formula).toBe(true) + }) + + it("should coerce - datetime response type", async () => { + await updateFormulaColumn(encodeJS("return new Date()"), { + responseType: FieldType.DATETIME, + }) + const { rows } = await config.api.row.search(table._id!) + expect(isDate(rows[0].formula)).toBe(true) + }) + + it("should coerce - datetime with invalid value", async () => { + await updateFormulaColumn(encodeJS("return 'a'"), { + responseType: FieldType.DATETIME, + }) + const { rows } = await config.api.row.search(table._id!) 
+ expect(rows[0].formula).toBeUndefined() + }) + + it("should coerce handlebars", async () => { + await updateFormulaColumn("{{ add 1 1 }}", { + responseType: FieldType.NUMBER, + }) + const { rows } = await config.api.row.search(table._id!) + expect(rows[0].formula).toBe(2) + }) + + it("should coerce handlebars to string (default)", async () => { + await updateFormulaColumn("{{ add 1 1 }}", { + responseType: FieldType.STRING, + }) + const { rows } = await config.api.row.search(table._id!) + expect(rows[0].formula).toBe("2") + }) + + isInternal && + it("should coerce a static handlebars formula", async () => { + await updateFormulaColumn(encodeJS("return 1"), { + responseType: FieldType.NUMBER, + formulaType: FormulaType.STATIC, + }) + // save the row to store the static value + await config.api.row.save(table._id!, mainRow) + const { rows } = await config.api.row.search(table._id!) + expect(rows[0].formula).toBe(1) + }) }) - it("should time out JS execution if a multiple cells take too long", async () => { - await withEnv( - { - JS_PER_INVOCATION_TIMEOUT_MS: 40, - JS_PER_REQUEST_TIMEOUT_MS: 80, - }, - async () => { + describe("Formula JS protection", () => { + it("should time out JS execution if a single cell takes too long", async () => { + await withEnv({ JS_PER_INVOCATION_TIMEOUT_MS: 40 }, async () => { const js = encodeJS( ` let i = 0; @@ -3402,84 +3375,126 @@ datasourceDescribe( }) ) - for (let i = 0; i < 10; i++) { - await config.api.row.save(table._id!, { text: "foo" }) - } + await config.api.row.save(table._id!, { text: "foo" }) + const { rows } = await config.api.row.search(table._id!) + expect(rows).toHaveLength(1) + const row = rows[0] + expect(row.text).toBe("foo") + expect(row.formula).toBe("Timed out while executing JS") + }) + }) - // Run this test 3 times to make sure that there's no cross-request - // pollution of the execution time tracking. - for (let reqs = 0; reqs < 3; reqs++) { - const { rows } = await config.api.row.search(table._id!) - expect(rows).toHaveLength(10) + it("should time out JS execution if a multiple cells take too long", async () => { + await withEnv( + { + JS_PER_INVOCATION_TIMEOUT_MS: 40, + JS_PER_REQUEST_TIMEOUT_MS: 80, + }, + async () => { + const js = encodeJS( + ` + let i = 0; + while (true) { + i++; + } + return i; + ` + ) - let i = 0 - for (; i < 10; i++) { - const row = rows[i] - if (row.formula !== JsTimeoutError.message) { - break + const table = await config.api.table.save( + saveTableRequest({ + schema: { + text: { + name: "text", + type: FieldType.STRING, + }, + formula: { + name: "formula", + type: FieldType.FORMULA, + formula: js, + formulaType: FormulaType.DYNAMIC, + }, + }, + }) + ) + + for (let i = 0; i < 10; i++) { + await config.api.row.save(table._id!, { text: "foo" }) + } + + // Run this test 3 times to make sure that there's no cross-request + // pollution of the execution time tracking. + for (let reqs = 0; reqs < 3; reqs++) { + const { rows } = await config.api.row.search(table._id!) + expect(rows).toHaveLength(10) + + let i = 0 + for (; i < 10; i++) { + const row = rows[i] + if (row.formula !== JsTimeoutError.message) { + break + } + } + + // Given the execution times are not deterministic, we can't be sure + // of the exact number of rows that were executed before the timeout + // but it should absolutely be at least 1. 
+ expect(i).toBeGreaterThan(0) + expect(i).toBeLessThan(5) + + for (; i < 10; i++) { + const row = rows[i] + expect(row.text).toBe("foo") + expect(row.formula).toStartWith("CPU time limit exceeded ") } } - - // Given the execution times are not deterministic, we can't be sure - // of the exact number of rows that were executed before the timeout - // but it should absolutely be at least 1. - expect(i).toBeGreaterThan(0) - expect(i).toBeLessThan(5) - - for (; i < 10; i++) { - const row = rows[i] - expect(row.text).toBe("foo") - expect(row.formula).toStartWith("CPU time limit exceeded ") - } } + ) + }) + + it("should not carry over context between formulas", async () => { + const js = encodeJS(`return $("[text]");`) + const table = await config.api.table.save( + saveTableRequest({ + schema: { + text: { + name: "text", + type: FieldType.STRING, + }, + formula: { + name: "formula", + type: FieldType.FORMULA, + formula: js, + formulaType: FormulaType.DYNAMIC, + }, + }, + }) + ) + + for (let i = 0; i < 10; i++) { + await config.api.row.save(table._id!, { text: `foo${i}` }) } - ) + + const { rows } = await config.api.row.search(table._id!) + expect(rows).toHaveLength(10) + + const formulaValues = rows.map(r => r.formula) + expect(formulaValues).toEqual( + expect.arrayContaining([ + "foo0", + "foo1", + "foo2", + "foo3", + "foo4", + "foo5", + "foo6", + "foo7", + "foo8", + "foo9", + ]) + ) + }) }) - - it("should not carry over context between formulas", async () => { - const js = encodeJS(`return $("[text]");`) - const table = await config.api.table.save( - saveTableRequest({ - schema: { - text: { - name: "text", - type: FieldType.STRING, - }, - formula: { - name: "formula", - type: FieldType.FORMULA, - formula: js, - formulaType: FormulaType.DYNAMIC, - }, - }, - }) - ) - - for (let i = 0; i < 10; i++) { - await config.api.row.save(table._id!, { text: `foo${i}` }) - } - - const { rows } = await config.api.row.search(table._id!) - expect(rows).toHaveLength(10) - - const formulaValues = rows.map(r => r.formula) - expect(formulaValues).toEqual( - expect.arrayContaining([ - "foo0", - "foo1", - "foo2", - "foo3", - "foo4", - "foo5", - "foo6", - "foo7", - "foo8", - "foo9", - ]) - ) - }) - }) - } -) - -// todo: remove me + } + ) +} diff --git a/packages/server/src/api/routes/tests/rowAction.spec.ts b/packages/server/src/api/routes/tests/rowAction.spec.ts index 58d7509798..76046c06ea 100644 --- a/packages/server/src/api/routes/tests/rowAction.spec.ts +++ b/packages/server/src/api/routes/tests/rowAction.spec.ts @@ -977,63 +977,69 @@ describe("/rowsActions", () => { }) }) -datasourceDescribe( - { name: "row actions (%s)", only: [DatabaseName.SQS, DatabaseName.POSTGRES] }, - ({ config, dsProvider, isInternal }) => { - let datasource: Datasource | undefined +const descriptions = datasourceDescribe({ + only: [DatabaseName.SQS, DatabaseName.POSTGRES], +}) - beforeAll(async () => { - const ds = await dsProvider() - datasource = ds.datasource - }) +if (descriptions.length) { + describe.each(descriptions)( + "row actions ($dbName)", + ({ config, dsProvider, isInternal }) => { + let datasource: Datasource | undefined - async function getTable(): Promise { - if (isInternal) { - await config.api.application.addSampleData(config.getAppId()) - const tables = await config.api.table.fetch() - return tables.find(t => t.sourceId === DEFAULT_BB_DATASOURCE_ID)! - } else { - const table = await config.api.table.save( - setup.structures.tableForDatasource(datasource!) 
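// The pattern introduced throughout these specs: datasourceDescribe() now
// returns an array of descriptions instead of registering the suites itself,
// and each caller feeds that array to describe.each, guarded by a length check
// so Jest never receives an empty parameter table. Roughly:
//
//   const descriptions = datasourceDescribe({
//     only: [DatabaseName.SQS, DatabaseName.POSTGRES],
//   })
//
//   if (descriptions.length) {
//     describe.each(descriptions)(
//       "my suite ($dbName)",
//       ({ config, dsProvider, isInternal }) => {
//         // per-datasource tests, exactly as in the blocks above and below
//       }
//     )
//   }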
- ) - return table - } - } + beforeAll(async () => { + const ds = await dsProvider() + datasource = ds.datasource + }) - it("should delete all the row actions (and automations) for its tables when a datasource is deleted", async () => { - async function getRowActionsFromDb(tableId: string) { - return await context.doInAppContext(config.getAppId(), async () => { - const db = context.getAppDB() - const tableDoc = await db.tryGet( - generateRowActionsID(tableId) + async function getTable(): Promise
{ + if (isInternal) { + await config.api.application.addSampleData(config.getAppId()) + const tables = await config.api.table.fetch() + return tables.find(t => t.sourceId === DEFAULT_BB_DATASOURCE_ID)! + } else { + const table = await config.api.table.save( + setup.structures.tableForDatasource(datasource!) ) - return tableDoc - }) + return table + } } - const table = await getTable() - const tableId = table._id! + it("should delete all the row actions (and automations) for its tables when a datasource is deleted", async () => { + async function getRowActionsFromDb(tableId: string) { + return await context.doInAppContext(config.getAppId(), async () => { + const db = context.getAppDB() + const tableDoc = await db.tryGet( + generateRowActionsID(tableId) + ) + return tableDoc + }) + } - await config.api.rowAction.save(tableId, { - name: generator.guid(), + const table = await getTable() + const tableId = table._id! + + await config.api.rowAction.save(tableId, { + name: generator.guid(), + }) + await config.api.rowAction.save(tableId, { + name: generator.guid(), + }) + + const { actions } = (await getRowActionsFromDb(tableId))! + expect(Object.entries(actions)).toHaveLength(2) + + const { automations } = await config.api.automation.fetch() + expect(automations).toHaveLength(2) + + const datasource = await config.api.datasource.get(table.sourceId) + await config.api.datasource.delete(datasource) + + const automationsResp = await config.api.automation.fetch() + expect(automationsResp.automations).toHaveLength(0) + + expect(await getRowActionsFromDb(tableId)).toBeUndefined() }) - await config.api.rowAction.save(tableId, { - name: generator.guid(), - }) - - const { actions } = (await getRowActionsFromDb(tableId))! - expect(Object.entries(actions)).toHaveLength(2) - - const { automations } = await config.api.automation.fetch() - expect(automations).toHaveLength(2) - - const datasource = await config.api.datasource.get(table.sourceId) - await config.api.datasource.delete(datasource) - - const automationsResp = await config.api.automation.fetch() - expect(automationsResp.automations).toHaveLength(0) - - expect(await getRowActionsFromDb(tableId)).toBeUndefined() - }) - } -) + } + ) +} diff --git a/packages/server/src/api/routes/tests/search.spec.ts b/packages/server/src/api/routes/tests/search.spec.ts index 2c13cac014..5edc4f29ad 100644 --- a/packages/server/src/api/routes/tests/search.spec.ts +++ b/packages/server/src/api/routes/tests/search.spec.ts @@ -60,3690 +60,3781 @@ jest.mock("@budibase/pro", () => ({ }, })) -datasourceDescribe( - { - name: "search (%s)", - exclude: [DatabaseName.MONGODB], - }, - ({ config, dsProvider, isInternal, isOracle, isSql }) => { - let datasource: Datasource | undefined - let client: Knex | undefined - let tableOrViewId: string - let rows: Row[] +const descriptions = datasourceDescribe({ exclude: [DatabaseName.MONGODB] }) - async function basicRelationshipTables(type: RelationshipType) { - const relatedTable = await createTable({ - name: { name: "name", type: FieldType.STRING }, - }) - const tableId = await createTable({ - name: { name: "name", type: FieldType.STRING }, - //@ts-ignore - API accepts this structure, will build out rest of definition - productCat: { - type: FieldType.LINK, - relationshipType: type, - name: "productCat", - fieldName: "product", - tableId: relatedTable, - constraints: { - type: "array", +if (descriptions.length) { + describe.each(descriptions)( + "search ($dbName)", + ({ config, dsProvider, isInternal, isOracle, isSql }) => { + let 
datasource: Datasource | undefined + let client: Knex | undefined + let tableOrViewId: string + let rows: Row[] + + async function basicRelationshipTables(type: RelationshipType) { + const relatedTable = await createTable({ + name: { name: "name", type: FieldType.STRING }, + }) + const tableId = await createTable({ + name: { name: "name", type: FieldType.STRING }, + //@ts-ignore - API accepts this structure, will build out rest of definition + productCat: { + type: FieldType.LINK, + relationshipType: type, + name: "productCat", + fieldName: "product", + tableId: relatedTable, + constraints: { + type: "array", + }, }, - }, - }) - return { - relatedTable: await config.api.table.get(relatedTable), - tableId, + }) + return { + relatedTable: await config.api.table.get(relatedTable), + tableId, + } } - } - beforeAll(async () => { - const ds = await dsProvider() - datasource = ds.datasource - client = ds.client + beforeAll(async () => { + const ds = await dsProvider() + datasource = ds.datasource + client = ds.client - config.app = await config.api.application.update(config.getAppId(), { - snippets: [ - { - name: "WeeksAgo", - code: ` + config.app = await config.api.application.update(config.getAppId(), { + snippets: [ + { + name: "WeeksAgo", + code: ` return function (weeks) { const currentTime = new Date(${Date.now()}); currentTime.setDate(currentTime.getDate()-(7 * (weeks || 1))); return currentTime.toISOString(); } `, - }, - ], - }) - }) - - async function createTable(schema?: TableSchema) { - const table = await config.api.table.save( - tableForDatasource(datasource, { schema }) - ) - return table._id! - } - - async function createView(tableId: string, schema?: ViewV2Schema) { - const view = await config.api.viewV2.create({ - tableId: tableId, - name: generator.guid(), - schema, - }) - return view.id - } - - async function createRows(arr: Record[]) { - // Shuffling to avoid false positives given a fixed order - for (const row of _.shuffle(arr)) { - await config.api.row.save(tableOrViewId, row) - } - rows = await config.api.row.fetch(tableOrViewId) - } - - async function getTable(tableOrViewId: string): Promise
{ - if (docIds.isViewId(tableOrViewId)) { - const view = await config.api.viewV2.get(tableOrViewId) - return await config.api.table.get(view.tableId) - } else { - return await config.api.table.get(tableOrViewId) - } - } - - async function assertTableExists(nameOrTable: string | Table) { - const name = - typeof nameOrTable === "string" ? nameOrTable : nameOrTable.name - expect(await client!.schema.hasTable(name)).toBeTrue() - } - - async function assertTableNumRows( - nameOrTable: string | Table, - numRows: number - ) { - const name = - typeof nameOrTable === "string" ? nameOrTable : nameOrTable.name - const row = await client!.from(name).count() - const count = parseInt(Object.values(row[0])[0] as string) - expect(count).toEqual(numRows) - } - - describe.each([true, false])("in-memory: %s", isInMemory => { - // We only run the in-memory tests during the SQS (isInternal) run - if (isInMemory && !isInternal) { - return - } - - type CreateFn = (schema?: TableSchema) => Promise - let tableOrView: [string, CreateFn][] = [["table", createTable]] - - if (!isInMemory) { - tableOrView.push([ - "view", - async (schema?: TableSchema) => { - const tableId = await createTable(schema) - const viewId = await createView( - tableId, - Object.keys(schema || {}).reduce( - (viewSchema, fieldName) => { - const field = schema![fieldName] - viewSchema[fieldName] = { - visible: field.visible ?? true, - readonly: false, - } - return viewSchema - }, - {} - ) - ) - return viewId - }, - ]) - } - - describe.each(tableOrView)("from %s", (sourceType, createTableOrView) => { - const isView = sourceType === "view" - - class SearchAssertion { - constructor(private readonly query: SearchRowRequest) {} - - private async performSearch(): Promise> { - if (isInMemory) { - const query: RequiredKeys> = { - sort: this.query.sort, - query: this.query.query || {}, - paginate: this.query.paginate, - bookmark: this.query.bookmark ?? undefined, - limit: this.query.limit, - sortOrder: this.query.sortOrder, - sortType: this.query.sortType, - version: this.query.version, - disableEscaping: this.query.disableEscaping, - countRows: this.query.countRows, - viewId: undefined, - fields: undefined, - indexer: undefined, - rows: undefined, - } - return dataFilters.search(_.cloneDeep(rows), query) - } else { - return config.api.row.search(tableOrViewId, this.query) - } - } - - // We originally used _.isMatch to compare rows, but found that when - // comparing arrays it would return true if the source array was a subset of - // the target array. This would sometimes create false matches. This - // function is a more strict version of _.isMatch that only returns true if - // the source array is an exact match of the target. - // - // _.isMatch("100", "1") also returns true which is not what we want. - private isMatch>( - expected: T, - found: T - ) { - if (!expected) { - throw new Error("Expected is undefined") - } - if (!found) { - return false - } - - for (const key of Object.keys(expected)) { - if (Array.isArray(expected[key])) { - if (!Array.isArray(found[key])) { - return false - } - if (expected[key].length !== found[key].length) { - return false - } - if (!_.isMatch(found[key], expected[key])) { - return false - } - } else if (typeof expected[key] === "object") { - if (!this.isMatch(expected[key], found[key])) { - return false - } - } else { - if (expected[key] !== found[key]) { - return false - } - } - } - return true - } - - // This function exists to ensure that the same row is not matched twice. 
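// Editor's note, as a small standalone illustration of the comment above on
// why this spec rolls its own isMatch (expected results mirror the comment's
// claims rather than being verified here):
//
//   import _ from "lodash" // standalone; the spec already has `_` in scope
//
//   // Subset arrays are treated as a match by lodash...
//   _.isMatch({ tags: ["a", "b", "c"] }, { tags: ["a", "b"] }) // => true
//   // ...and loose comparisons can also surprise:
//   _.isMatch("100", "1") // => true
//
// SearchAssertion.isMatch therefore requires arrays to have equal length and
// compares scalar values with strict equality.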
- // When a row gets matched, we make sure to remove it from the list of rows - // we're matching against. - private popRow( - expectedRow: T, - foundRows: T[] - ): NonNullable { - const row = foundRows.find(row => this.isMatch(expectedRow, row)) - if (!row) { - const fields = Object.keys(expectedRow) - // To make the error message more readable, we only include the fields - // that are present in the expected row. - const searchedObjects = foundRows.map(row => _.pick(row, fields)) - throw new Error( - `Failed to find row:\n\n${JSON.stringify( - expectedRow, - null, - 2 - )}\n\nin\n\n${JSON.stringify(searchedObjects, null, 2)}` - ) - } - - foundRows.splice(foundRows.indexOf(row), 1) - return row - } - - // Asserts that the query returns rows matching exactly the set of rows - // passed in. The order of the rows matters. Rows returned in an order - // different to the one passed in will cause the assertion to fail. Extra - // rows returned by the query will also cause the assertion to fail. - async toMatchExactly(expectedRows: any[]) { - const response = await this.performSearch() - const cloned = cloneDeep(response) - const foundRows = response.rows - - // eslint-disable-next-line jest/no-standalone-expect - expect(foundRows).toHaveLength(expectedRows.length) - // eslint-disable-next-line jest/no-standalone-expect - expect([...foundRows]).toEqual( - expectedRows.map((expectedRow: any) => - expect.objectContaining(this.popRow(expectedRow, foundRows)) - ) - ) - return cloned - } - - // Asserts that the query returns rows matching exactly the set of rows - // passed in. The order of the rows is not important, but extra rows will - // cause the assertion to fail. - async toContainExactly(expectedRows: any[]) { - const response = await this.performSearch() - const cloned = cloneDeep(response) - const foundRows = response.rows - - // eslint-disable-next-line jest/no-standalone-expect - expect(foundRows).toHaveLength(expectedRows.length) - // eslint-disable-next-line jest/no-standalone-expect - expect([...foundRows]).toEqual( - expect.arrayContaining( - expectedRows.map((expectedRow: any) => - expect.objectContaining(this.popRow(expectedRow, foundRows)) - ) - ) - ) - return cloned - } - - // Asserts that the query returns some property values - this cannot be used - // to check row values, however this shouldn't be important for checking properties - // typing for this has to be any, Jest doesn't expose types for matchers like expect.any(...) - async toMatch(properties: Record) { - const response = await this.performSearch() - const cloned = cloneDeep(response) - const keys = Object.keys(properties) as Array< - keyof SearchResponse - > - for (let key of keys) { - // eslint-disable-next-line jest/no-standalone-expect - expect(response[key]).toBeDefined() - if (properties[key]) { - // eslint-disable-next-line jest/no-standalone-expect - expect(response[key]).toEqual(properties[key]) - } - } - return cloned - } - - // Asserts that the query doesn't return a property, e.g. pagination parameters. - async toNotHaveProperty(properties: (keyof SearchResponse)[]) { - const response = await this.performSearch() - const cloned = cloneDeep(response) - for (let property of properties) { - // eslint-disable-next-line jest/no-standalone-expect - expect(response[property]).toBeUndefined() - } - return cloned - } - - // Asserts that the query returns rows matching the set of rows passed in. - // The order of the rows is not important. Extra rows will not cause the - // assertion to fail. 
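// Quick reference for the assertion helpers on this class (usage sketch; the
// expectQuery/expectSearch factories are defined further down in this file):
//
//   toMatchExactly(rows)   - same rows, same order, no extras
//   toContainExactly(rows) - same rows, any order, no extras
//   toContain(rows)        - listed rows must be present, extras are allowed
//   toFindNothing()        - shorthand for toContainExactly([])
//   toHaveLength(n)        - only checks the row count
//
// e.g. inside an it() callback:
//   await expectQuery({ equal: { name: "foo" } }).toContainExactly([{ name: "foo" }])
//   await expectSearch({ query: {}, sort: "name", sortOrder: SortOrder.ASCENDING })
//     .toMatchExactly([{ name: "bar" }, { name: "foo" }])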
- async toContain(expectedRows: any[]) { - const response = await this.performSearch() - const cloned = cloneDeep(response) - const foundRows = response.rows - - // eslint-disable-next-line jest/no-standalone-expect - expect([...foundRows]).toEqual( - expect.arrayContaining( - expectedRows.map((expectedRow: any) => - expect.objectContaining(this.popRow(expectedRow, foundRows)) - ) - ) - ) - return cloned - } - - async toFindNothing() { - await this.toContainExactly([]) - } - - async toHaveLength(length: number) { - const { rows: foundRows } = await this.performSearch() - - // eslint-disable-next-line jest/no-standalone-expect - expect(foundRows).toHaveLength(length) - } - } - - function expectSearch(query: SearchRowRequest) { - return new SearchAssertion(query) - } - - function expectQuery(query: SearchFilters) { - return expectSearch({ query }) - } - - describe("boolean", () => { - beforeAll(async () => { - tableOrViewId = await createTableOrView({ - isTrue: { name: "isTrue", type: FieldType.BOOLEAN }, - }) - await createRows([{ isTrue: true }, { isTrue: false }]) - }) - - describe("equal", () => { - it("successfully finds true row", async () => { - await expectQuery({ equal: { isTrue: true } }).toMatchExactly([ - { isTrue: true }, - ]) - }) - - it("successfully finds false row", async () => { - await expectQuery({ equal: { isTrue: false } }).toMatchExactly([ - { isTrue: false }, - ]) - }) - }) - - describe("notEqual", () => { - it("successfully finds false row", async () => { - await expectQuery({ - notEqual: { isTrue: true }, - }).toContainExactly([{ isTrue: false }]) - }) - - it("successfully finds true row", async () => { - await expectQuery({ - notEqual: { isTrue: false }, - }).toContainExactly([{ isTrue: true }]) - }) - }) - - describe("oneOf", () => { - it("successfully finds true row", async () => { - await expectQuery({ oneOf: { isTrue: [true] } }).toContainExactly( - [{ isTrue: true }] - ) - }) - - it("successfully finds false row", async () => { - await expectQuery({ - oneOf: { isTrue: [false] }, - }).toContainExactly([{ isTrue: false }]) - }) - }) - - describe("sort", () => { - it("sorts ascending", async () => { - await expectSearch({ - query: {}, - sort: "isTrue", - sortOrder: SortOrder.ASCENDING, - }).toMatchExactly([{ isTrue: false }, { isTrue: true }]) - }) - - it("sorts descending", async () => { - await expectSearch({ - query: {}, - sort: "isTrue", - sortOrder: SortOrder.DESCENDING, - }).toMatchExactly([{ isTrue: true }, { isTrue: false }]) - }) - }) + }, + ], }) + }) - !isInMemory && - describe("bindings", () => { - let globalUsers: any = [] + async function createTable(schema?: TableSchema) { + const table = await config.api.table.save( + tableForDatasource(datasource, { schema }) + ) + return table._id! + } - const serverTime = new Date() + async function createView(tableId: string, schema?: ViewV2Schema) { + const view = await config.api.viewV2.create({ + tableId: tableId, + name: generator.guid(), + schema, + }) + return view.id + } - // In MariaDB and MySQL we only store dates to second precision, so we need - // to remove milliseconds from the server time to ensure searches work as - // expected. 
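// Editor's sketch of the precision issue noted in the comment above: MariaDB
// and MySQL store these DATETIME values to second precision, so the reference
// timestamp is truncated the same way before it is used in range/equal
// filters; otherwise a stored row and the search bound could differ by a few
// milliseconds and never match.
//   const serverTime = new Date()
//   serverTime.setMilliseconds(0) // 2020-01-01T00:00:00.123Z -> ...00.000Z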
- serverTime.setMilliseconds(0) + async function createRows(arr: Record[]) { + // Shuffling to avoid false positives given a fixed order + for (const row of _.shuffle(arr)) { + await config.api.row.save(tableOrViewId, row) + } + rows = await config.api.row.fetch(tableOrViewId) + } - const future = new Date( - serverTime.getTime() + 1000 * 60 * 60 * 24 * 30 - ) + async function getTable(tableOrViewId: string): Promise
{ + if (docIds.isViewId(tableOrViewId)) { + const view = await config.api.viewV2.get(tableOrViewId) + return await config.api.table.get(view.tableId) + } else { + return await config.api.table.get(tableOrViewId) + } + } - const rows = (currentUser: User) => { - return [ - { name: "foo", appointment: "1982-01-05T00:00:00.000Z" }, - { name: "bar", appointment: "1995-05-06T00:00:00.000Z" }, - { - name: currentUser.firstName, - appointment: future.toISOString(), - }, - { name: "serverDate", appointment: serverTime.toISOString() }, - { - name: "single user, session user", - single_user: currentUser, - }, - { - name: "single user", - single_user: globalUsers[0], - }, - { - name: "deprecated single user, session user", - deprecated_single_user: [currentUser], - }, - { - name: "deprecated single user", - deprecated_single_user: [globalUsers[0]], - }, - { - name: "multi user", - multi_user: globalUsers, - }, - { - name: "multi user with session user", - multi_user: [...globalUsers, currentUser], - }, - { - name: "deprecated multi user", - deprecated_multi_user: globalUsers, - }, - { - name: "deprecated multi user with session user", - deprecated_multi_user: [...globalUsers, currentUser], - }, - ] + async function assertTableExists(nameOrTable: string | Table) { + const name = + typeof nameOrTable === "string" ? nameOrTable : nameOrTable.name + expect(await client!.schema.hasTable(name)).toBeTrue() + } + + async function assertTableNumRows( + nameOrTable: string | Table, + numRows: number + ) { + const name = + typeof nameOrTable === "string" ? nameOrTable : nameOrTable.name + const row = await client!.from(name).count() + const count = parseInt(Object.values(row[0])[0] as string) + expect(count).toEqual(numRows) + } + + describe.each([true, false])("in-memory: %s", isInMemory => { + // We only run the in-memory tests during the SQS (isInternal) run + if (isInMemory && !isInternal) { + return + } + + type CreateFn = (schema?: TableSchema) => Promise + let tableOrView: [string, CreateFn][] = [["table", createTable]] + + if (!isInMemory) { + tableOrView.push([ + "view", + async (schema?: TableSchema) => { + const tableId = await createTable(schema) + const viewId = await createView( + tableId, + Object.keys(schema || {}).reduce( + (viewSchema, fieldName) => { + const field = schema![fieldName] + viewSchema[fieldName] = { + visible: field.visible ?? true, + readonly: false, + } + return viewSchema + }, + {} + ) + ) + return viewId + }, + ]) + } + + describe.each(tableOrView)( + "from %s", + (sourceType, createTableOrView) => { + const isView = sourceType === "view" + + class SearchAssertion { + constructor(private readonly query: SearchRowRequest) {} + + private async performSearch(): Promise> { + if (isInMemory) { + return dataFilters.search(_.cloneDeep(rows), { + ...this.query, + }) + } else { + return config.api.row.search(tableOrViewId, this.query) + } + } + + // We originally used _.isMatch to compare rows, but found that when + // comparing arrays it would return true if the source array was a subset of + // the target array. This would sometimes create false matches. This + // function is a more strict version of _.isMatch that only returns true if + // the source array is an exact match of the target. + // + // _.isMatch("100", "1") also returns true which is not what we want. 
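// The in-memory branch of performSearch above runs the same SearchRowRequest
// through dataFilters.search against a cloned copy of the fixture rows instead
// of hitting a datasource; cloneDeep is presumably there so the shared `rows`
// fixture isn't mutated between assertions. Usage sketch with the helpers
// already in scope in this file:
//
//   const inMemoryResult = dataFilters.search(_.cloneDeep(rows), {
//     query: { equal: { name: "foo" } },
//     sort: "name",
//     sortOrder: SortOrder.ASCENDING,
//   })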
+ private isMatch>( + expected: T, + found: T + ) { + if (!expected) { + throw new Error("Expected is undefined") + } + if (!found) { + return false + } + + for (const key of Object.keys(expected)) { + if (Array.isArray(expected[key])) { + if (!Array.isArray(found[key])) { + return false + } + if (expected[key].length !== found[key].length) { + return false + } + if (!_.isMatch(found[key], expected[key])) { + return false + } + } else if (typeof expected[key] === "object") { + if (!this.isMatch(expected[key], found[key])) { + return false + } + } else { + if (expected[key] !== found[key]) { + return false + } + } + } + return true + } + + // This function exists to ensure that the same row is not matched twice. + // When a row gets matched, we make sure to remove it from the list of rows + // we're matching against. + private popRow( + expectedRow: T, + foundRows: T[] + ): NonNullable { + const row = foundRows.find(row => + this.isMatch(expectedRow, row) + ) + if (!row) { + const fields = Object.keys(expectedRow) + // To make the error message more readable, we only include the fields + // that are present in the expected row. + const searchedObjects = foundRows.map(row => + _.pick(row, fields) + ) + throw new Error( + `Failed to find row:\n\n${JSON.stringify( + expectedRow, + null, + 2 + )}\n\nin\n\n${JSON.stringify(searchedObjects, null, 2)}` + ) + } + + foundRows.splice(foundRows.indexOf(row), 1) + return row + } + + // Asserts that the query returns rows matching exactly the set of rows + // passed in. The order of the rows matters. Rows returned in an order + // different to the one passed in will cause the assertion to fail. Extra + // rows returned by the query will also cause the assertion to fail. + async toMatchExactly(expectedRows: any[]) { + const response = await this.performSearch() + const cloned = cloneDeep(response) + const foundRows = response.rows + + // eslint-disable-next-line jest/no-standalone-expect + expect(foundRows).toHaveLength(expectedRows.length) + // eslint-disable-next-line jest/no-standalone-expect + expect([...foundRows]).toEqual( + expectedRows.map((expectedRow: any) => + expect.objectContaining(this.popRow(expectedRow, foundRows)) + ) + ) + return cloned + } + + // Asserts that the query returns rows matching exactly the set of rows + // passed in. The order of the rows is not important, but extra rows will + // cause the assertion to fail. + async toContainExactly(expectedRows: any[]) { + const response = await this.performSearch() + const cloned = cloneDeep(response) + const foundRows = response.rows + + // eslint-disable-next-line jest/no-standalone-expect + expect(foundRows).toHaveLength(expectedRows.length) + // eslint-disable-next-line jest/no-standalone-expect + expect([...foundRows]).toEqual( + expect.arrayContaining( + expectedRows.map((expectedRow: any) => + expect.objectContaining( + this.popRow(expectedRow, foundRows) + ) + ) + ) + ) + return cloned + } + + // Asserts that the query returns some property values - this cannot be used + // to check row values, however this shouldn't be important for checking properties + // typing for this has to be any, Jest doesn't expose types for matchers like expect.any(...) 
+ async toMatch(properties: Record) { + const response = await this.performSearch() + const cloned = cloneDeep(response) + const keys = Object.keys(properties) as Array< + keyof SearchResponse + > + for (let key of keys) { + // eslint-disable-next-line jest/no-standalone-expect + expect(response[key]).toBeDefined() + if (properties[key]) { + // eslint-disable-next-line jest/no-standalone-expect + expect(response[key]).toEqual(properties[key]) + } + } + return cloned + } + + // Asserts that the query doesn't return a property, e.g. pagination parameters. + async toNotHaveProperty( + properties: (keyof SearchResponse)[] + ) { + const response = await this.performSearch() + const cloned = cloneDeep(response) + for (let property of properties) { + // eslint-disable-next-line jest/no-standalone-expect + expect(response[property]).toBeUndefined() + } + return cloned + } + + // Asserts that the query returns rows matching the set of rows passed in. + // The order of the rows is not important. Extra rows will not cause the + // assertion to fail. + async toContain(expectedRows: any[]) { + const response = await this.performSearch() + const cloned = cloneDeep(response) + const foundRows = response.rows + + // eslint-disable-next-line jest/no-standalone-expect + expect([...foundRows]).toEqual( + expect.arrayContaining( + expectedRows.map((expectedRow: any) => + expect.objectContaining( + this.popRow(expectedRow, foundRows) + ) + ) + ) + ) + return cloned + } + + async toFindNothing() { + await this.toContainExactly([]) + } + + async toHaveLength(length: number) { + const { rows: foundRows } = await this.performSearch() + + // eslint-disable-next-line jest/no-standalone-expect + expect(foundRows).toHaveLength(length) + } } - beforeAll(async () => { - // Set up some global users - globalUsers = await Promise.all( - Array(2) - .fill(0) - .map(async () => { - const globalUser = await config.globalUser() - const userMedataId = globalUser._id - ? dbCore.generateUserMetadataID(globalUser._id) - : null - return { - _id: globalUser._id, - _meta: userMedataId, - } - }) - ) + function expectSearch(query: SearchRowRequest) { + return new SearchAssertion(query) + } - tableOrViewId = await createTableOrView({ - name: { name: "name", type: FieldType.STRING }, - appointment: { name: "appointment", type: FieldType.DATETIME }, - single_user: { - name: "single_user", - type: FieldType.BB_REFERENCE_SINGLE, - subtype: BBReferenceFieldSubType.USER, - }, - deprecated_single_user: { - name: "deprecated_single_user", - type: FieldType.BB_REFERENCE, - subtype: BBReferenceFieldSubType.USER, - }, - multi_user: { - name: "multi_user", - type: FieldType.BB_REFERENCE, - subtype: BBReferenceFieldSubType.USER, - constraints: { - type: "array", - }, - }, - deprecated_multi_user: { - name: "deprecated_multi_user", - type: FieldType.BB_REFERENCE, - subtype: BBReferenceFieldSubType.USERS, - constraints: { - type: "array", - }, - }, - }) - await createRows(rows(config.getUser())) - }) + function expectQuery(query: SearchFilters) { + return expectSearch({ query }) + } - // !! 
Current User is auto generated per run - it("should return all rows matching the session user firstname", async () => { - await expectQuery({ - equal: { name: "{{ [user].firstName }}" }, - }).toContainExactly([ - { - name: config.getUser().firstName, - appointment: future.toISOString(), - }, - ]) - }) - - it("should return all rows matching the session user firstname when logical operator used", async () => { - await expectQuery({ - $and: { - conditions: [{ equal: { name: "{{ [user].firstName }}" } }], - }, - }).toContainExactly([ - { - name: config.getUser().firstName, - appointment: future.toISOString(), - }, - ]) - }) - - it("should parse the date binding and return all rows after the resolved value", async () => { - await tk.withFreeze(serverTime, async () => { - await expectQuery({ - range: { - appointment: { - low: "{{ [now] }}", - high: "9999-00-00T00:00:00.000Z", - }, - }, - }).toContainExactly([ - { - name: config.getUser().firstName, - appointment: future.toISOString(), - }, - { name: "serverDate", appointment: serverTime.toISOString() }, - ]) - }) - }) - - it("should parse the date binding and return all rows before the resolved value", async () => { - await expectQuery({ - range: { - appointment: { - low: "0000-00-00T00:00:00.000Z", - high: "{{ [now] }}", - }, - }, - }).toContainExactly([ - { name: "foo", appointment: "1982-01-05T00:00:00.000Z" }, - { name: "bar", appointment: "1995-05-06T00:00:00.000Z" }, - { name: "serverDate", appointment: serverTime.toISOString() }, - ]) - }) - - it("should parse the encoded js snippet. Return rows with appointments up to 1 week in the past", async () => { - const jsBinding = "return snippets.WeeksAgo();" - const encodedBinding = encodeJSBinding(jsBinding) - - await expectQuery({ - range: { - appointment: { - low: "0000-00-00T00:00:00.000Z", - high: encodedBinding, - }, - }, - }).toContainExactly([ - { name: "foo", appointment: "1982-01-05T00:00:00.000Z" }, - { name: "bar", appointment: "1995-05-06T00:00:00.000Z" }, - ]) - }) - - it("should parse the encoded js binding. 
Return rows with appointments 2 weeks in the past", async () => { - const jsBinding = `const currentTime = new Date(${Date.now()})\ncurrentTime.setDate(currentTime.getDate()-14);\nreturn currentTime.toISOString();` - const encodedBinding = encodeJSBinding(jsBinding) - - await expectQuery({ - range: { - appointment: { - low: "0000-00-00T00:00:00.000Z", - high: encodedBinding, - }, - }, - }).toContainExactly([ - { name: "foo", appointment: "1982-01-05T00:00:00.000Z" }, - { name: "bar", appointment: "1995-05-06T00:00:00.000Z" }, - ]) - }) - - it("should match a single user row by the session user id", async () => { - await expectQuery({ - equal: { single_user: "{{ [user]._id }}" }, - }).toContainExactly([ - { - name: "single user, session user", - single_user: { _id: config.getUser()._id }, - }, - ]) - }) - - it("should match a deprecated single user row by the session user id", async () => { - await expectQuery({ - equal: { deprecated_single_user: "{{ [user]._id }}" }, - }).toContainExactly([ - { - name: "deprecated single user, session user", - deprecated_single_user: [{ _id: config.getUser()._id }], - }, - ]) - }) - - it("should match the session user id in a multi user field", async () => { - const allUsers = [...globalUsers, config.getUser()].map( - (user: any) => { - return { _id: user._id } - } - ) - - await expectQuery({ - contains: { multi_user: ["{{ [user]._id }}"] }, - }).toContainExactly([ - { - name: "multi user with session user", - multi_user: allUsers, - }, - ]) - }) - - it("should match the session user id in a deprecated multi user field", async () => { - const allUsers = [...globalUsers, config.getUser()].map( - (user: any) => { - return { _id: user._id } - } - ) - - await expectQuery({ - contains: { deprecated_multi_user: ["{{ [user]._id }}"] }, - }).toContainExactly([ - { - name: "deprecated multi user with session user", - deprecated_multi_user: allUsers, - }, - ]) - }) - - it("should not match the session user id in a multi user field", async () => { - await expectQuery({ - notContains: { multi_user: ["{{ [user]._id }}"] }, - notEmpty: { multi_user: true }, - }).toContainExactly([ - { - name: "multi user", - multi_user: globalUsers.map((user: any) => { - return { _id: user._id } - }), - }, - ]) - }) - - it("should not match the session user id in a deprecated multi user field", async () => { - await expectQuery({ - notContains: { deprecated_multi_user: ["{{ [user]._id }}"] }, - notEmpty: { deprecated_multi_user: true }, - }).toContainExactly([ - { - name: "deprecated multi user", - deprecated_multi_user: globalUsers.map((user: any) => { - return { _id: user._id } - }), - }, - ]) - }) - - it("should match the session user id and a user table row id using helpers, user binding and a static user id.", async () => { - await expectQuery({ - oneOf: { - single_user: [ - "{{ default [user]._id '_empty_' }}", - globalUsers[0]._id, - ], - }, - }).toContainExactly([ - { - name: "single user, session user", - single_user: { _id: config.getUser()._id }, - }, - { - name: "single user", - single_user: { _id: globalUsers[0]._id }, - }, - ]) - }) - - it("should match the session user id and a user table row id using helpers, user binding and a static user id. 
(deprecated single user)", async () => { - await expectQuery({ - oneOf: { - deprecated_single_user: [ - "{{ default [user]._id '_empty_' }}", - globalUsers[0]._id, - ], - }, - }).toContainExactly([ - { - name: "deprecated single user, session user", - deprecated_single_user: [{ _id: config.getUser()._id }], - }, - { - name: "deprecated single user", - deprecated_single_user: [{ _id: globalUsers[0]._id }], - }, - ]) - }) - - it("should resolve 'default' helper to '_empty_' when binding resolves to nothing", async () => { - await expectQuery({ - oneOf: { - single_user: [ - "{{ default [user]._idx '_empty_' }}", - globalUsers[0]._id, - ], - }, - }).toContainExactly([ - { - name: "single user", - single_user: { _id: globalUsers[0]._id }, - }, - ]) - }) - - it("should resolve 'default' helper to '_empty_' when binding resolves to nothing (deprecated single user)", async () => { - await expectQuery({ - oneOf: { - deprecated_single_user: [ - "{{ default [user]._idx '_empty_' }}", - globalUsers[0]._id, - ], - }, - }).toContainExactly([ - { - name: "deprecated single user", - deprecated_single_user: [{ _id: globalUsers[0]._id }], - }, - ]) - }) - }) - - const stringTypes = [FieldType.STRING, FieldType.LONGFORM] as const - describe.each(stringTypes)("%s", type => { - beforeAll(async () => { - tableOrViewId = await createTableOrView({ - name: { name: "name", type }, - }) - await createRows([{ name: "foo" }, { name: "bar" }]) - }) - - describe("misc", () => { - it("should return all if no query is passed", async () => { - await expectSearch({} as RowSearchParams).toContainExactly([ - { name: "foo" }, - { name: "bar" }, - ]) - }) - - it("should return all if empty query is passed", async () => { - await expectQuery({}).toContainExactly([ - { name: "foo" }, - { name: "bar" }, - ]) - }) - - it("should return all if onEmptyFilter is RETURN_ALL", async () => { - await expectQuery({ - onEmptyFilter: EmptyFilterOption.RETURN_ALL, - }).toContainExactly([{ name: "foo" }, { name: "bar" }]) - }) - - // onEmptyFilter cannot be sent to view searches - !isView && - it("should return nothing if onEmptyFilter is RETURN_NONE", async () => { - await expectQuery({ - onEmptyFilter: EmptyFilterOption.RETURN_NONE, - }).toFindNothing() + describe("boolean", () => { + beforeAll(async () => { + tableOrViewId = await createTableOrView({ + isTrue: { name: "isTrue", type: FieldType.BOOLEAN }, + }) + await createRows([{ isTrue: true }, { isTrue: false }]) }) - it("should respect limit", async () => { - await expectSearch({ - limit: 1, - paginate: true, - query: {}, - }).toHaveLength(1) - }) - }) - - describe("equal", () => { - it("successfully finds a row", async () => { - await expectQuery({ equal: { name: "foo" } }).toContainExactly([ - { name: "foo" }, - ]) - }) - - it("fails to find nonexistent row", async () => { - await expectQuery({ equal: { name: "none" } }).toFindNothing() - }) - - it("works as an or condition", async () => { - await expectQuery({ - allOr: true, - equal: { name: "foo" }, - oneOf: { name: ["bar"] }, - }).toContainExactly([{ name: "foo" }, { name: "bar" }]) - }) - - it("can have multiple values for same column", async () => { - await expectQuery({ - allOr: true, - equal: { "1:name": "foo", "2:name": "bar" }, - }).toContainExactly([{ name: "foo" }, { name: "bar" }]) - }) - }) - - describe("notEqual", () => { - it("successfully finds a row", async () => { - await expectQuery({ notEqual: { name: "foo" } }).toContainExactly( - [{ name: "bar" }] - ) - }) - - it("fails to find nonexistent row", async () => { - 
await expectQuery({ notEqual: { name: "bar" } }).toContainExactly( - [{ name: "foo" }] - ) - }) - }) - - describe("oneOf", () => { - it("successfully finds a row", async () => { - await expectQuery({ oneOf: { name: ["foo"] } }).toContainExactly([ - { name: "foo" }, - ]) - }) - - it("fails to find nonexistent row", async () => { - await expectQuery({ oneOf: { name: ["none"] } }).toFindNothing() - }) - - it("can have multiple values for same column", async () => { - await expectQuery({ - oneOf: { - name: ["foo", "bar"], - }, - }).toContainExactly([{ name: "foo" }, { name: "bar" }]) - }) - - it("splits comma separated strings", async () => { - await expectQuery({ - oneOf: { - // @ts-ignore - name: "foo,bar", - }, - }).toContainExactly([{ name: "foo" }, { name: "bar" }]) - }) - - it("trims whitespace", async () => { - await expectQuery({ - oneOf: { - // @ts-ignore - name: "foo, bar", - }, - }).toContainExactly([{ name: "foo" }, { name: "bar" }]) - }) - - it("empty arrays returns all when onEmptyFilter is set to return 'all'", async () => { - await expectQuery({ - onEmptyFilter: EmptyFilterOption.RETURN_ALL, - oneOf: { name: [] }, - }).toContainExactly([{ name: "foo" }, { name: "bar" }]) - }) - - // onEmptyFilter cannot be sent to view searches - !isView && - it("empty arrays returns all when onEmptyFilter is set to return 'none'", async () => { - await expectQuery({ - onEmptyFilter: EmptyFilterOption.RETURN_NONE, - oneOf: { name: [] }, - }).toContainExactly([]) - }) - }) - - describe("fuzzy", () => { - it("successfully finds a row", async () => { - await expectQuery({ fuzzy: { name: "oo" } }).toContainExactly([ - { name: "foo" }, - ]) - }) - - it("fails to find nonexistent row", async () => { - await expectQuery({ fuzzy: { name: "none" } }).toFindNothing() - }) - }) - - describe("string", () => { - it("successfully finds a row", async () => { - await expectQuery({ string: { name: "fo" } }).toContainExactly([ - { name: "foo" }, - ]) - }) - - it("fails to find nonexistent row", async () => { - await expectQuery({ string: { name: "none" } }).toFindNothing() - }) - - it("is case-insensitive", async () => { - await expectQuery({ string: { name: "FO" } }).toContainExactly([ - { name: "foo" }, - ]) - }) - }) - - describe("range", () => { - it("successfully finds multiple rows", async () => { - await expectQuery({ - range: { name: { low: "a", high: "z" } }, - }).toContainExactly([{ name: "bar" }, { name: "foo" }]) - }) - - it("successfully finds a row with a high bound", async () => { - await expectQuery({ - range: { name: { low: "a", high: "c" } }, - }).toContainExactly([{ name: "bar" }]) - }) - - it("successfully finds a row with a low bound", async () => { - await expectQuery({ - range: { name: { low: "f", high: "z" } }, - }).toContainExactly([{ name: "foo" }]) - }) - - it("successfully finds no rows", async () => { - await expectQuery({ - range: { name: { low: "g", high: "h" } }, - }).toFindNothing() - }) - - it("ignores low if it's an empty object", async () => { - await expectQuery({ - // @ts-ignore - range: { name: { low: {}, high: "z" } }, - }).toContainExactly([{ name: "foo" }, { name: "bar" }]) - }) - - it("ignores high if it's an empty object", async () => { - await expectQuery({ - // @ts-ignore - range: { name: { low: "a", high: {} } }, - }).toContainExactly([{ name: "foo" }, { name: "bar" }]) - }) - }) - - describe("empty", () => { - it("finds no empty rows", async () => { - await expectQuery({ empty: { name: null } }).toFindNothing() - }) - - it("should not be affected by when filter 
empty behaviour", async () => { - await expectQuery({ - empty: { name: null }, - onEmptyFilter: EmptyFilterOption.RETURN_ALL, - }).toFindNothing() - }) - }) - - describe("notEmpty", () => { - it("finds all non-empty rows", async () => { - await expectQuery({ notEmpty: { name: null } }).toContainExactly([ - { name: "foo" }, - { name: "bar" }, - ]) - }) - - it("should not be affected by when filter empty behaviour", async () => { - await expectQuery({ - notEmpty: { name: null }, - onEmptyFilter: EmptyFilterOption.RETURN_NONE, - }).toContainExactly([{ name: "foo" }, { name: "bar" }]) - }) - }) - - describe("sort", () => { - it("sorts ascending", async () => { - await expectSearch({ - query: {}, - sort: "name", - sortOrder: SortOrder.ASCENDING, - }).toMatchExactly([{ name: "bar" }, { name: "foo" }]) - }) - - it("sorts descending", async () => { - await expectSearch({ - query: {}, - sort: "name", - sortOrder: SortOrder.DESCENDING, - }).toMatchExactly([{ name: "foo" }, { name: "bar" }]) - }) - - describe("sortType STRING", () => { - it("sorts ascending", async () => { - await expectSearch({ - query: {}, - sort: "name", - sortType: SortType.STRING, - sortOrder: SortOrder.ASCENDING, - }).toMatchExactly([{ name: "bar" }, { name: "foo" }]) - }) - - it("sorts descending", async () => { - await expectSearch({ - query: {}, - sort: "name", - sortType: SortType.STRING, - sortOrder: SortOrder.DESCENDING, - }).toMatchExactly([{ name: "foo" }, { name: "bar" }]) - }) - }) - - !isInternal && - !isInMemory && - // This test was added because we automatically add in a sort by the - // primary key, and we used to do this unconditionally which caused - // problems because it was possible for the primary key to appear twice - // in the resulting SQL ORDER BY clause, resulting in an SQL error. - // We now check first to make sure that the primary key isn't already - // in the sort before adding it. - describe("sort on primary key", () => { - beforeAll(async () => { - const tableName = structures.uuid().substring(0, 10) - await client!.schema.createTable(tableName, t => { - t.string("name").primary() - }) - const resp = await config.api.datasource.fetchSchema({ - datasourceId: datasource!._id!, - }) - - tableOrViewId = resp.datasource.entities![tableName]._id! 
- - await createRows([{ name: "foo" }, { name: "bar" }]) + describe("equal", () => { + it("successfully finds true row", async () => { + await expectQuery({ equal: { isTrue: true } }).toMatchExactly( + [{ isTrue: true }] + ) }) - it("should be able to sort by a primary key column ascending", async () => - expectSearch({ - query: {}, - sort: "name", - sortOrder: SortOrder.ASCENDING, - }).toMatchExactly([{ name: "bar" }, { name: "foo" }])) - - it("should be able to sort by a primary key column descending", async () => - expectSearch({ - query: {}, - sort: "name", - sortOrder: SortOrder.DESCENDING, - }).toMatchExactly([{ name: "foo" }, { name: "bar" }])) - }) - }) - }) - - describe("numbers", () => { - beforeAll(async () => { - tableOrViewId = await createTableOrView({ - age: { name: "age", type: FieldType.NUMBER }, - }) - await createRows([{ age: 1 }, { age: 10 }]) - }) - - describe("equal", () => { - it("successfully finds a row", async () => { - await expectQuery({ equal: { age: 1 } }).toContainExactly([ - { age: 1 }, - ]) - }) - - it("fails to find nonexistent row", async () => { - await expectQuery({ equal: { age: 2 } }).toFindNothing() - }) - }) - - describe("notEqual", () => { - it("successfully finds a row", async () => { - await expectQuery({ notEqual: { age: 1 } }).toContainExactly([ - { age: 10 }, - ]) - }) - - it("fails to find nonexistent row", async () => { - await expectQuery({ notEqual: { age: 10 } }).toContainExactly([ - { age: 1 }, - ]) - }) - }) - - describe("oneOf", () => { - it("successfully finds a row", async () => { - await expectQuery({ oneOf: { age: [1] } }).toContainExactly([ - { age: 1 }, - ]) - }) - - it("fails to find nonexistent row", async () => { - await expectQuery({ oneOf: { age: [2] } }).toFindNothing() - }) - - it("can convert from a string", async () => { - await expectQuery({ - oneOf: { - // @ts-ignore - age: "1", - }, - }).toContainExactly([{ age: 1 }]) - }) - - it("can find multiple values for same column", async () => { - await expectQuery({ - oneOf: { - // @ts-ignore - age: "1,10", - }, - }).toContainExactly([{ age: 1 }, { age: 10 }]) - }) - }) - - describe("range", () => { - it("successfully finds a row", async () => { - await expectQuery({ - range: { age: { low: 1, high: 5 } }, - }).toContainExactly([{ age: 1 }]) - }) - - it("successfully finds multiple rows", async () => { - await expectQuery({ - range: { age: { low: 1, high: 10 } }, - }).toContainExactly([{ age: 1 }, { age: 10 }]) - }) - - it("successfully finds a row with a high bound", async () => { - await expectQuery({ - range: { age: { low: 5, high: 10 } }, - }).toContainExactly([{ age: 10 }]) - }) - - it("successfully finds no rows", async () => { - await expectQuery({ - range: { age: { low: 5, high: 9 } }, - }).toFindNothing() - }) - - it("greater than equal to", async () => { - await expectQuery({ - range: { - age: { low: 10, high: Number.MAX_SAFE_INTEGER }, - }, - }).toContainExactly([{ age: 10 }]) - }) - - it("greater than", async () => { - await expectQuery({ - range: { - age: { low: 5, high: Number.MAX_SAFE_INTEGER }, - }, - }).toContainExactly([{ age: 10 }]) - }) - - it("less than equal to", async () => { - await expectQuery({ - range: { - age: { high: 1, low: Number.MIN_SAFE_INTEGER }, - }, - }).toContainExactly([{ age: 1 }]) - }) - - it("less than", async () => { - await expectQuery({ - range: { - age: { high: 5, low: Number.MIN_SAFE_INTEGER }, - }, - }).toContainExactly([{ age: 1 }]) - }) - }) - - describe("sort", () => { - it("sorts ascending", async () => { - await 
expectSearch({ - query: {}, - sort: "age", - sortOrder: SortOrder.ASCENDING, - }).toMatchExactly([{ age: 1 }, { age: 10 }]) - }) - - it("sorts descending", async () => { - await expectSearch({ - query: {}, - sort: "age", - sortOrder: SortOrder.DESCENDING, - }).toMatchExactly([{ age: 10 }, { age: 1 }]) - }) - }) - - describe("sortType NUMBER", () => { - it("sorts ascending", async () => { - await expectSearch({ - query: {}, - sort: "age", - sortType: SortType.NUMBER, - sortOrder: SortOrder.ASCENDING, - }).toMatchExactly([{ age: 1 }, { age: 10 }]) - }) - - it("sorts descending", async () => { - await expectSearch({ - query: {}, - sort: "age", - sortType: SortType.NUMBER, - sortOrder: SortOrder.DESCENDING, - }).toMatchExactly([{ age: 10 }, { age: 1 }]) - }) - }) - }) - - describe("dates", () => { - const JAN_1ST = "2020-01-01T00:00:00.000Z" - const JAN_2ND = "2020-01-02T00:00:00.000Z" - const JAN_5TH = "2020-01-05T00:00:00.000Z" - const JAN_9TH = "2020-01-09T00:00:00.000Z" - const JAN_10TH = "2020-01-10T00:00:00.000Z" - - beforeAll(async () => { - tableOrViewId = await createTableOrView({ - dob: { name: "dob", type: FieldType.DATETIME }, - }) - - await createRows([{ dob: JAN_1ST }, { dob: JAN_10TH }]) - }) - - describe("equal", () => { - it("successfully finds a row", async () => { - await expectQuery({ equal: { dob: JAN_1ST } }).toContainExactly([ - { dob: JAN_1ST }, - ]) - }) - - it("fails to find nonexistent row", async () => { - await expectQuery({ equal: { dob: JAN_2ND } }).toFindNothing() - }) - }) - - describe("notEqual", () => { - it("successfully finds a row", async () => { - await expectQuery({ - notEqual: { dob: JAN_1ST }, - }).toContainExactly([{ dob: JAN_10TH }]) - }) - - it("fails to find nonexistent row", async () => { - await expectQuery({ - notEqual: { dob: JAN_10TH }, - }).toContainExactly([{ dob: JAN_1ST }]) - }) - }) - - describe("oneOf", () => { - it("successfully finds a row", async () => { - await expectQuery({ oneOf: { dob: [JAN_1ST] } }).toContainExactly( - [{ dob: JAN_1ST }] - ) - }) - - it("fails to find nonexistent row", async () => { - await expectQuery({ oneOf: { dob: [JAN_2ND] } }).toFindNothing() - }) - }) - - describe("range", () => { - it("successfully finds a row", async () => { - await expectQuery({ - range: { dob: { low: JAN_1ST, high: JAN_5TH } }, - }).toContainExactly([{ dob: JAN_1ST }]) - }) - - it("successfully finds multiple rows", async () => { - await expectQuery({ - range: { dob: { low: JAN_1ST, high: JAN_10TH } }, - }).toContainExactly([{ dob: JAN_1ST }, { dob: JAN_10TH }]) - }) - - it("successfully finds a row with a high bound", async () => { - await expectQuery({ - range: { dob: { low: JAN_5TH, high: JAN_10TH } }, - }).toContainExactly([{ dob: JAN_10TH }]) - }) - - it("successfully finds no rows", async () => { - await expectQuery({ - range: { dob: { low: JAN_5TH, high: JAN_9TH } }, - }).toFindNothing() - }) - - it("greater than equal to", async () => { - await expectQuery({ - range: { - dob: { low: JAN_10TH, high: MAX_VALID_DATE.toISOString() }, - }, - }).toContainExactly([{ dob: JAN_10TH }]) - }) - - it("greater than", async () => { - await expectQuery({ - range: { - dob: { low: JAN_5TH, high: MAX_VALID_DATE.toISOString() }, - }, - }).toContainExactly([{ dob: JAN_10TH }]) - }) - - it("less than equal to", async () => { - await expectQuery({ - range: { - dob: { high: JAN_1ST, low: MIN_VALID_DATE.toISOString() }, - }, - }).toContainExactly([{ dob: JAN_1ST }]) - }) - - it("less than", async () => { - await expectQuery({ - range: { - dob: { high: 
JAN_5TH, low: MIN_VALID_DATE.toISOString() }, - }, - }).toContainExactly([{ dob: JAN_1ST }]) - }) - }) - - describe("sort", () => { - it("sorts ascending", async () => { - await expectSearch({ - query: {}, - sort: "dob", - sortOrder: SortOrder.ASCENDING, - }).toMatchExactly([{ dob: JAN_1ST }, { dob: JAN_10TH }]) - }) - - it("sorts descending", async () => { - await expectSearch({ - query: {}, - sort: "dob", - sortOrder: SortOrder.DESCENDING, - }).toMatchExactly([{ dob: JAN_10TH }, { dob: JAN_1ST }]) - }) - - describe("sortType STRING", () => { - it("sorts ascending", async () => { - await expectSearch({ - query: {}, - sort: "dob", - sortType: SortType.STRING, - sortOrder: SortOrder.ASCENDING, - }).toMatchExactly([{ dob: JAN_1ST }, { dob: JAN_10TH }]) + it("successfully finds false row", async () => { + await expectQuery({ + equal: { isTrue: false }, + }).toMatchExactly([{ isTrue: false }]) + }) }) - it("sorts descending", async () => { - await expectSearch({ - query: {}, - sort: "dob", - sortType: SortType.STRING, - sortOrder: SortOrder.DESCENDING, - }).toMatchExactly([{ dob: JAN_10TH }, { dob: JAN_1ST }]) - }) - }) - }) - }) + describe("notEqual", () => { + it("successfully finds false row", async () => { + await expectQuery({ + notEqual: { isTrue: true }, + }).toContainExactly([{ isTrue: false }]) + }) - !isInternal && - describe("datetime - time only", () => { - const T_1000 = "10:00:00" - const T_1045 = "10:45:00" - const T_1200 = "12:00:00" - const T_1530 = "15:30:00" - const T_0000 = "00:00:00" - - const UNEXISTING_TIME = "10:01:00" - - const NULL_TIME__ID = `null_time__id` - - beforeAll(async () => { - tableOrViewId = await createTableOrView({ - timeid: { name: "timeid", type: FieldType.STRING }, - time: { - name: "time", - type: FieldType.DATETIME, - timeOnly: true, - }, + it("successfully finds true row", async () => { + await expectQuery({ + notEqual: { isTrue: false }, + }).toContainExactly([{ isTrue: true }]) + }) }) - await createRows([ - { timeid: NULL_TIME__ID, time: null }, - { time: T_1000 }, - { time: T_1045 }, - { time: T_1200 }, - { time: T_1530 }, - { time: T_0000 }, - ]) - }) + describe("oneOf", () => { + it("successfully finds true row", async () => { + await expectQuery({ + oneOf: { isTrue: [true] }, + }).toContainExactly([{ isTrue: true }]) + }) - describe("equal", () => { - it("successfully finds a row", async () => { - await expectQuery({ equal: { time: T_1000 } }).toContainExactly( - [{ time: "10:00:00" }] - ) + it("successfully finds false row", async () => { + await expectQuery({ + oneOf: { isTrue: [false] }, + }).toContainExactly([{ isTrue: false }]) + }) }) - it("fails to find nonexistent row", async () => { - await expectQuery({ - equal: { time: UNEXISTING_TIME }, - }).toFindNothing() - }) - }) - - describe("notEqual", () => { - it("successfully finds a row", async () => { - await expectQuery({ - notEqual: { time: T_1000 }, - }).toContainExactly([ - { timeid: NULL_TIME__ID }, - { time: "10:45:00" }, - { time: "12:00:00" }, - { time: "15:30:00" }, - { time: "00:00:00" }, - ]) - }) - - it("return all when requesting non-existing", async () => { - await expectQuery({ - notEqual: { time: UNEXISTING_TIME }, - }).toContainExactly([ - { timeid: NULL_TIME__ID }, - { time: "10:00:00" }, - { time: "10:45:00" }, - { time: "12:00:00" }, - { time: "15:30:00" }, - { time: "00:00:00" }, - ]) - }) - }) - - describe("oneOf", () => { - it("successfully finds a row", async () => { - await expectQuery({ - oneOf: { time: [T_1000] }, - }).toContainExactly([{ time: "10:00:00" }]) 
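// Pattern used throughout the number/date/time cases above: "greater than" and
// "less than" style searches are expressed as range filters with a sentinel on
// the unbounded side (Number.MIN/MAX_SAFE_INTEGER for numbers, MIN_VALID_DATE
// and MAX_VALID_DATE for dates). For example, inside an it() callback:
//
//   await expectQuery({
//     range: { age: { low: 10, high: Number.MAX_SAFE_INTEGER } }, // age >= 10
//   }).toContainExactly([{ age: 10 }])
//
//   await expectQuery({
//     range: { dob: { high: JAN_1ST, low: MIN_VALID_DATE.toISOString() } }, // dob <= JAN_1ST
//   }).toContainExactly([{ dob: JAN_1ST }])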
- }) - - it("fails to find nonexistent row", async () => { - await expectQuery({ - oneOf: { time: [UNEXISTING_TIME] }, - }).toFindNothing() - }) - }) - - describe("range", () => { - it("successfully finds a row", async () => { - await expectQuery({ - range: { time: { low: T_1045, high: T_1045 } }, - }).toContainExactly([{ time: "10:45:00" }]) - }) - - it("successfully finds multiple rows", async () => { - await expectQuery({ - range: { time: { low: T_1045, high: T_1530 } }, - }).toContainExactly([ - { time: "10:45:00" }, - { time: "12:00:00" }, - { time: "15:30:00" }, - ]) - }) - - it("successfully finds no rows", async () => { - await expectQuery({ - range: { - time: { low: UNEXISTING_TIME, high: UNEXISTING_TIME }, - }, - }).toFindNothing() - }) - }) - - describe("sort", () => { - it("sorts ascending", async () => { - await expectSearch({ - query: {}, - sort: "time", - sortOrder: SortOrder.ASCENDING, - }).toMatchExactly([ - { timeid: NULL_TIME__ID }, - { time: "00:00:00" }, - { time: "10:00:00" }, - { time: "10:45:00" }, - { time: "12:00:00" }, - { time: "15:30:00" }, - ]) - }) - - it("sorts descending", async () => { - await expectSearch({ - query: {}, - sort: "time", - sortOrder: SortOrder.DESCENDING, - }).toMatchExactly([ - { time: "15:30:00" }, - { time: "12:00:00" }, - { time: "10:45:00" }, - { time: "10:00:00" }, - { time: "00:00:00" }, - { timeid: NULL_TIME__ID }, - ]) - }) - - describe("sortType STRING", () => { + describe("sort", () => { it("sorts ascending", async () => { await expectSearch({ query: {}, - sort: "time", - sortType: SortType.STRING, + sort: "isTrue", sortOrder: SortOrder.ASCENDING, - }).toMatchExactly([ - { timeid: NULL_TIME__ID }, - { time: "00:00:00" }, - { time: "10:00:00" }, - { time: "10:45:00" }, - { time: "12:00:00" }, - { time: "15:30:00" }, - ]) + }).toMatchExactly([{ isTrue: false }, { isTrue: true }]) }) it("sorts descending", async () => { await expectSearch({ query: {}, - sort: "time", - sortType: SortType.STRING, + sort: "isTrue", sortOrder: SortOrder.DESCENDING, - }).toMatchExactly([ - { time: "15:30:00" }, - { time: "12:00:00" }, - { time: "10:45:00" }, - { time: "10:00:00" }, - { time: "00:00:00" }, - { timeid: NULL_TIME__ID }, - ]) + }).toMatchExactly([{ isTrue: true }, { isTrue: false }]) }) }) }) - }) - isInternal && - !isInMemory && - describe("AI Column", () => { - const UNEXISTING_AI_COLUMN = "Real LLM Response" + !isInMemory && + describe("bindings", () => { + let globalUsers: any = [] - beforeAll(async () => { - mocks.licenses.useBudibaseAI() - mocks.licenses.useAICustomConfigs() + const serverTime = new Date() - tableOrViewId = await createTableOrView({ - product: { name: "product", type: FieldType.STRING }, - ai: { - name: "AI", - type: FieldType.AI, - operation: AIOperationEnum.PROMPT, - prompt: "Translate '{{ product }}' into German", - }, - }) + // In MariaDB and MySQL we only store dates to second precision, so we need + // to remove milliseconds from the server time to ensure searches work as + // expected. 
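// The bindings cases in this spec exercise runtime bindings that are resolved
// server-side before the search runs: handlebars user bindings
// ("{{ [user].firstName }}", "{{ [user]._id }}"), the date binding
// "{{ [now] }}", the default helper ("{{ default [user]._id '_empty_' }}"),
// and encoded JS bindings such as encodeJSBinding("return snippets.WeeksAgo();").
// A representative filter, inside an it() callback:
//
//   await expectQuery({
//     equal: { name: "{{ [user].firstName }}" },
//   }).toContainExactly([
//     {
//       name: config.getUser().firstName,
//       appointment: future.toISOString(),
//     },
//   ])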
+ serverTime.setMilliseconds(0) - await createRows([ - { product: "Big Mac" }, - { product: "McCrispy" }, - ]) - }) - - describe("equal", () => { - it("successfully finds rows based on AI column", async () => { - await expectQuery({ - equal: { ai: "Mock LLM Response" }, - }).toContainExactly([ - { product: "Big Mac" }, - { product: "McCrispy" }, - ]) - }) - - it("fails to find nonexistent row", async () => { - await expectQuery({ - equal: { ai: UNEXISTING_AI_COLUMN }, - }).toFindNothing() - }) - }) - - describe("notEqual", () => { - it("Returns nothing when searching notEqual on the mock AI response", async () => { - await expectQuery({ - notEqual: { ai: "Mock LLM Response" }, - }).toContainExactly([]) - }) - - it("return all when requesting non-existing response", async () => { - await expectQuery({ - notEqual: { ai: "Real LLM Response" }, - }).toContainExactly([ - { product: "Big Mac" }, - { product: "McCrispy" }, - ]) - }) - }) - - describe("oneOf", () => { - it("successfully finds a row", async () => { - await expectQuery({ - oneOf: { ai: ["Mock LLM Response", "Other LLM Response"] }, - }).toContainExactly([ - { product: "Big Mac" }, - { product: "McCrispy" }, - ]) - }) - - it("fails to find nonexistent row", async () => { - await expectQuery({ - oneOf: { ai: ["Whopper"] }, - }).toFindNothing() - }) - }) - }) - - describe("arrays", () => { - beforeAll(async () => { - tableOrViewId = await createTableOrView({ - numbers: { - name: "numbers", - type: FieldType.ARRAY, - constraints: { - type: JsonFieldSubType.ARRAY, - inclusion: ["one", "two", "three"], - }, - }, - }) - await createRows([ - { numbers: ["one", "two"] }, - { numbers: ["three"] }, - ]) - }) - - describe("contains", () => { - it("successfully finds a row", async () => { - await expectQuery({ - contains: { numbers: ["one"] }, - }).toContainExactly([{ numbers: ["one", "two"] }]) - }) - - it("fails to find nonexistent row", async () => { - await expectQuery({ - contains: { numbers: ["none"] }, - }).toFindNothing() - }) - - it("fails to find row containing all", async () => { - await expectQuery({ - contains: { numbers: ["one", "two", "three"] }, - }).toFindNothing() - }) - - it("finds all with empty list", async () => { - await expectQuery({ contains: { numbers: [] } }).toContainExactly( - [{ numbers: ["one", "two"] }, { numbers: ["three"] }] - ) - }) - }) - - describe("notContains", () => { - it("successfully finds a row", async () => { - await expectQuery({ - notContains: { numbers: ["one"] }, - }).toContainExactly([{ numbers: ["three"] }]) - }) - - it("fails to find nonexistent row", async () => { - await expectQuery({ - notContains: { numbers: ["one", "two", "three"] }, - }).toContainExactly([ - { numbers: ["one", "two"] }, - { numbers: ["three"] }, - ]) - }) - - // Not sure if this is correct behaviour but changing it would be a - // breaking change. 
- it("finds all with empty list", async () => { - await expectQuery({ - notContains: { numbers: [] }, - }).toContainExactly([ - { numbers: ["one", "two"] }, - { numbers: ["three"] }, - ]) - }) - }) - - describe("containsAny", () => { - it("successfully finds rows", async () => { - await expectQuery({ - containsAny: { numbers: ["one", "two", "three"] }, - }).toContainExactly([ - { numbers: ["one", "two"] }, - { numbers: ["three"] }, - ]) - }) - - it("fails to find nonexistent row", async () => { - await expectQuery({ - containsAny: { numbers: ["none"] }, - }).toFindNothing() - }) - - it("finds all with empty list", async () => { - await expectQuery({ - containsAny: { numbers: [] }, - }).toContainExactly([ - { numbers: ["one", "two"] }, - { numbers: ["three"] }, - ]) - }) - }) - }) - - describe("bigints", () => { - const SMALL = "1" - const MEDIUM = "10000000" - - // Our bigints are int64s in most datasources. - let BIG = "9223372036854775807" - - beforeAll(async () => { - tableOrViewId = await createTableOrView({ - num: { name: "num", type: FieldType.BIGINT }, - }) - await createRows([{ num: SMALL }, { num: MEDIUM }, { num: BIG }]) - }) - - describe("equal", () => { - it("successfully finds a row", async () => { - await expectQuery({ equal: { num: SMALL } }).toContainExactly([ - { num: SMALL }, - ]) - }) - - it("successfully finds a big value", async () => { - await expectQuery({ equal: { num: BIG } }).toContainExactly([ - { num: BIG }, - ]) - }) - - it("fails to find nonexistent row", async () => { - await expectQuery({ equal: { num: "2" } }).toFindNothing() - }) - }) - - describe("notEqual", () => { - it("successfully finds a row", async () => { - await expectQuery({ notEqual: { num: SMALL } }).toContainExactly([ - { num: MEDIUM }, - { num: BIG }, - ]) - }) - - it("fails to find nonexistent row", async () => { - await expectQuery({ notEqual: { num: 10 } }).toContainExactly([ - { num: SMALL }, - { num: MEDIUM }, - { num: BIG }, - ]) - }) - }) - - describe("oneOf", () => { - it("successfully finds a row", async () => { - await expectQuery({ oneOf: { num: [SMALL] } }).toContainExactly([ - { num: SMALL }, - ]) - }) - - it("successfully finds all rows", async () => { - await expectQuery({ - oneOf: { num: [SMALL, MEDIUM, BIG] }, - }).toContainExactly([ - { num: SMALL }, - { num: MEDIUM }, - { num: BIG }, - ]) - }) - - it("fails to find nonexistent row", async () => { - await expectQuery({ oneOf: { num: [2] } }).toFindNothing() - }) - }) - - describe("range", () => { - it("successfully finds a row", async () => { - await expectQuery({ - range: { num: { low: SMALL, high: "5" } }, - }).toContainExactly([{ num: SMALL }]) - }) - - it("successfully finds multiple rows", async () => { - await expectQuery({ - range: { num: { low: SMALL, high: MEDIUM } }, - }).toContainExactly([{ num: SMALL }, { num: MEDIUM }]) - }) - - it("successfully finds a row with a high bound", async () => { - await expectQuery({ - range: { num: { low: MEDIUM, high: BIG } }, - }).toContainExactly([{ num: MEDIUM }, { num: BIG }]) - }) - - it("successfully finds no rows", async () => { - await expectQuery({ - range: { num: { low: "5", high: "5" } }, - }).toFindNothing() - }) - - it("can search using just a low value", async () => { - await expectQuery({ - range: { num: { low: MEDIUM } }, - }).toContainExactly([{ num: MEDIUM }, { num: BIG }]) - }) - - it("can search using just a high value", async () => { - await expectQuery({ - range: { num: { high: MEDIUM } }, - }).toContainExactly([{ num: SMALL }, { num: MEDIUM }]) - }) - }) - }) 
- - isInternal && - describe("auto", () => { - beforeAll(async () => { - tableOrViewId = await createTableOrView({ - auto: { - name: "auto", - type: FieldType.AUTO, - autocolumn: true, - subtype: AutoFieldSubType.AUTO_ID, - }, - }) - await createRows(new Array(10).fill({})) - }) - - describe("equal", () => { - it("successfully finds a row", async () => { - await expectQuery({ equal: { auto: 1 } }).toContainExactly([ - { auto: 1 }, - ]) - }) - - it("fails to find nonexistent row", async () => { - await expectQuery({ equal: { auto: 0 } }).toFindNothing() - }) - }) - - describe("not equal", () => { - it("successfully finds a row", async () => { - await expectQuery({ notEqual: { auto: 1 } }).toContainExactly([ - { auto: 2 }, - { auto: 3 }, - { auto: 4 }, - { auto: 5 }, - { auto: 6 }, - { auto: 7 }, - { auto: 8 }, - { auto: 9 }, - { auto: 10 }, - ]) - }) - - it("fails to find nonexistent row", async () => { - await expectQuery({ notEqual: { auto: 0 } }).toContainExactly([ - { auto: 1 }, - { auto: 2 }, - { auto: 3 }, - { auto: 4 }, - { auto: 5 }, - { auto: 6 }, - { auto: 7 }, - { auto: 8 }, - { auto: 9 }, - { auto: 10 }, - ]) - }) - }) - - describe("oneOf", () => { - it("successfully finds a row", async () => { - await expectQuery({ oneOf: { auto: [1] } }).toContainExactly([ - { auto: 1 }, - ]) - }) - - it("fails to find nonexistent row", async () => { - await expectQuery({ oneOf: { auto: [0] } }).toFindNothing() - }) - }) - - describe("range", () => { - it("successfully finds a row", async () => { - await expectQuery({ - range: { auto: { low: 1, high: 1 } }, - }).toContainExactly([{ auto: 1 }]) - }) - - it("successfully finds multiple rows", async () => { - await expectQuery({ - range: { auto: { low: 1, high: 2 } }, - }).toContainExactly([{ auto: 1 }, { auto: 2 }]) - }) - - it("successfully finds a row with a high bound", async () => { - await expectQuery({ - range: { auto: { low: 2, high: 2 } }, - }).toContainExactly([{ auto: 2 }]) - }) - - it("successfully finds no rows", async () => { - await expectQuery({ - range: { auto: { low: 0, high: 0 } }, - }).toFindNothing() - }) - - it("can search using just a low value", async () => { - await expectQuery({ - range: { auto: { low: 9 } }, - }).toContainExactly([{ auto: 9 }, { auto: 10 }]) - }) - - it("can search using just a high value", async () => { - await expectQuery({ - range: { auto: { high: 2 } }, - }).toContainExactly([{ auto: 1 }, { auto: 2 }]) - }) - }) - - describe("sort", () => { - it("sorts ascending", async () => { - await expectSearch({ - query: {}, - sort: "auto", - sortOrder: SortOrder.ASCENDING, - sortType: SortType.NUMBER, - }).toMatchExactly([ - { auto: 1 }, - { auto: 2 }, - { auto: 3 }, - { auto: 4 }, - { auto: 5 }, - { auto: 6 }, - { auto: 7 }, - { auto: 8 }, - { auto: 9 }, - { auto: 10 }, - ]) - }) - - it("sorts descending", async () => { - await expectSearch({ - query: {}, - sort: "auto", - sortOrder: SortOrder.DESCENDING, - sortType: SortType.NUMBER, - }).toMatchExactly([ - { auto: 10 }, - { auto: 9 }, - { auto: 8 }, - { auto: 7 }, - { auto: 6 }, - { auto: 5 }, - { auto: 4 }, - { auto: 3 }, - { auto: 2 }, - { auto: 1 }, - ]) - }) - - // This is important for pagination. The order of results must always - // be stable or pagination will break. We don't want the user to need - // to specify an order for pagination to work. 
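The comment above spells out why result ordering must be stable. As a rough sketch of the pattern the pagination tests depend on (the search callback below is a hypothetical stand-in, not the real config.api.row.search signature), a bookmark walk only visits every row exactly once if consecutive pages never reorder relative to each other:

  type Page = { rows: unknown[]; bookmark?: string | number; hasNextPage?: boolean }

  async function collectAllRows(
    search: (opts: { limit: number; bookmark?: string | number }) => Promise<Page>
  ): Promise<unknown[]> {
    const all: unknown[] = []
    let bookmark: string | number | undefined
    let hasNextPage = true
    while (hasNextPage) {
      const page = await search({ limit: 3, bookmark })
      all.push(...page.rows)
      bookmark = page.bookmark
      // Stop when the backend reports no further pages or returns nothing;
      // with an unstable sort this loop could otherwise skip or repeat rows.
      hasNextPage = !!page.hasNextPage && page.rows.length > 0
    }
    return all
  }
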
- it("is stable without a sort specified", async () => { - let { rows: fullRowList } = await config.api.row.search( - tableOrViewId, - { - tableId: tableOrViewId, - query: {}, - } + const future = new Date( + serverTime.getTime() + 1000 * 60 * 60 * 24 * 30 ) - // repeat the search many times to check the first row is always the same - let bookmark: string | number | undefined, - hasNextPage: boolean | undefined = true, - rowCount: number = 0 - do { - const response = await config.api.row.search(tableOrViewId, { - tableId: tableOrViewId, + const rows = (currentUser: User) => { + return [ + { name: "foo", appointment: "1982-01-05T00:00:00.000Z" }, + { name: "bar", appointment: "1995-05-06T00:00:00.000Z" }, + { + name: currentUser.firstName, + appointment: future.toISOString(), + }, + { + name: "serverDate", + appointment: serverTime.toISOString(), + }, + { + name: "single user, session user", + single_user: currentUser, + }, + { + name: "single user", + single_user: globalUsers[0], + }, + { + name: "deprecated single user, session user", + deprecated_single_user: [currentUser], + }, + { + name: "deprecated single user", + deprecated_single_user: [globalUsers[0]], + }, + { + name: "multi user", + multi_user: globalUsers, + }, + { + name: "multi user with session user", + multi_user: [...globalUsers, currentUser], + }, + { + name: "deprecated multi user", + deprecated_multi_user: globalUsers, + }, + { + name: "deprecated multi user with session user", + deprecated_multi_user: [...globalUsers, currentUser], + }, + ] + } + + beforeAll(async () => { + // Set up some global users + globalUsers = await Promise.all( + Array(2) + .fill(0) + .map(async () => { + const globalUser = await config.globalUser() + const userMedataId = globalUser._id + ? dbCore.generateUserMetadataID(globalUser._id) + : null + return { + _id: globalUser._id, + _meta: userMedataId, + } + }) + ) + + tableOrViewId = await createTableOrView({ + name: { name: "name", type: FieldType.STRING }, + appointment: { + name: "appointment", + type: FieldType.DATETIME, + }, + single_user: { + name: "single_user", + type: FieldType.BB_REFERENCE_SINGLE, + subtype: BBReferenceFieldSubType.USER, + }, + deprecated_single_user: { + name: "deprecated_single_user", + type: FieldType.BB_REFERENCE, + subtype: BBReferenceFieldSubType.USER, + }, + multi_user: { + name: "multi_user", + type: FieldType.BB_REFERENCE, + subtype: BBReferenceFieldSubType.USER, + constraints: { + type: "array", + }, + }, + deprecated_multi_user: { + name: "deprecated_multi_user", + type: FieldType.BB_REFERENCE, + subtype: BBReferenceFieldSubType.USERS, + constraints: { + type: "array", + }, + }, + }) + await createRows(rows(config.getUser())) + }) + + // !! 
Current User is auto generated per run + it("should return all rows matching the session user firstname", async () => { + await expectQuery({ + equal: { name: "{{ [user].firstName }}" }, + }).toContainExactly([ + { + name: config.getUser().firstName, + appointment: future.toISOString(), + }, + ]) + }) + + it("should return all rows matching the session user firstname when logical operator used", async () => { + await expectQuery({ + $and: { + conditions: [ + { equal: { name: "{{ [user].firstName }}" } }, + ], + }, + }).toContainExactly([ + { + name: config.getUser().firstName, + appointment: future.toISOString(), + }, + ]) + }) + + it("should parse the date binding and return all rows after the resolved value", async () => { + await tk.withFreeze(serverTime, async () => { + await expectQuery({ + range: { + appointment: { + low: "{{ [now] }}", + high: "9999-00-00T00:00:00.000Z", + }, + }, + }).toContainExactly([ + { + name: config.getUser().firstName, + appointment: future.toISOString(), + }, + { + name: "serverDate", + appointment: serverTime.toISOString(), + }, + ]) + }) + }) + + it("should parse the date binding and return all rows before the resolved value", async () => { + await expectQuery({ + range: { + appointment: { + low: "0000-00-00T00:00:00.000Z", + high: "{{ [now] }}", + }, + }, + }).toContainExactly([ + { name: "foo", appointment: "1982-01-05T00:00:00.000Z" }, + { name: "bar", appointment: "1995-05-06T00:00:00.000Z" }, + { + name: "serverDate", + appointment: serverTime.toISOString(), + }, + ]) + }) + + it("should parse the encoded js snippet. Return rows with appointments up to 1 week in the past", async () => { + const jsBinding = "return snippets.WeeksAgo();" + const encodedBinding = encodeJSBinding(jsBinding) + + await expectQuery({ + range: { + appointment: { + low: "0000-00-00T00:00:00.000Z", + high: encodedBinding, + }, + }, + }).toContainExactly([ + { name: "foo", appointment: "1982-01-05T00:00:00.000Z" }, + { name: "bar", appointment: "1995-05-06T00:00:00.000Z" }, + ]) + }) + + it("should parse the encoded js binding. 
Return rows with appointments 2 weeks in the past", async () => { + const jsBinding = `const currentTime = new Date(${Date.now()})\ncurrentTime.setDate(currentTime.getDate()-14);\nreturn currentTime.toISOString();` + const encodedBinding = encodeJSBinding(jsBinding) + + await expectQuery({ + range: { + appointment: { + low: "0000-00-00T00:00:00.000Z", + high: encodedBinding, + }, + }, + }).toContainExactly([ + { name: "foo", appointment: "1982-01-05T00:00:00.000Z" }, + { name: "bar", appointment: "1995-05-06T00:00:00.000Z" }, + ]) + }) + + it("should match a single user row by the session user id", async () => { + await expectQuery({ + equal: { single_user: "{{ [user]._id }}" }, + }).toContainExactly([ + { + name: "single user, session user", + single_user: { _id: config.getUser()._id }, + }, + ]) + }) + + it("should match a deprecated single user row by the session user id", async () => { + await expectQuery({ + equal: { deprecated_single_user: "{{ [user]._id }}" }, + }).toContainExactly([ + { + name: "deprecated single user, session user", + deprecated_single_user: [{ _id: config.getUser()._id }], + }, + ]) + }) + + it("should match the session user id in a multi user field", async () => { + const allUsers = [...globalUsers, config.getUser()].map( + (user: any) => { + return { _id: user._id } + } + ) + + await expectQuery({ + contains: { multi_user: ["{{ [user]._id }}"] }, + }).toContainExactly([ + { + name: "multi user with session user", + multi_user: allUsers, + }, + ]) + }) + + it("should match the session user id in a deprecated multi user field", async () => { + const allUsers = [...globalUsers, config.getUser()].map( + (user: any) => { + return { _id: user._id } + } + ) + + await expectQuery({ + contains: { deprecated_multi_user: ["{{ [user]._id }}"] }, + }).toContainExactly([ + { + name: "deprecated multi user with session user", + deprecated_multi_user: allUsers, + }, + ]) + }) + + it("should not match the session user id in a multi user field", async () => { + await expectQuery({ + notContains: { multi_user: ["{{ [user]._id }}"] }, + notEmpty: { multi_user: true }, + }).toContainExactly([ + { + name: "multi user", + multi_user: globalUsers.map((user: any) => { + return { _id: user._id } + }), + }, + ]) + }) + + it("should not match the session user id in a deprecated multi user field", async () => { + await expectQuery({ + notContains: { + deprecated_multi_user: ["{{ [user]._id }}"], + }, + notEmpty: { deprecated_multi_user: true }, + }).toContainExactly([ + { + name: "deprecated multi user", + deprecated_multi_user: globalUsers.map((user: any) => { + return { _id: user._id } + }), + }, + ]) + }) + + it("should match the session user id and a user table row id using helpers, user binding and a static user id.", async () => { + await expectQuery({ + oneOf: { + single_user: [ + "{{ default [user]._id '_empty_' }}", + globalUsers[0]._id, + ], + }, + }).toContainExactly([ + { + name: "single user, session user", + single_user: { _id: config.getUser()._id }, + }, + { + name: "single user", + single_user: { _id: globalUsers[0]._id }, + }, + ]) + }) + + it("should match the session user id and a user table row id using helpers, user binding and a static user id. 
(deprecated single user)", async () => { + await expectQuery({ + oneOf: { + deprecated_single_user: [ + "{{ default [user]._id '_empty_' }}", + globalUsers[0]._id, + ], + }, + }).toContainExactly([ + { + name: "deprecated single user, session user", + deprecated_single_user: [{ _id: config.getUser()._id }], + }, + { + name: "deprecated single user", + deprecated_single_user: [{ _id: globalUsers[0]._id }], + }, + ]) + }) + + it("should resolve 'default' helper to '_empty_' when binding resolves to nothing", async () => { + await expectQuery({ + oneOf: { + single_user: [ + "{{ default [user]._idx '_empty_' }}", + globalUsers[0]._id, + ], + }, + }).toContainExactly([ + { + name: "single user", + single_user: { _id: globalUsers[0]._id }, + }, + ]) + }) + + it("should resolve 'default' helper to '_empty_' when binding resolves to nothing (deprecated single user)", async () => { + await expectQuery({ + oneOf: { + deprecated_single_user: [ + "{{ default [user]._idx '_empty_' }}", + globalUsers[0]._id, + ], + }, + }).toContainExactly([ + { + name: "deprecated single user", + deprecated_single_user: [{ _id: globalUsers[0]._id }], + }, + ]) + }) + }) + + const stringTypes = [FieldType.STRING, FieldType.LONGFORM] as const + describe.each(stringTypes)("%s", type => { + beforeAll(async () => { + tableOrViewId = await createTableOrView({ + name: { name: "name", type }, + }) + await createRows([{ name: "foo" }, { name: "bar" }]) + }) + + describe("misc", () => { + it("should return all if no query is passed", async () => { + await expectSearch({} as RowSearchParams).toContainExactly([ + { name: "foo" }, + { name: "bar" }, + ]) + }) + + it("should return all if empty query is passed", async () => { + await expectQuery({}).toContainExactly([ + { name: "foo" }, + { name: "bar" }, + ]) + }) + + it("should return all if onEmptyFilter is RETURN_ALL", async () => { + await expectQuery({ + onEmptyFilter: EmptyFilterOption.RETURN_ALL, + }).toContainExactly([{ name: "foo" }, { name: "bar" }]) + }) + + // onEmptyFilter cannot be sent to view searches + !isView && + it("should return nothing if onEmptyFilter is RETURN_NONE", async () => { + await expectQuery({ + onEmptyFilter: EmptyFilterOption.RETURN_NONE, + }).toFindNothing() + }) + + it("should respect limit", async () => { + await expectSearch({ limit: 1, paginate: true, query: {}, - bookmark, - }) - bookmark = response.bookmark - hasNextPage = response.hasNextPage - expect(response.rows.length).toEqual(1) - const foundRow = response.rows[0] - expect(foundRow).toEqual(fullRowList[rowCount++]) - } while (hasNextPage) + }).toHaveLength(1) + }) }) - }) - describe("pagination", () => { - it("should paginate through all rows", async () => { - // @ts-ignore - let bookmark: string | number = undefined - let rows: Row[] = [] + describe("equal", () => { + it("successfully finds a row", async () => { + await expectQuery({ + equal: { name: "foo" }, + }).toContainExactly([{ name: "foo" }]) + }) - // eslint-disable-next-line no-constant-condition - while (true) { - const response = await config.api.row.search(tableOrViewId, { - tableId: tableOrViewId, - limit: 3, + it("fails to find nonexistent row", async () => { + await expectQuery({ equal: { name: "none" } }).toFindNothing() + }) + + it("works as an or condition", async () => { + await expectQuery({ + allOr: true, + equal: { name: "foo" }, + oneOf: { name: ["bar"] }, + }).toContainExactly([{ name: "foo" }, { name: "bar" }]) + }) + + it("can have multiple values for same column", async () => { + await expectQuery({ + 
allOr: true, + equal: { "1:name": "foo", "2:name": "bar" }, + }).toContainExactly([{ name: "foo" }, { name: "bar" }]) + }) + }) + + describe("notEqual", () => { + it("successfully finds a row", async () => { + await expectQuery({ + notEqual: { name: "foo" }, + }).toContainExactly([{ name: "bar" }]) + }) + + it("fails to find nonexistent row", async () => { + await expectQuery({ + notEqual: { name: "bar" }, + }).toContainExactly([{ name: "foo" }]) + }) + }) + + describe("oneOf", () => { + it("successfully finds a row", async () => { + await expectQuery({ + oneOf: { name: ["foo"] }, + }).toContainExactly([{ name: "foo" }]) + }) + + it("fails to find nonexistent row", async () => { + await expectQuery({ + oneOf: { name: ["none"] }, + }).toFindNothing() + }) + + it("can have multiple values for same column", async () => { + await expectQuery({ + oneOf: { + name: ["foo", "bar"], + }, + }).toContainExactly([{ name: "foo" }, { name: "bar" }]) + }) + + it("splits comma separated strings", async () => { + await expectQuery({ + oneOf: { + // @ts-ignore + name: "foo,bar", + }, + }).toContainExactly([{ name: "foo" }, { name: "bar" }]) + }) + + it("trims whitespace", async () => { + await expectQuery({ + oneOf: { + // @ts-ignore + name: "foo, bar", + }, + }).toContainExactly([{ name: "foo" }, { name: "bar" }]) + }) + + it("empty arrays returns all when onEmptyFilter is set to return 'all'", async () => { + await expectQuery({ + onEmptyFilter: EmptyFilterOption.RETURN_ALL, + oneOf: { name: [] }, + }).toContainExactly([{ name: "foo" }, { name: "bar" }]) + }) + + // onEmptyFilter cannot be sent to view searches + !isView && + it("empty arrays returns all when onEmptyFilter is set to return 'none'", async () => { + await expectQuery({ + onEmptyFilter: EmptyFilterOption.RETURN_NONE, + oneOf: { name: [] }, + }).toContainExactly([]) + }) + }) + + describe("fuzzy", () => { + it("successfully finds a row", async () => { + await expectQuery({ fuzzy: { name: "oo" } }).toContainExactly( + [{ name: "foo" }] + ) + }) + + it("fails to find nonexistent row", async () => { + await expectQuery({ fuzzy: { name: "none" } }).toFindNothing() + }) + }) + + describe("string", () => { + it("successfully finds a row", async () => { + await expectQuery({ + string: { name: "fo" }, + }).toContainExactly([{ name: "foo" }]) + }) + + it("fails to find nonexistent row", async () => { + await expectQuery({ + string: { name: "none" }, + }).toFindNothing() + }) + + it("is case-insensitive", async () => { + await expectQuery({ + string: { name: "FO" }, + }).toContainExactly([{ name: "foo" }]) + }) + }) + + describe("range", () => { + it("successfully finds multiple rows", async () => { + await expectQuery({ + range: { name: { low: "a", high: "z" } }, + }).toContainExactly([{ name: "bar" }, { name: "foo" }]) + }) + + it("successfully finds a row with a high bound", async () => { + await expectQuery({ + range: { name: { low: "a", high: "c" } }, + }).toContainExactly([{ name: "bar" }]) + }) + + it("successfully finds a row with a low bound", async () => { + await expectQuery({ + range: { name: { low: "f", high: "z" } }, + }).toContainExactly([{ name: "foo" }]) + }) + + it("successfully finds no rows", async () => { + await expectQuery({ + range: { name: { low: "g", high: "h" } }, + }).toFindNothing() + }) + + it("ignores low if it's an empty object", async () => { + await expectQuery({ + // @ts-ignore + range: { name: { low: {}, high: "z" } }, + }).toContainExactly([{ name: "foo" }, { name: "bar" }]) + }) + + it("ignores high if it's an 
empty object", async () => { + await expectQuery({ + // @ts-ignore + range: { name: { low: "a", high: {} } }, + }).toContainExactly([{ name: "foo" }, { name: "bar" }]) + }) + }) + + describe("empty", () => { + it("finds no empty rows", async () => { + await expectQuery({ empty: { name: null } }).toFindNothing() + }) + + it("should not be affected by when filter empty behaviour", async () => { + await expectQuery({ + empty: { name: null }, + onEmptyFilter: EmptyFilterOption.RETURN_ALL, + }).toFindNothing() + }) + }) + + describe("notEmpty", () => { + it("finds all non-empty rows", async () => { + await expectQuery({ + notEmpty: { name: null }, + }).toContainExactly([{ name: "foo" }, { name: "bar" }]) + }) + + it("should not be affected by when filter empty behaviour", async () => { + await expectQuery({ + notEmpty: { name: null }, + onEmptyFilter: EmptyFilterOption.RETURN_NONE, + }).toContainExactly([{ name: "foo" }, { name: "bar" }]) + }) + }) + + describe("sort", () => { + it("sorts ascending", async () => { + await expectSearch({ query: {}, - bookmark, - paginate: true, + sort: "name", + sortOrder: SortOrder.ASCENDING, + }).toMatchExactly([{ name: "bar" }, { name: "foo" }]) + }) + + it("sorts descending", async () => { + await expectSearch({ + query: {}, + sort: "name", + sortOrder: SortOrder.DESCENDING, + }).toMatchExactly([{ name: "foo" }, { name: "bar" }]) + }) + + describe("sortType STRING", () => { + it("sorts ascending", async () => { + await expectSearch({ + query: {}, + sort: "name", + sortType: SortType.STRING, + sortOrder: SortOrder.ASCENDING, + }).toMatchExactly([{ name: "bar" }, { name: "foo" }]) }) - rows.push(...response.rows) + it("sorts descending", async () => { + await expectSearch({ + query: {}, + sort: "name", + sortType: SortType.STRING, + sortOrder: SortOrder.DESCENDING, + }).toMatchExactly([{ name: "foo" }, { name: "bar" }]) + }) + }) - if (!response.bookmark || !response.hasNextPage) { - break - } - bookmark = response.bookmark - } + !isInternal && + !isInMemory && + // This test was added because we automatically add in a sort by the + // primary key, and we used to do this unconditionally which caused + // problems because it was possible for the primary key to appear twice + // in the resulting SQL ORDER BY clause, resulting in an SQL error. + // We now check first to make sure that the primary key isn't already + // in the sort before adding it. + describe("sort on primary key", () => { + beforeAll(async () => { + const tableName = structures.uuid().substring(0, 10) + await client!.schema.createTable(tableName, t => { + t.string("name").primary() + }) + const resp = await config.api.datasource.fetchSchema({ + datasourceId: datasource!._id!, + }) - const autoValues = rows - .map(row => row.auto) - .sort((a, b) => a - b) - expect(autoValues).toEqual([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]) + tableOrViewId = resp.datasource.entities![tableName]._id! 
+ + await createRows([{ name: "foo" }, { name: "bar" }]) + }) + + it("should be able to sort by a primary key column ascending", async () => + expectSearch({ + query: {}, + sort: "name", + sortOrder: SortOrder.ASCENDING, + }).toMatchExactly([{ name: "bar" }, { name: "foo" }])) + + it("should be able to sort by a primary key column descending", async () => + expectSearch({ + query: {}, + sort: "name", + sortOrder: SortOrder.DESCENDING, + }).toMatchExactly([{ name: "foo" }, { name: "bar" }])) + }) }) }) - }) - describe("field name 1:name", () => { - beforeAll(async () => { - tableOrViewId = await createTableOrView({ - "1:name": { name: "1:name", type: FieldType.STRING }, - }) - await createRows([{ "1:name": "bar" }, { "1:name": "foo" }]) - }) - - it("successfully finds a row", async () => { - await expectQuery({ - equal: { "1:1:name": "bar" }, - }).toContainExactly([{ "1:name": "bar" }]) - }) - - it("fails to find nonexistent row", async () => { - await expectQuery({ equal: { "1:1:name": "none" } }).toFindNothing() - }) - }) - - isSql && - describe("related formulas", () => { - beforeAll(async () => { - const arrayTable = await createTable({ - name: { name: "name", type: FieldType.STRING }, - array: { - name: "array", - type: FieldType.ARRAY, - constraints: { - type: JsonFieldSubType.ARRAY, - inclusion: ["option 1", "option 2"], - }, - }, + describe("numbers", () => { + beforeAll(async () => { + tableOrViewId = await createTableOrView({ + age: { name: "age", type: FieldType.NUMBER }, + }) + await createRows([{ age: 1 }, { age: 10 }]) }) - tableOrViewId = await createTableOrView({ - relationship: { - type: FieldType.LINK, - relationshipType: RelationshipType.MANY_TO_ONE, - name: "relationship", - fieldName: "relate", - tableId: arrayTable, - constraints: { - type: "array", - }, - }, - formula: { - type: FieldType.FORMULA, - name: "formula", - formula: encodeJSBinding( - `let array = [];$("relationship").forEach(rel => array = array.concat(rel.array));return array.sort().join(",")` - ), - }, - }) - const arrayRows = await Promise.all([ - config.api.row.save(arrayTable, { - name: "foo", - array: ["option 1"], - }), - config.api.row.save(arrayTable, { - name: "bar", - array: ["option 2"], - }), - ]) - await Promise.all([ - config.api.row.save(tableOrViewId, { - relationship: [arrayRows[0]._id, arrayRows[1]._id], - }), - ]) - }) - it("formula is correct with relationship arrays", async () => { - await expectQuery({}).toContain([ - { formula: "option 1,option 2" }, - ]) - }) - }) - - describe("user", () => { - let user1: User - let user2: User - - beforeAll(async () => { - user1 = await config.createUser({ _id: `us_${utils.newid()}` }) - user2 = await config.createUser({ _id: `us_${utils.newid()}` }) - - tableOrViewId = await createTableOrView({ - user: { - name: "user", - type: FieldType.BB_REFERENCE_SINGLE, - subtype: BBReferenceFieldSubType.USER, - }, - }) - - await createRows([{ user: user1 }, { user: user2 }, { user: null }]) - }) - - describe("equal", () => { - it("successfully finds a row", async () => { - await expectQuery({ - equal: { user: user1._id }, - }).toContainExactly([{ user: { _id: user1._id } }]) - }) - - it("fails to find nonexistent row", async () => { - await expectQuery({ equal: { user: "us_none" } }).toFindNothing() - }) - }) - - describe("notEqual", () => { - it("successfully finds a row", async () => { - await expectQuery({ - notEqual: { user: user1._id }, - }).toContainExactly([{ user: { _id: user2._id } }, {}]) - }) - - it("fails to find nonexistent row", async () => { 
- await expectQuery({ - notEqual: { user: "us_none" }, - }).toContainExactly([ - { user: { _id: user1._id } }, - { user: { _id: user2._id } }, - {}, - ]) - }) - }) - - describe("oneOf", () => { - it("successfully finds a row", async () => { - await expectQuery({ - oneOf: { user: [user1._id] }, - }).toContainExactly([{ user: { _id: user1._id } }]) - }) - - it("fails to find nonexistent row", async () => { - await expectQuery({ - oneOf: { user: ["us_none"] }, - }).toFindNothing() - }) - }) - - describe("empty", () => { - it("finds empty rows", async () => { - await expectQuery({ empty: { user: null } }).toContainExactly([ - {}, - ]) - }) - }) - - describe("notEmpty", () => { - it("finds non-empty rows", async () => { - await expectQuery({ notEmpty: { user: null } }).toContainExactly([ - { user: { _id: user1._id } }, - { user: { _id: user2._id } }, - ]) - }) - }) - }) - - describe("multi user", () => { - let user1: User - let user2: User - - beforeAll(async () => { - user1 = await config.createUser({ _id: `us_${utils.newid()}` }) - user2 = await config.createUser({ _id: `us_${utils.newid()}` }) - - tableOrViewId = await createTableOrView({ - users: { - name: "users", - type: FieldType.BB_REFERENCE, - subtype: BBReferenceFieldSubType.USER, - constraints: { type: "array" }, - }, - number: { - name: "number", - type: FieldType.NUMBER, - }, - }) - - await createRows([ - { number: 1, users: [user1] }, - { number: 2, users: [user2] }, - { number: 3, users: [user1, user2] }, - { number: 4, users: [] }, - ]) - }) - - describe("contains", () => { - it("successfully finds a row", async () => { - await expectQuery({ - contains: { users: [user1._id] }, - }).toContainExactly([ - { users: [{ _id: user1._id }] }, - { users: [{ _id: user1._id }, { _id: user2._id }] }, - ]) - }) - - it("successfully finds a row searching with a string", async () => { - await expectQuery({ - // @ts-expect-error this test specifically goes against the type to - // test that we coerce the string to an array. 
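The "@ts-expect-error" comment directly above covers a test that hands a bare string to an array-style contains filter; a tiny sketch of the coercion it relies on, using an invented helper name rather than the real implementation:

  // A single value supplied for an array filter is wrapped before the query is built,
  // so contains: { users: "us_a" } behaves the same as contains: { users: ["us_a"] }.
  function coerceToArray<T>(value: T | T[]): T[] {
    return Array.isArray(value) ? value : [value]
  }
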
- contains: { "1:users": user1._id }, - }).toContainExactly([ - { users: [{ _id: user1._id }] }, - { users: [{ _id: user1._id }, { _id: user2._id }] }, - ]) - }) - - it("fails to find nonexistent row", async () => { - await expectQuery({ - contains: { users: ["us_none"] }, - }).toFindNothing() - }) - }) - - describe("notContains", () => { - it("successfully finds a row", async () => { - await expectQuery({ - notContains: { users: [user1._id] }, - }).toContainExactly([{ users: [{ _id: user2._id }] }, {}]) - }) - - it("fails to find nonexistent row", async () => { - await expectQuery({ - notContains: { users: ["us_none"] }, - }).toContainExactly([ - { users: [{ _id: user1._id }] }, - { users: [{ _id: user2._id }] }, - { users: [{ _id: user1._id }, { _id: user2._id }] }, - {}, - ]) - }) - }) - - describe("containsAny", () => { - it("successfully finds rows", async () => { - await expectQuery({ - containsAny: { users: [user1._id, user2._id] }, - }).toContainExactly([ - { users: [{ _id: user1._id }] }, - { users: [{ _id: user2._id }] }, - { users: [{ _id: user1._id }, { _id: user2._id }] }, - ]) - }) - - it("fails to find nonexistent row", async () => { - await expectQuery({ - containsAny: { users: ["us_none"] }, - }).toFindNothing() - }) - }) - - describe("multi-column equals", () => { - it("successfully finds a row", async () => { - await expectQuery({ - equal: { number: 1 }, - contains: { users: [user1._id] }, - }).toContainExactly([{ users: [{ _id: user1._id }], number: 1 }]) - }) - - it("fails to find nonexistent row", async () => { - await expectQuery({ - equal: { number: 2 }, - contains: { users: [user1._id] }, - }).toFindNothing() - }) - }) - }) - - // It also can't work for in-memory searching because the related table name - // isn't available. 
- !isInMemory && - describe.each([ - RelationshipType.ONE_TO_MANY, - RelationshipType.MANY_TO_ONE, - RelationshipType.MANY_TO_MANY, - ])("relations (%s)", relationshipType => { - let productCategoryTable: Table, productCatRows: Row[] - - beforeAll(async () => { - const { relatedTable, tableId } = await basicRelationshipTables( - relationshipType - ) - tableOrViewId = tableId - productCategoryTable = relatedTable - - productCatRows = await Promise.all([ - config.api.row.save(productCategoryTable._id!, { name: "foo" }), - config.api.row.save(productCategoryTable._id!, { name: "bar" }), - ]) - - await Promise.all([ - config.api.row.save(tableOrViewId, { - name: "foo", - productCat: [productCatRows[0]._id], - }), - config.api.row.save(tableOrViewId, { - name: "bar", - productCat: [productCatRows[1]._id], - }), - config.api.row.save(tableOrViewId, { - name: "baz", - productCat: [], - }), - ]) - }) - - it("should be able to filter by relationship using column name", async () => { - await expectQuery({ - equal: { ["productCat.name"]: "foo" }, - }).toContainExactly([ - { name: "foo", productCat: [{ _id: productCatRows[0]._id }] }, - ]) - }) - - it("should be able to filter by relationship using table name", async () => { - await expectQuery({ - equal: { [`${productCategoryTable.name}.name`]: "foo" }, - }).toContainExactly([ - { name: "foo", productCat: [{ _id: productCatRows[0]._id }] }, - ]) - }) - - it("shouldn't return any relationship for last row", async () => { - await expectQuery({ - equal: { ["name"]: "baz" }, - }).toContainExactly([{ name: "baz", productCat: undefined }]) - }) - - describe("logical filters", () => { - const logicalOperators = [LogicalOperator.AND, LogicalOperator.OR] - - describe("$and", () => { - it("should allow single conditions", async () => { - await expectQuery({ - $and: { - conditions: [ - { - equal: { ["productCat.name"]: "foo" }, - }, - ], - }, - }).toContainExactly([ - { - name: "foo", - productCat: [{ _id: productCatRows[0]._id }], - }, + describe("equal", () => { + it("successfully finds a row", async () => { + await expectQuery({ equal: { age: 1 } }).toContainExactly([ + { age: 1 }, ]) }) - it("should allow exclusive conditions", async () => { - await expectQuery({ - $and: { - conditions: [ - { - equal: { ["productCat.name"]: "foo" }, - notEqual: { ["productCat.name"]: "foo" }, - }, - ], - }, - }).toContainExactly([]) + it("fails to find nonexistent row", async () => { + await expectQuery({ equal: { age: 2 } }).toFindNothing() + }) + }) + + describe("notEqual", () => { + it("successfully finds a row", async () => { + await expectQuery({ notEqual: { age: 1 } }).toContainExactly([ + { age: 10 }, + ]) }) - it.each([logicalOperators])( - "should allow nested ands with single conditions (with %s as root)", - async rootOperator => { + it("fails to find nonexistent row", async () => { + await expectQuery({ notEqual: { age: 10 } }).toContainExactly( + [{ age: 1 }] + ) + }) + }) + + describe("oneOf", () => { + it("successfully finds a row", async () => { + await expectQuery({ oneOf: { age: [1] } }).toContainExactly([ + { age: 1 }, + ]) + }) + + it("fails to find nonexistent row", async () => { + await expectQuery({ oneOf: { age: [2] } }).toFindNothing() + }) + + it("can convert from a string", async () => { + await expectQuery({ + oneOf: { + // @ts-ignore + age: "1", + }, + }).toContainExactly([{ age: 1 }]) + }) + + it("can find multiple values for same column", async () => { + await expectQuery({ + oneOf: { + // @ts-ignore + age: "1,10", + }, + }).toContainExactly([{ 
age: 1 }, { age: 10 }]) + }) + }) + + describe("range", () => { + it("successfully finds a row", async () => { + await expectQuery({ + range: { age: { low: 1, high: 5 } }, + }).toContainExactly([{ age: 1 }]) + }) + + it("successfully finds multiple rows", async () => { + await expectQuery({ + range: { age: { low: 1, high: 10 } }, + }).toContainExactly([{ age: 1 }, { age: 10 }]) + }) + + it("successfully finds a row with a high bound", async () => { + await expectQuery({ + range: { age: { low: 5, high: 10 } }, + }).toContainExactly([{ age: 10 }]) + }) + + it("successfully finds no rows", async () => { + await expectQuery({ + range: { age: { low: 5, high: 9 } }, + }).toFindNothing() + }) + + it("greater than equal to", async () => { + await expectQuery({ + range: { + age: { low: 10, high: Number.MAX_SAFE_INTEGER }, + }, + }).toContainExactly([{ age: 10 }]) + }) + + it("greater than", async () => { + await expectQuery({ + range: { + age: { low: 5, high: Number.MAX_SAFE_INTEGER }, + }, + }).toContainExactly([{ age: 10 }]) + }) + + it("less than equal to", async () => { + await expectQuery({ + range: { + age: { high: 1, low: Number.MIN_SAFE_INTEGER }, + }, + }).toContainExactly([{ age: 1 }]) + }) + + it("less than", async () => { + await expectQuery({ + range: { + age: { high: 5, low: Number.MIN_SAFE_INTEGER }, + }, + }).toContainExactly([{ age: 1 }]) + }) + }) + + describe("sort", () => { + it("sorts ascending", async () => { + await expectSearch({ + query: {}, + sort: "age", + sortOrder: SortOrder.ASCENDING, + }).toMatchExactly([{ age: 1 }, { age: 10 }]) + }) + + it("sorts descending", async () => { + await expectSearch({ + query: {}, + sort: "age", + sortOrder: SortOrder.DESCENDING, + }).toMatchExactly([{ age: 10 }, { age: 1 }]) + }) + }) + + describe("sortType NUMBER", () => { + it("sorts ascending", async () => { + await expectSearch({ + query: {}, + sort: "age", + sortType: SortType.NUMBER, + sortOrder: SortOrder.ASCENDING, + }).toMatchExactly([{ age: 1 }, { age: 10 }]) + }) + + it("sorts descending", async () => { + await expectSearch({ + query: {}, + sort: "age", + sortType: SortType.NUMBER, + sortOrder: SortOrder.DESCENDING, + }).toMatchExactly([{ age: 10 }, { age: 1 }]) + }) + }) + }) + + describe("dates", () => { + const JAN_1ST = "2020-01-01T00:00:00.000Z" + const JAN_2ND = "2020-01-02T00:00:00.000Z" + const JAN_5TH = "2020-01-05T00:00:00.000Z" + const JAN_9TH = "2020-01-09T00:00:00.000Z" + const JAN_10TH = "2020-01-10T00:00:00.000Z" + + beforeAll(async () => { + tableOrViewId = await createTableOrView({ + dob: { name: "dob", type: FieldType.DATETIME }, + }) + + await createRows([{ dob: JAN_1ST }, { dob: JAN_10TH }]) + }) + + describe("equal", () => { + it("successfully finds a row", async () => { + await expectQuery({ + equal: { dob: JAN_1ST }, + }).toContainExactly([{ dob: JAN_1ST }]) + }) + + it("fails to find nonexistent row", async () => { + await expectQuery({ equal: { dob: JAN_2ND } }).toFindNothing() + }) + }) + + describe("notEqual", () => { + it("successfully finds a row", async () => { + await expectQuery({ + notEqual: { dob: JAN_1ST }, + }).toContainExactly([{ dob: JAN_10TH }]) + }) + + it("fails to find nonexistent row", async () => { + await expectQuery({ + notEqual: { dob: JAN_10TH }, + }).toContainExactly([{ dob: JAN_1ST }]) + }) + }) + + describe("oneOf", () => { + it("successfully finds a row", async () => { + await expectQuery({ + oneOf: { dob: [JAN_1ST] }, + }).toContainExactly([{ dob: JAN_1ST }]) + }) + + it("fails to find nonexistent row", async () => { + await 
expectQuery({ + oneOf: { dob: [JAN_2ND] }, + }).toFindNothing() + }) + }) + + describe("range", () => { + it("successfully finds a row", async () => { + await expectQuery({ + range: { dob: { low: JAN_1ST, high: JAN_5TH } }, + }).toContainExactly([{ dob: JAN_1ST }]) + }) + + it("successfully finds multiple rows", async () => { + await expectQuery({ + range: { dob: { low: JAN_1ST, high: JAN_10TH } }, + }).toContainExactly([{ dob: JAN_1ST }, { dob: JAN_10TH }]) + }) + + it("successfully finds a row with a high bound", async () => { + await expectQuery({ + range: { dob: { low: JAN_5TH, high: JAN_10TH } }, + }).toContainExactly([{ dob: JAN_10TH }]) + }) + + it("successfully finds no rows", async () => { + await expectQuery({ + range: { dob: { low: JAN_5TH, high: JAN_9TH } }, + }).toFindNothing() + }) + + it("greater than equal to", async () => { + await expectQuery({ + range: { + dob: { + low: JAN_10TH, + high: MAX_VALID_DATE.toISOString(), + }, + }, + }).toContainExactly([{ dob: JAN_10TH }]) + }) + + it("greater than", async () => { + await expectQuery({ + range: { + dob: { low: JAN_5TH, high: MAX_VALID_DATE.toISOString() }, + }, + }).toContainExactly([{ dob: JAN_10TH }]) + }) + + it("less than equal to", async () => { + await expectQuery({ + range: { + dob: { high: JAN_1ST, low: MIN_VALID_DATE.toISOString() }, + }, + }).toContainExactly([{ dob: JAN_1ST }]) + }) + + it("less than", async () => { + await expectQuery({ + range: { + dob: { high: JAN_5TH, low: MIN_VALID_DATE.toISOString() }, + }, + }).toContainExactly([{ dob: JAN_1ST }]) + }) + }) + + describe("sort", () => { + it("sorts ascending", async () => { + await expectSearch({ + query: {}, + sort: "dob", + sortOrder: SortOrder.ASCENDING, + }).toMatchExactly([{ dob: JAN_1ST }, { dob: JAN_10TH }]) + }) + + it("sorts descending", async () => { + await expectSearch({ + query: {}, + sort: "dob", + sortOrder: SortOrder.DESCENDING, + }).toMatchExactly([{ dob: JAN_10TH }, { dob: JAN_1ST }]) + }) + + describe("sortType STRING", () => { + it("sorts ascending", async () => { + await expectSearch({ + query: {}, + sort: "dob", + sortType: SortType.STRING, + sortOrder: SortOrder.ASCENDING, + }).toMatchExactly([{ dob: JAN_1ST }, { dob: JAN_10TH }]) + }) + + it("sorts descending", async () => { + await expectSearch({ + query: {}, + sort: "dob", + sortType: SortType.STRING, + sortOrder: SortOrder.DESCENDING, + }).toMatchExactly([{ dob: JAN_10TH }, { dob: JAN_1ST }]) + }) + }) + }) + }) + + !isInternal && + describe("datetime - time only", () => { + const T_1000 = "10:00:00" + const T_1045 = "10:45:00" + const T_1200 = "12:00:00" + const T_1530 = "15:30:00" + const T_0000 = "00:00:00" + + const UNEXISTING_TIME = "10:01:00" + + const NULL_TIME__ID = `null_time__id` + + beforeAll(async () => { + tableOrViewId = await createTableOrView({ + timeid: { name: "timeid", type: FieldType.STRING }, + time: { + name: "time", + type: FieldType.DATETIME, + timeOnly: true, + }, + }) + + await createRows([ + { timeid: NULL_TIME__ID, time: null }, + { time: T_1000 }, + { time: T_1045 }, + { time: T_1200 }, + { time: T_1530 }, + { time: T_0000 }, + ]) + }) + + describe("equal", () => { + it("successfully finds a row", async () => { await expectQuery({ - [rootOperator]: { - conditions: [ - { - $and: { - conditions: [ - { - equal: { ["productCat.name"]: "foo" }, - }, - ], - }, - }, - ], + equal: { time: T_1000 }, + }).toContainExactly([{ time: "10:00:00" }]) + }) + + it("fails to find nonexistent row", async () => { + await expectQuery({ + equal: { time: UNEXISTING_TIME }, + 
}).toFindNothing() + }) + }) + + describe("notEqual", () => { + it("successfully finds a row", async () => { + await expectQuery({ + notEqual: { time: T_1000 }, + }).toContainExactly([ + { timeid: NULL_TIME__ID }, + { time: "10:45:00" }, + { time: "12:00:00" }, + { time: "15:30:00" }, + { time: "00:00:00" }, + ]) + }) + + it("return all when requesting non-existing", async () => { + await expectQuery({ + notEqual: { time: UNEXISTING_TIME }, + }).toContainExactly([ + { timeid: NULL_TIME__ID }, + { time: "10:00:00" }, + { time: "10:45:00" }, + { time: "12:00:00" }, + { time: "15:30:00" }, + { time: "00:00:00" }, + ]) + }) + }) + + describe("oneOf", () => { + it("successfully finds a row", async () => { + await expectQuery({ + oneOf: { time: [T_1000] }, + }).toContainExactly([{ time: "10:00:00" }]) + }) + + it("fails to find nonexistent row", async () => { + await expectQuery({ + oneOf: { time: [UNEXISTING_TIME] }, + }).toFindNothing() + }) + }) + + describe("range", () => { + it("successfully finds a row", async () => { + await expectQuery({ + range: { time: { low: T_1045, high: T_1045 } }, + }).toContainExactly([{ time: "10:45:00" }]) + }) + + it("successfully finds multiple rows", async () => { + await expectQuery({ + range: { time: { low: T_1045, high: T_1530 } }, + }).toContainExactly([ + { time: "10:45:00" }, + { time: "12:00:00" }, + { time: "15:30:00" }, + ]) + }) + + it("successfully finds no rows", async () => { + await expectQuery({ + range: { + time: { low: UNEXISTING_TIME, high: UNEXISTING_TIME }, + }, + }).toFindNothing() + }) + }) + + describe("sort", () => { + it("sorts ascending", async () => { + await expectSearch({ + query: {}, + sort: "time", + sortOrder: SortOrder.ASCENDING, + }).toMatchExactly([ + { timeid: NULL_TIME__ID }, + { time: "00:00:00" }, + { time: "10:00:00" }, + { time: "10:45:00" }, + { time: "12:00:00" }, + { time: "15:30:00" }, + ]) + }) + + it("sorts descending", async () => { + await expectSearch({ + query: {}, + sort: "time", + sortOrder: SortOrder.DESCENDING, + }).toMatchExactly([ + { time: "15:30:00" }, + { time: "12:00:00" }, + { time: "10:45:00" }, + { time: "10:00:00" }, + { time: "00:00:00" }, + { timeid: NULL_TIME__ID }, + ]) + }) + + describe("sortType STRING", () => { + it("sorts ascending", async () => { + await expectSearch({ + query: {}, + sort: "time", + sortType: SortType.STRING, + sortOrder: SortOrder.ASCENDING, + }).toMatchExactly([ + { timeid: NULL_TIME__ID }, + { time: "00:00:00" }, + { time: "10:00:00" }, + { time: "10:45:00" }, + { time: "12:00:00" }, + { time: "15:30:00" }, + ]) + }) + + it("sorts descending", async () => { + await expectSearch({ + query: {}, + sort: "time", + sortType: SortType.STRING, + sortOrder: SortOrder.DESCENDING, + }).toMatchExactly([ + { time: "15:30:00" }, + { time: "12:00:00" }, + { time: "10:45:00" }, + { time: "10:00:00" }, + { time: "00:00:00" }, + { timeid: NULL_TIME__ID }, + ]) + }) + }) + }) + }) + + isInternal && + !isInMemory && + describe("AI Column", () => { + const UNEXISTING_AI_COLUMN = "Real LLM Response" + + beforeAll(async () => { + mocks.licenses.useBudibaseAI() + mocks.licenses.useAICustomConfigs() + + tableOrViewId = await createTableOrView({ + product: { name: "product", type: FieldType.STRING }, + ai: { + name: "AI", + type: FieldType.AI, + operation: AIOperationEnum.PROMPT, + prompt: "Translate '{{ product }}' into German", + }, + }) + + await createRows([ + { product: "Big Mac" }, + { product: "McCrispy" }, + ]) + }) + + describe("equal", () => { + it("successfully finds rows based on 
AI column", async () => { + await expectQuery({ + equal: { ai: "Mock LLM Response" }, + }).toContainExactly([ + { product: "Big Mac" }, + { product: "McCrispy" }, + ]) + }) + + it("fails to find nonexistent row", async () => { + await expectQuery({ + equal: { ai: UNEXISTING_AI_COLUMN }, + }).toFindNothing() + }) + }) + + describe("notEqual", () => { + it("Returns nothing when searching notEqual on the mock AI response", async () => { + await expectQuery({ + notEqual: { ai: "Mock LLM Response" }, + }).toContainExactly([]) + }) + + it("return all when requesting non-existing response", async () => { + await expectQuery({ + notEqual: { ai: "Real LLM Response" }, + }).toContainExactly([ + { product: "Big Mac" }, + { product: "McCrispy" }, + ]) + }) + }) + + describe("oneOf", () => { + it("successfully finds a row", async () => { + await expectQuery({ + oneOf: { + ai: ["Mock LLM Response", "Other LLM Response"], }, }).toContainExactly([ - { - name: "foo", - productCat: [{ _id: productCatRows[0]._id }], - }, + { product: "Big Mac" }, + { product: "McCrispy" }, ]) - } - ) + }) - it.each([logicalOperators])( - "should allow nested ands with exclusive conditions (with %s as root)", - async rootOperator => { + it("fails to find nonexistent row", async () => { await expectQuery({ - [rootOperator]: { - conditions: [ - { - $and: { - conditions: [ - { - equal: { ["productCat.name"]: "foo" }, - notEqual: { ["productCat.name"]: "foo" }, - }, - ], - }, - }, - ], - }, - }).toContainExactly([]) - } - ) - - it.each([logicalOperators])( - "should allow nested ands with multiple conditions (with %s as root)", - async rootOperator => { - await expectQuery({ - [rootOperator]: { - conditions: [ - { - $and: { - conditions: [ - { - equal: { ["productCat.name"]: "foo" }, - }, - ], - }, - notEqual: { ["productCat.name"]: "foo" }, - }, - ], - }, - }).toContainExactly([]) - } - ) + oneOf: { ai: ["Whopper"] }, + }).toFindNothing() + }) + }) }) - describe("$ors", () => { - it("should allow single conditions", async () => { + describe("arrays", () => { + beforeAll(async () => { + tableOrViewId = await createTableOrView({ + numbers: { + name: "numbers", + type: FieldType.ARRAY, + constraints: { + type: JsonFieldSubType.ARRAY, + inclusion: ["one", "two", "three"], + }, + }, + }) + await createRows([ + { numbers: ["one", "two"] }, + { numbers: ["three"] }, + ]) + }) + + describe("contains", () => { + it("successfully finds a row", async () => { await expectQuery({ - $or: { - conditions: [ - { - equal: { ["productCat.name"]: "foo" }, - }, - ], - }, + contains: { numbers: ["one"] }, + }).toContainExactly([{ numbers: ["one", "two"] }]) + }) + + it("fails to find nonexistent row", async () => { + await expectQuery({ + contains: { numbers: ["none"] }, + }).toFindNothing() + }) + + it("fails to find row containing all", async () => { + await expectQuery({ + contains: { numbers: ["one", "two", "three"] }, + }).toFindNothing() + }) + + it("finds all with empty list", async () => { + await expectQuery({ + contains: { numbers: [] }, }).toContainExactly([ - { - name: "foo", - productCat: [{ _id: productCatRows[0]._id }], - }, + { numbers: ["one", "two"] }, + { numbers: ["three"] }, + ]) + }) + }) + + describe("notContains", () => { + it("successfully finds a row", async () => { + await expectQuery({ + notContains: { numbers: ["one"] }, + }).toContainExactly([{ numbers: ["three"] }]) + }) + + it("fails to find nonexistent row", async () => { + await expectQuery({ + notContains: { numbers: ["one", "two", "three"] }, + 
}).toContainExactly([ + { numbers: ["one", "two"] }, + { numbers: ["three"] }, ]) }) - it("should allow exclusive conditions", async () => { + // Not sure if this is correct behaviour but changing it would be a + // breaking change. + it("finds all with empty list", async () => { await expectQuery({ - $or: { - conditions: [ - { - equal: { ["productCat.name"]: "foo" }, - notEqual: { ["productCat.name"]: "foo" }, - }, - ], - }, + notContains: { numbers: [] }, }).toContainExactly([ - { - name: "foo", - productCat: [{ _id: productCatRows[0]._id }], + { numbers: ["one", "two"] }, + { numbers: ["three"] }, + ]) + }) + }) + + describe("containsAny", () => { + it("successfully finds rows", async () => { + await expectQuery({ + containsAny: { numbers: ["one", "two", "three"] }, + }).toContainExactly([ + { numbers: ["one", "two"] }, + { numbers: ["three"] }, + ]) + }) + + it("fails to find nonexistent row", async () => { + await expectQuery({ + containsAny: { numbers: ["none"] }, + }).toFindNothing() + }) + + it("finds all with empty list", async () => { + await expectQuery({ + containsAny: { numbers: [] }, + }).toContainExactly([ + { numbers: ["one", "two"] }, + { numbers: ["three"] }, + ]) + }) + }) + }) + + describe("bigints", () => { + const SMALL = "1" + const MEDIUM = "10000000" + + // Our bigints are int64s in most datasources. + let BIG = "9223372036854775807" + + beforeAll(async () => { + tableOrViewId = await createTableOrView({ + num: { name: "num", type: FieldType.BIGINT }, + }) + await createRows([ + { num: SMALL }, + { num: MEDIUM }, + { num: BIG }, + ]) + }) + + describe("equal", () => { + it("successfully finds a row", async () => { + await expectQuery({ equal: { num: SMALL } }).toContainExactly( + [{ num: SMALL }] + ) + }) + + it("successfully finds a big value", async () => { + await expectQuery({ equal: { num: BIG } }).toContainExactly([ + { num: BIG }, + ]) + }) + + it("fails to find nonexistent row", async () => { + await expectQuery({ equal: { num: "2" } }).toFindNothing() + }) + }) + + describe("notEqual", () => { + it("successfully finds a row", async () => { + await expectQuery({ + notEqual: { num: SMALL }, + }).toContainExactly([{ num: MEDIUM }, { num: BIG }]) + }) + + it("fails to find nonexistent row", async () => { + await expectQuery({ notEqual: { num: 10 } }).toContainExactly( + [{ num: SMALL }, { num: MEDIUM }, { num: BIG }] + ) + }) + }) + + describe("oneOf", () => { + it("successfully finds a row", async () => { + await expectQuery({ + oneOf: { num: [SMALL] }, + }).toContainExactly([{ num: SMALL }]) + }) + + it("successfully finds all rows", async () => { + await expectQuery({ + oneOf: { num: [SMALL, MEDIUM, BIG] }, + }).toContainExactly([ + { num: SMALL }, + { num: MEDIUM }, + { num: BIG }, + ]) + }) + + it("fails to find nonexistent row", async () => { + await expectQuery({ oneOf: { num: [2] } }).toFindNothing() + }) + }) + + describe("range", () => { + it("successfully finds a row", async () => { + await expectQuery({ + range: { num: { low: SMALL, high: "5" } }, + }).toContainExactly([{ num: SMALL }]) + }) + + it("successfully finds multiple rows", async () => { + await expectQuery({ + range: { num: { low: SMALL, high: MEDIUM } }, + }).toContainExactly([{ num: SMALL }, { num: MEDIUM }]) + }) + + it("successfully finds a row with a high bound", async () => { + await expectQuery({ + range: { num: { low: MEDIUM, high: BIG } }, + }).toContainExactly([{ num: MEDIUM }, { num: BIG }]) + }) + + it("successfully finds no rows", async () => { + await expectQuery({ + range: 
{ num: { low: "5", high: "5" } }, + }).toFindNothing() + }) + + it("can search using just a low value", async () => { + await expectQuery({ + range: { num: { low: MEDIUM } }, + }).toContainExactly([{ num: MEDIUM }, { num: BIG }]) + }) + + it("can search using just a high value", async () => { + await expectQuery({ + range: { num: { high: MEDIUM } }, + }).toContainExactly([{ num: SMALL }, { num: MEDIUM }]) + }) + }) + }) + + isInternal && + describe("auto", () => { + beforeAll(async () => { + tableOrViewId = await createTableOrView({ + auto: { + name: "auto", + type: FieldType.AUTO, + autocolumn: true, + subtype: AutoFieldSubType.AUTO_ID, }, - { + }) + await createRows(new Array(10).fill({})) + }) + + describe("equal", () => { + it("successfully finds a row", async () => { + await expectQuery({ equal: { auto: 1 } }).toContainExactly([ + { auto: 1 }, + ]) + }) + + it("fails to find nonexistent row", async () => { + await expectQuery({ equal: { auto: 0 } }).toFindNothing() + }) + }) + + describe("not equal", () => { + it("successfully finds a row", async () => { + await expectQuery({ + notEqual: { auto: 1 }, + }).toContainExactly([ + { auto: 2 }, + { auto: 3 }, + { auto: 4 }, + { auto: 5 }, + { auto: 6 }, + { auto: 7 }, + { auto: 8 }, + { auto: 9 }, + { auto: 10 }, + ]) + }) + + it("fails to find nonexistent row", async () => { + await expectQuery({ + notEqual: { auto: 0 }, + }).toContainExactly([ + { auto: 1 }, + { auto: 2 }, + { auto: 3 }, + { auto: 4 }, + { auto: 5 }, + { auto: 6 }, + { auto: 7 }, + { auto: 8 }, + { auto: 9 }, + { auto: 10 }, + ]) + }) + }) + + describe("oneOf", () => { + it("successfully finds a row", async () => { + await expectQuery({ + oneOf: { auto: [1] }, + }).toContainExactly([{ auto: 1 }]) + }) + + it("fails to find nonexistent row", async () => { + await expectQuery({ oneOf: { auto: [0] } }).toFindNothing() + }) + }) + + describe("range", () => { + it("successfully finds a row", async () => { + await expectQuery({ + range: { auto: { low: 1, high: 1 } }, + }).toContainExactly([{ auto: 1 }]) + }) + + it("successfully finds multiple rows", async () => { + await expectQuery({ + range: { auto: { low: 1, high: 2 } }, + }).toContainExactly([{ auto: 1 }, { auto: 2 }]) + }) + + it("successfully finds a row with a high bound", async () => { + await expectQuery({ + range: { auto: { low: 2, high: 2 } }, + }).toContainExactly([{ auto: 2 }]) + }) + + it("successfully finds no rows", async () => { + await expectQuery({ + range: { auto: { low: 0, high: 0 } }, + }).toFindNothing() + }) + + it("can search using just a low value", async () => { + await expectQuery({ + range: { auto: { low: 9 } }, + }).toContainExactly([{ auto: 9 }, { auto: 10 }]) + }) + + it("can search using just a high value", async () => { + await expectQuery({ + range: { auto: { high: 2 } }, + }).toContainExactly([{ auto: 1 }, { auto: 2 }]) + }) + }) + + describe("sort", () => { + it("sorts ascending", async () => { + await expectSearch({ + query: {}, + sort: "auto", + sortOrder: SortOrder.ASCENDING, + sortType: SortType.NUMBER, + }).toMatchExactly([ + { auto: 1 }, + { auto: 2 }, + { auto: 3 }, + { auto: 4 }, + { auto: 5 }, + { auto: 6 }, + { auto: 7 }, + { auto: 8 }, + { auto: 9 }, + { auto: 10 }, + ]) + }) + + it("sorts descending", async () => { + await expectSearch({ + query: {}, + sort: "auto", + sortOrder: SortOrder.DESCENDING, + sortType: SortType.NUMBER, + }).toMatchExactly([ + { auto: 10 }, + { auto: 9 }, + { auto: 8 }, + { auto: 7 }, + { auto: 6 }, + { auto: 5 }, + { auto: 4 }, + { auto: 3 }, + { auto: 
2 }, + { auto: 1 }, + ]) + }) + + // This is important for pagination. The order of results must always + // be stable or pagination will break. We don't want the user to need + // to specify an order for pagination to work. + it("is stable without a sort specified", async () => { + let { rows: fullRowList } = await config.api.row.search( + tableOrViewId, + { + tableId: tableOrViewId, + query: {}, + } + ) + + // repeat the search many times to check the first row is always the same + let bookmark: string | number | undefined, + hasNextPage: boolean | undefined = true, + rowCount: number = 0 + do { + const response = await config.api.row.search( + tableOrViewId, + { + tableId: tableOrViewId, + limit: 1, + paginate: true, + query: {}, + bookmark, + } + ) + bookmark = response.bookmark + hasNextPage = response.hasNextPage + expect(response.rows.length).toEqual(1) + const foundRow = response.rows[0] + expect(foundRow).toEqual(fullRowList[rowCount++]) + } while (hasNextPage) + }) + }) + + describe("pagination", () => { + it("should paginate through all rows", async () => { + // @ts-ignore + let bookmark: string | number = undefined + let rows: Row[] = [] + + // eslint-disable-next-line no-constant-condition + while (true) { + const response = await config.api.row.search( + tableOrViewId, + { + tableId: tableOrViewId, + limit: 3, + query: {}, + bookmark, + paginate: true, + } + ) + + rows.push(...response.rows) + + if (!response.bookmark || !response.hasNextPage) { + break + } + bookmark = response.bookmark + } + + const autoValues = rows + .map(row => row.auto) + .sort((a, b) => a - b) + expect(autoValues).toEqual([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]) + }) + }) + }) + + describe("field name 1:name", () => { + beforeAll(async () => { + tableOrViewId = await createTableOrView({ + "1:name": { name: "1:name", type: FieldType.STRING }, + }) + await createRows([{ "1:name": "bar" }, { "1:name": "foo" }]) + }) + + it("successfully finds a row", async () => { + await expectQuery({ + equal: { "1:1:name": "bar" }, + }).toContainExactly([{ "1:name": "bar" }]) + }) + + it("fails to find nonexistent row", async () => { + await expectQuery({ + equal: { "1:1:name": "none" }, + }).toFindNothing() + }) + }) + + isSql && + describe("related formulas", () => { + beforeAll(async () => { + const arrayTable = await createTable({ + name: { name: "name", type: FieldType.STRING }, + array: { + name: "array", + type: FieldType.ARRAY, + constraints: { + type: JsonFieldSubType.ARRAY, + inclusion: ["option 1", "option 2"], + }, + }, + }) + tableOrViewId = await createTableOrView({ + relationship: { + type: FieldType.LINK, + relationshipType: RelationshipType.MANY_TO_ONE, + name: "relationship", + fieldName: "relate", + tableId: arrayTable, + constraints: { + type: "array", + }, + }, + formula: { + type: FieldType.FORMULA, + name: "formula", + formula: encodeJSBinding( + `let array = [];$("relationship").forEach(rel => array = array.concat(rel.array));return array.sort().join(",")` + ), + }, + }) + const arrayRows = await Promise.all([ + config.api.row.save(arrayTable, { + name: "foo", + array: ["option 1"], + }), + config.api.row.save(arrayTable, { name: "bar", - productCat: [{ _id: productCatRows[1]._id }], - }, - { name: "baz", productCat: undefined }, + array: ["option 2"], + }), + ]) + await Promise.all([ + config.api.row.save(tableOrViewId, { + relationship: [arrayRows[0]._id, arrayRows[1]._id], + }), ]) }) - it.each([logicalOperators])( - "should allow nested ors with single conditions (with %s as root)", - async 
rootOperator => { - await expectQuery({ - [rootOperator]: { - conditions: [ - { - $or: { - conditions: [ - { - equal: { ["productCat.name"]: "foo" }, - }, - ], - }, - }, - ], - }, - }).toContainExactly([ - { - name: "foo", - productCat: [{ _id: productCatRows[0]._id }], - }, - ]) - } - ) + it("formula is correct with relationship arrays", async () => { + await expectQuery({}).toContain([ + { formula: "option 1,option 2" }, + ]) + }) + }) - it.each([logicalOperators])( - "should allow nested ors with exclusive conditions (with %s as root)", - async rootOperator => { - await expectQuery({ - [rootOperator]: { - conditions: [ - { - $or: { - conditions: [ - { - equal: { ["productCat.name"]: "foo" }, - notEqual: { ["productCat.name"]: "foo" }, - }, - ], - }, - }, - ], - }, - }).toContainExactly([ - { - name: "foo", - productCat: [{ _id: productCatRows[0]._id }], - }, - { - name: "bar", - productCat: [{ _id: productCatRows[1]._id }], - }, - { name: "baz", productCat: undefined }, - ]) - } - ) + describe("user", () => { + let user1: User + let user2: User - it("should allow nested ors with multiple conditions", async () => { + beforeAll(async () => { + user1 = await config.createUser({ _id: `us_${utils.newid()}` }) + user2 = await config.createUser({ _id: `us_${utils.newid()}` }) + + tableOrViewId = await createTableOrView({ + user: { + name: "user", + type: FieldType.BB_REFERENCE_SINGLE, + subtype: BBReferenceFieldSubType.USER, + }, + }) + + await createRows([ + { user: user1 }, + { user: user2 }, + { user: null }, + ]) + }) + + describe("equal", () => { + it("successfully finds a row", async () => { await expectQuery({ - $or: { - conditions: [ + equal: { user: user1._id }, + }).toContainExactly([{ user: { _id: user1._id } }]) + }) + + it("fails to find nonexistent row", async () => { + await expectQuery({ + equal: { user: "us_none" }, + }).toFindNothing() + }) + }) + + describe("notEqual", () => { + it("successfully finds a row", async () => { + await expectQuery({ + notEqual: { user: user1._id }, + }).toContainExactly([{ user: { _id: user2._id } }, {}]) + }) + + it("fails to find nonexistent row", async () => { + await expectQuery({ + notEqual: { user: "us_none" }, + }).toContainExactly([ + { user: { _id: user1._id } }, + { user: { _id: user2._id } }, + {}, + ]) + }) + }) + + describe("oneOf", () => { + it("successfully finds a row", async () => { + await expectQuery({ + oneOf: { user: [user1._id] }, + }).toContainExactly([{ user: { _id: user1._id } }]) + }) + + it("fails to find nonexistent row", async () => { + await expectQuery({ + oneOf: { user: ["us_none"] }, + }).toFindNothing() + }) + }) + + describe("empty", () => { + it("finds empty rows", async () => { + await expectQuery({ empty: { user: null } }).toContainExactly( + [{}] + ) + }) + }) + + describe("notEmpty", () => { + it("finds non-empty rows", async () => { + await expectQuery({ + notEmpty: { user: null }, + }).toContainExactly([ + { user: { _id: user1._id } }, + { user: { _id: user2._id } }, + ]) + }) + }) + }) + + describe("multi user", () => { + let user1: User + let user2: User + + beforeAll(async () => { + user1 = await config.createUser({ _id: `us_${utils.newid()}` }) + user2 = await config.createUser({ _id: `us_${utils.newid()}` }) + + tableOrViewId = await createTableOrView({ + users: { + name: "users", + type: FieldType.BB_REFERENCE, + subtype: BBReferenceFieldSubType.USER, + constraints: { type: "array" }, + }, + number: { + name: "number", + type: FieldType.NUMBER, + }, + }) + + await createRows([ + { number: 1, users: 
[user1] }, + { number: 2, users: [user2] }, + { number: 3, users: [user1, user2] }, + { number: 4, users: [] }, + ]) + }) + + describe("contains", () => { + it("successfully finds a row", async () => { + await expectQuery({ + contains: { users: [user1._id] }, + }).toContainExactly([ + { users: [{ _id: user1._id }] }, + { users: [{ _id: user1._id }, { _id: user2._id }] }, + ]) + }) + + it("successfully finds a row searching with a string", async () => { + await expectQuery({ + // @ts-expect-error this test specifically goes against the type to + // test that we coerce the string to an array. + contains: { "1:users": user1._id }, + }).toContainExactly([ + { users: [{ _id: user1._id }] }, + { users: [{ _id: user1._id }, { _id: user2._id }] }, + ]) + }) + + it("fails to find nonexistent row", async () => { + await expectQuery({ + contains: { users: ["us_none"] }, + }).toFindNothing() + }) + }) + + describe("notContains", () => { + it("successfully finds a row", async () => { + await expectQuery({ + notContains: { users: [user1._id] }, + }).toContainExactly([{ users: [{ _id: user2._id }] }, {}]) + }) + + it("fails to find nonexistent row", async () => { + await expectQuery({ + notContains: { users: ["us_none"] }, + }).toContainExactly([ + { users: [{ _id: user1._id }] }, + { users: [{ _id: user2._id }] }, + { users: [{ _id: user1._id }, { _id: user2._id }] }, + {}, + ]) + }) + }) + + describe("containsAny", () => { + it("successfully finds rows", async () => { + await expectQuery({ + containsAny: { users: [user1._id, user2._id] }, + }).toContainExactly([ + { users: [{ _id: user1._id }] }, + { users: [{ _id: user2._id }] }, + { users: [{ _id: user1._id }, { _id: user2._id }] }, + ]) + }) + + it("fails to find nonexistent row", async () => { + await expectQuery({ + containsAny: { users: ["us_none"] }, + }).toFindNothing() + }) + }) + + describe("multi-column equals", () => { + it("successfully finds a row", async () => { + await expectQuery({ + equal: { number: 1 }, + contains: { users: [user1._id] }, + }).toContainExactly([ + { users: [{ _id: user1._id }], number: 1 }, + ]) + }) + + it("fails to find nonexistent row", async () => { + await expectQuery({ + equal: { number: 2 }, + contains: { users: [user1._id] }, + }).toFindNothing() + }) + }) + }) + + // It also can't work for in-memory searching because the related table name + // isn't available. 
+ !isInMemory && + describe.each([ + RelationshipType.ONE_TO_MANY, + RelationshipType.MANY_TO_ONE, + RelationshipType.MANY_TO_MANY, + ])("relations (%s)", relationshipType => { + let productCategoryTable: Table, productCatRows: Row[] + + beforeAll(async () => { + const { relatedTable, tableId } = + await basicRelationshipTables(relationshipType) + tableOrViewId = tableId + productCategoryTable = relatedTable + + productCatRows = await Promise.all([ + config.api.row.save(productCategoryTable._id!, { + name: "foo", + }), + config.api.row.save(productCategoryTable._id!, { + name: "bar", + }), + ]) + + await Promise.all([ + config.api.row.save(tableOrViewId, { + name: "foo", + productCat: [productCatRows[0]._id], + }), + config.api.row.save(tableOrViewId, { + name: "bar", + productCat: [productCatRows[1]._id], + }), + config.api.row.save(tableOrViewId, { + name: "baz", + productCat: [], + }), + ]) + }) + + it("should be able to filter by relationship using column name", async () => { + await expectQuery({ + equal: { ["productCat.name"]: "foo" }, + }).toContainExactly([ + { + name: "foo", + productCat: [{ _id: productCatRows[0]._id }], + }, + ]) + }) + + it("should be able to filter by relationship using table name", async () => { + await expectQuery({ + equal: { [`${productCategoryTable.name}.name`]: "foo" }, + }).toContainExactly([ + { + name: "foo", + productCat: [{ _id: productCatRows[0]._id }], + }, + ]) + }) + + it("shouldn't return any relationship for last row", async () => { + await expectQuery({ + equal: { ["name"]: "baz" }, + }).toContainExactly([{ name: "baz", productCat: undefined }]) + }) + + describe("logical filters", () => { + const logicalOperators = [ + LogicalOperator.AND, + LogicalOperator.OR, + ] + + describe("$and", () => { + it("should allow single conditions", async () => { + await expectQuery({ + $and: { + conditions: [ + { + equal: { ["productCat.name"]: "foo" }, + }, + ], + }, + }).toContainExactly([ { - $or: { + name: "foo", + productCat: [{ _id: productCatRows[0]._id }], + }, + ]) + }) + + it("should allow exclusive conditions", async () => { + await expectQuery({ + $and: { + conditions: [ + { + equal: { ["productCat.name"]: "foo" }, + notEqual: { ["productCat.name"]: "foo" }, + }, + ], + }, + }).toContainExactly([]) + }) + + it.each([logicalOperators])( + "should allow nested ands with single conditions (with %s as root)", + async rootOperator => { + await expectQuery({ + [rootOperator]: { conditions: [ { - equal: { ["productCat.name"]: "foo" }, + $and: { + conditions: [ + { + equal: { ["productCat.name"]: "foo" }, + }, + ], + }, }, ], }, - notEqual: { ["productCat.name"]: "foo" }, + }).toContainExactly([ + { + name: "foo", + productCat: [{ _id: productCatRows[0]._id }], + }, + ]) + } + ) + + it.each([logicalOperators])( + "should allow nested ands with exclusive conditions (with %s as root)", + async rootOperator => { + await expectQuery({ + [rootOperator]: { + conditions: [ + { + $and: { + conditions: [ + { + equal: { ["productCat.name"]: "foo" }, + notEqual: { ["productCat.name"]: "foo" }, + }, + ], + }, + }, + ], + }, + }).toContainExactly([]) + } + ) + + it.each([logicalOperators])( + "should allow nested ands with multiple conditions (with %s as root)", + async rootOperator => { + await expectQuery({ + [rootOperator]: { + conditions: [ + { + $and: { + conditions: [ + { + equal: { ["productCat.name"]: "foo" }, + }, + ], + }, + notEqual: { ["productCat.name"]: "foo" }, + }, + ], + }, + }).toContainExactly([]) + } + ) + }) + + describe("$ors", () => { + 
it("should allow single conditions", async () => { + await expectQuery({ + $or: { + conditions: [ + { + equal: { ["productCat.name"]: "foo" }, + }, + ], }, - ], + }).toContainExactly([ + { + name: "foo", + productCat: [{ _id: productCatRows[0]._id }], + }, + ]) + }) + + it("should allow exclusive conditions", async () => { + await expectQuery({ + $or: { + conditions: [ + { + equal: { ["productCat.name"]: "foo" }, + notEqual: { ["productCat.name"]: "foo" }, + }, + ], + }, + }).toContainExactly([ + { + name: "foo", + productCat: [{ _id: productCatRows[0]._id }], + }, + { + name: "bar", + productCat: [{ _id: productCatRows[1]._id }], + }, + { name: "baz", productCat: undefined }, + ]) + }) + + it.each([logicalOperators])( + "should allow nested ors with single conditions (with %s as root)", + async rootOperator => { + await expectQuery({ + [rootOperator]: { + conditions: [ + { + $or: { + conditions: [ + { + equal: { ["productCat.name"]: "foo" }, + }, + ], + }, + }, + ], + }, + }).toContainExactly([ + { + name: "foo", + productCat: [{ _id: productCatRows[0]._id }], + }, + ]) + } + ) + + it.each([logicalOperators])( + "should allow nested ors with exclusive conditions (with %s as root)", + async rootOperator => { + await expectQuery({ + [rootOperator]: { + conditions: [ + { + $or: { + conditions: [ + { + equal: { ["productCat.name"]: "foo" }, + notEqual: { ["productCat.name"]: "foo" }, + }, + ], + }, + }, + ], + }, + }).toContainExactly([ + { + name: "foo", + productCat: [{ _id: productCatRows[0]._id }], + }, + { + name: "bar", + productCat: [{ _id: productCatRows[1]._id }], + }, + { name: "baz", productCat: undefined }, + ]) + } + ) + + it("should allow nested ors with multiple conditions", async () => { + await expectQuery({ + $or: { + conditions: [ + { + $or: { + conditions: [ + { + equal: { ["productCat.name"]: "foo" }, + }, + ], + }, + notEqual: { ["productCat.name"]: "foo" }, + }, + ], + }, + }).toContainExactly([ + { + name: "foo", + productCat: [{ _id: productCatRows[0]._id }], + }, + { + name: "bar", + productCat: [{ _id: productCatRows[1]._id }], + }, + { name: "baz", productCat: undefined }, + ]) + }) + }) + }) + }) + + isSql && + describe.each([ + RelationshipType.MANY_TO_ONE, + RelationshipType.MANY_TO_MANY, + ])("big relations (%s)", relationshipType => { + beforeAll(async () => { + const { relatedTable, tableId } = + await basicRelationshipTables(relationshipType) + tableOrViewId = tableId + const mainRow = await config.api.row.save(tableOrViewId, { + name: "foo", + }) + for (let i = 0; i < 11; i++) { + await config.api.row.save(relatedTable._id!, { + name: i, + product: [mainRow._id!], + }) + } + }) + + it("can only pull 10 related rows", async () => { + await withCoreEnv( + { SQL_MAX_RELATED_ROWS: "10" }, + async () => { + const response = await expectQuery({}).toContain([ + { name: "foo" }, + ]) + expect(response.rows[0].productCat).toBeArrayOfSize(10) + } + ) + }) + + it("can pull max rows when env not set (defaults to 500)", async () => { + const response = await expectQuery({}).toContain([ + { name: "foo" }, + ]) + expect(response.rows[0].productCat).toBeArrayOfSize(11) + }) + }) + + isSql && + describe("relations to same table", () => { + let relatedTable: string, relatedRows: Row[] + + beforeAll(async () => { + relatedTable = await createTable({ + name: { name: "name", type: FieldType.STRING }, + }) + tableOrViewId = await createTableOrView({ + name: { name: "name", type: FieldType.STRING }, + related1: { + type: FieldType.LINK, + name: "related1", + fieldName: "main1", + 
tableId: relatedTable, + relationshipType: RelationshipType.MANY_TO_MANY, + }, + related2: { + type: FieldType.LINK, + name: "related2", + fieldName: "main2", + tableId: relatedTable, + relationshipType: RelationshipType.MANY_TO_MANY, + }, + }) + relatedRows = await Promise.all([ + config.api.row.save(relatedTable, { name: "foo" }), + config.api.row.save(relatedTable, { name: "bar" }), + config.api.row.save(relatedTable, { name: "baz" }), + config.api.row.save(relatedTable, { name: "boo" }), + ]) + await Promise.all([ + config.api.row.save(tableOrViewId, { + name: "test", + related1: [relatedRows[0]._id!], + related2: [relatedRows[1]._id!], + }), + config.api.row.save(tableOrViewId, { + name: "test2", + related1: [relatedRows[2]._id!], + related2: [relatedRows[3]._id!], + }), + config.api.row.save(tableOrViewId, { + name: "test3", + related1: [relatedRows[1]._id], + related2: [relatedRows[2]._id!], + }), + ]) + }) + + it("should be able to relate to same table", async () => { + await expectSearch({ + query: {}, + }).toContainExactly([ + { + name: "test", + related1: [{ _id: relatedRows[0]._id }], + related2: [{ _id: relatedRows[1]._id }], + }, + { + name: "test2", + related1: [{ _id: relatedRows[2]._id }], + related2: [{ _id: relatedRows[3]._id }], + }, + { + name: "test3", + related1: [{ _id: relatedRows[1]._id }], + related2: [{ _id: relatedRows[2]._id }], + }, + ]) + }) + + it("should be able to filter via the first relation field with equal", async () => { + await expectSearch({ + query: { + equal: { + ["related1.name"]: "baz", + }, }, }).toContainExactly([ { - name: "foo", - productCat: [{ _id: productCatRows[0]._id }], + name: "test2", + related1: [{ _id: relatedRows[2]._id }], }, + ]) + }) + + it("should be able to filter via the second relation field with not equal", async () => { + await expectSearch({ + query: { + notEqual: { + ["1:related2.name"]: "foo", + ["2:related2.name"]: "baz", + ["3:related2.name"]: "boo", + }, + }, + }).toContainExactly([ { - name: "bar", - productCat: [{ _id: productCatRows[1]._id }], + name: "test", + }, + ]) + }) + + it("should be able to filter on both fields", async () => { + await expectSearch({ + query: { + notEqual: { + ["related1.name"]: "foo", + ["related2.name"]: "baz", + }, + }, + }).toContainExactly([ + { + name: "test2", }, - { name: "baz", productCat: undefined }, ]) }) }) - }) - }) - isSql && - describe.each([ - RelationshipType.MANY_TO_ONE, - RelationshipType.MANY_TO_MANY, - ])("big relations (%s)", relationshipType => { - beforeAll(async () => { - const { relatedTable, tableId } = await basicRelationshipTables( - relationshipType - ) - tableOrViewId = tableId - const mainRow = await config.api.row.save(tableOrViewId, { - name: "foo", - }) - for (let i = 0; i < 11; i++) { - await config.api.row.save(relatedTable._id!, { - name: i, - product: [mainRow._id!], - }) - } - }) - - it("can only pull 10 related rows", async () => { - await withCoreEnv({ SQL_MAX_RELATED_ROWS: "10" }, async () => { - const response = await expectQuery({}).toContain([ - { name: "foo" }, - ]) - expect(response.rows[0].productCat).toBeArrayOfSize(10) - }) - }) - - it("can pull max rows when env not set (defaults to 500)", async () => { - const response = await expectQuery({}).toContain([ - { name: "foo" }, - ]) - expect(response.rows[0].productCat).toBeArrayOfSize(11) - }) - }) - - isSql && - describe("relations to same table", () => { - let relatedTable: string, relatedRows: Row[] - - beforeAll(async () => { - relatedTable = await createTable({ - name: { name: 
"name", type: FieldType.STRING }, - }) - tableOrViewId = await createTableOrView({ - name: { name: "name", type: FieldType.STRING }, - related1: { - type: FieldType.LINK, - name: "related1", - fieldName: "main1", - tableId: relatedTable, - relationshipType: RelationshipType.MANY_TO_MANY, - }, - related2: { - type: FieldType.LINK, - name: "related2", - fieldName: "main2", - tableId: relatedTable, - relationshipType: RelationshipType.MANY_TO_MANY, - }, - }) - relatedRows = await Promise.all([ - config.api.row.save(relatedTable, { name: "foo" }), - config.api.row.save(relatedTable, { name: "bar" }), - config.api.row.save(relatedTable, { name: "baz" }), - config.api.row.save(relatedTable, { name: "boo" }), - ]) - await Promise.all([ - config.api.row.save(tableOrViewId, { - name: "test", - related1: [relatedRows[0]._id!], - related2: [relatedRows[1]._id!], - }), - config.api.row.save(tableOrViewId, { - name: "test2", - related1: [relatedRows[2]._id!], - related2: [relatedRows[3]._id!], - }), - config.api.row.save(tableOrViewId, { - name: "test3", - related1: [relatedRows[1]._id], - related2: [relatedRows[2]._id!], - }), - ]) - }) - - it("should be able to relate to same table", async () => { - await expectSearch({ - query: {}, - }).toContainExactly([ - { - name: "test", - related1: [{ _id: relatedRows[0]._id }], - related2: [{ _id: relatedRows[1]._id }], - }, - { - name: "test2", - related1: [{ _id: relatedRows[2]._id }], - related2: [{ _id: relatedRows[3]._id }], - }, - { - name: "test3", - related1: [{ _id: relatedRows[1]._id }], - related2: [{ _id: relatedRows[2]._id }], - }, - ]) - }) - - it("should be able to filter via the first relation field with equal", async () => { - await expectSearch({ - query: { - equal: { - ["related1.name"]: "baz", - }, - }, - }).toContainExactly([ - { - name: "test2", - related1: [{ _id: relatedRows[2]._id }], - }, - ]) - }) - - it("should be able to filter via the second relation field with not equal", async () => { - await expectSearch({ - query: { - notEqual: { - ["1:related2.name"]: "foo", - ["2:related2.name"]: "baz", - ["3:related2.name"]: "boo", - }, - }, - }).toContainExactly([ - { - name: "test", - }, - ]) - }) - - it("should be able to filter on both fields", async () => { - await expectSearch({ - query: { - notEqual: { - ["related1.name"]: "foo", - ["related2.name"]: "baz", - }, - }, - }).toContainExactly([ - { - name: "test2", - }, - ]) - }) - }) - - isInternal && - describe("no column error backwards compat", () => { - beforeAll(async () => { - tableOrViewId = await createTableOrView({ - name: { - name: "name", - type: FieldType.STRING, - }, - }) - }) - - it("shouldn't error when column doesn't exist", async () => { - await expectSearch({ - query: { - string: { - "1:something": "a", - }, - }, - }).toMatch({ rows: [] }) - }) - }) - - describe("row counting", () => { - beforeAll(async () => { - tableOrViewId = await createTableOrView({ - name: { - name: "name", - type: FieldType.STRING, - }, - }) - await createRows([{ name: "a" }, { name: "b" }]) - }) - - it("should be able to count rows when option set", async () => { - await expectSearch({ - countRows: true, - query: { - notEmpty: { - name: true, - }, - }, - }).toMatch({ totalRows: 2, rows: expect.any(Array) }) - }) - - it("shouldn't count rows when option is not set", async () => { - await expectSearch({ - countRows: false, - query: { - notEmpty: { - name: true, - }, - }, - }).toNotHaveProperty(["totalRows"]) - }) - }) - - describe("Invalid column definitions", () => { - beforeAll(async () => { - 
// need to create an invalid table - means ignoring typescript - tableOrViewId = await createTableOrView({ - // @ts-ignore - invalid: { - type: FieldType.STRING, - }, - name: { - name: "name", - type: FieldType.STRING, - }, - }) - await createRows([ - { name: "foo", invalid: "id1" }, - { name: "bar", invalid: "id2" }, - ]) - }) - - it("can get rows with all table data", async () => { - await expectSearch({ - query: {}, - }).toContain([ - { name: "foo", invalid: "id1" }, - { name: "bar", invalid: "id2" }, - ]) - }) - }) - - describe.each(["data_name_test", "name_data_test", "name_test_data_"])( - "special (%s) case", - column => { - beforeAll(async () => { - tableOrViewId = await createTableOrView({ - [column]: { - name: column, - type: FieldType.STRING, - }, - }) - await createRows([{ [column]: "a" }, { [column]: "b" }]) - }) - - it("should be able to query a column with data_ in it", async () => { - await expectSearch({ - query: { - equal: { - [`1:${column}`]: "a", - }, - }, - }).toContainExactly([{ [column]: "a" }]) - }) - } - ) - - isInternal && - describe("sample data", () => { - beforeAll(async () => { - await config.api.application.addSampleData(config.appId!) - tableOrViewId = DEFAULT_EMPLOYEE_TABLE_SCHEMA._id! - rows = await config.api.row.fetch(tableOrViewId) - }) - - it("should be able to search sample data", async () => { - await expectSearch({ - query: {}, - }).toContain([ - { - "First Name": "Mandy", - }, - ]) - }) - }) - - describe.each([ - { low: "2024-07-03T00:00:00.000Z", high: "9999-00-00T00:00:00.000Z" }, - { low: "2024-07-03T00:00:00.000Z", high: "9998-00-00T00:00:00.000Z" }, - { low: "0000-00-00T00:00:00.000Z", high: "2024-07-04T00:00:00.000Z" }, - { low: "0001-00-00T00:00:00.000Z", high: "2024-07-04T00:00:00.000Z" }, - ])("date special cases", ({ low, high }) => { - const earlyDate = "2024-07-03T10:00:00.000Z", - laterDate = "2024-07-03T11:00:00.000Z" - beforeAll(async () => { - tableOrViewId = await createTableOrView({ - date: { - name: "date", - type: FieldType.DATETIME, - }, - }) - await createRows([{ date: earlyDate }, { date: laterDate }]) - }) - - it("should be able to handle a date search", async () => { - await expectSearch({ - query: { - range: { - "1:date": { low, high }, - }, - }, - }).toContainExactly([{ date: earlyDate }, { date: laterDate }]) - }) - }) - - describe.each([ - "名前", // Japanese for "name" - "Benutzer-ID", // German for "user ID", includes a hyphen - "numéro", // French for "number", includes an accent - "år", // Swedish for "year", includes a ring above - "naïve", // English word borrowed from French, includes an umlaut - "الاسم", // Arabic for "name" - "оплата", // Russian for "payment" - "पता", // Hindi for "address" - "用戶名", // Chinese for "username" - "çalışma_zamanı", // Turkish for "runtime", includes an underscore and a cedilla - "preço", // Portuguese for "price", includes a cedilla - "사용자명", // Korean for "username" - "usuario_ñoño", // Spanish, uses an underscore and includes "ñ" - "файл", // Bulgarian for "file" - "δεδομένα", // Greek for "data" - "geändert_am", // German for "modified on", includes an umlaut - "ব্যবহারকারীর_নাম", // Bengali for "user name", includes an underscore - "São_Paulo", // Portuguese, includes an underscore and a tilde - "età", // Italian for "age", includes an accent - "ชื่อผู้ใช้", // Thai for "username" - ])("non-ascii column name: %s", name => { - beforeAll(async () => { - tableOrViewId = await createTableOrView({ - [name]: { - name, - type: FieldType.STRING, - }, - }) - await createRows([{ [name]: 
"a" }, { [name]: "b" }]) - }) - - it("should be able to query a column with non-ascii characters", async () => { - await expectSearch({ - query: { - equal: { - [`1:${name}`]: "a", - }, - }, - }).toContainExactly([{ [name]: "a" }]) - }) - }) - - // This is currently not supported in external datasources, it produces SQL - // errors at time of writing. We supported it (potentially by accident) in - // Lucene, though, so we need to make sure it's supported in SQS as well. We - // found real cases in production of column names ending in a space. - isInternal && - describe("space at end of column name", () => { - beforeAll(async () => { - tableOrViewId = await createTableOrView({ - "name ": { - name: "name ", - type: FieldType.STRING, - }, - }) - await createRows([{ ["name "]: "foo" }, { ["name "]: "bar" }]) - }) - - it("should be able to query a column that ends with a space", async () => { - await expectSearch({ - query: { - string: { - "name ": "foo", - }, - }, - }).toContainExactly([{ ["name "]: "foo" }]) - }) - - it("should be able to query a column that ends with a space using numeric notation", async () => { - await expectSearch({ - query: { - string: { - "1:name ": "foo", - }, - }, - }).toContainExactly([{ ["name "]: "foo" }]) - }) - }) - - isInternal && - describe("space at start of column name", () => { - beforeAll(async () => { - tableOrViewId = await createTableOrView({ - " name": { - name: " name", - type: FieldType.STRING, - }, - }) - await createRows([{ [" name"]: "foo" }, { [" name"]: "bar" }]) - }) - - it("should be able to query a column that starts with a space", async () => { - await expectSearch({ - query: { - string: { - " name": "foo", - }, - }, - }).toContainExactly([{ [" name"]: "foo" }]) - }) - - it("should be able to query a column that starts with a space using numeric notation", async () => { - await expectSearch({ - query: { - string: { - "1: name": "foo", - }, - }, - }).toContainExactly([{ [" name"]: "foo" }]) - }) - }) - - isInternal && - !isView && - describe("duplicate columns", () => { - beforeAll(async () => { - tableOrViewId = await createTableOrView({ - name: { - name: "name", - type: FieldType.STRING, - }, - }) - await context.doInAppContext(config.getAppId(), async () => { - const db = context.getAppDB() - const tableDoc = await db.get
(tableOrViewId) - tableDoc.schema.Name = { - name: "Name", - type: FieldType.STRING, - } - try { - // remove the SQLite definitions so that they can be rebuilt as part of the search - const sqliteDoc = await db.get(SQLITE_DESIGN_DOC_ID) - await db.remove(sqliteDoc) - } catch (err) { - // no-op - } - }) - await createRows([{ name: "foo", Name: "bar" }]) - }) - - it("should handle invalid duplicate column names", async () => { - await expectSearch({ - query: {}, - }).toContainExactly([{ name: "foo" }]) - }) - }) - - !isInMemory && - describe("search by _id", () => { - let row: Row - - beforeAll(async () => { - const toRelateTable = await createTable({ - name: { - name: "name", - type: FieldType.STRING, - }, - }) - tableOrViewId = await createTableOrView({ - name: { - name: "name", - type: FieldType.STRING, - }, - rel: { - name: "rel", - type: FieldType.LINK, - relationshipType: RelationshipType.MANY_TO_MANY, - tableId: toRelateTable, - fieldName: "rel", - }, - }) - const [row1, row2] = await Promise.all([ - config.api.row.save(toRelateTable, { name: "tag 1" }), - config.api.row.save(toRelateTable, { name: "tag 2" }), - ]) - row = await config.api.row.save(tableOrViewId, { - name: "product 1", - rel: [row1._id, row2._id], - }) - }) - - it("can filter by the row ID with limit 1", async () => { - await expectSearch({ - query: { - equal: { _id: row._id }, - }, - limit: 1, - }).toContainExactly([row]) - }) - }) - - !isInternal && - describe("search by composite key", () => { - beforeAll(async () => { - const table = await config.api.table.save( - tableForDatasource(datasource, { - schema: { - idColumn1: { - name: "idColumn1", - type: FieldType.NUMBER, - }, - idColumn2: { - name: "idColumn2", - type: FieldType.NUMBER, - }, - }, - primary: ["idColumn1", "idColumn2"], - }) - ) - tableOrViewId = table._id! 
- await createRows([{ idColumn1: 1, idColumn2: 2 }]) - }) - - it("can filter by the row ID with limit 1", async () => { - await expectSearch({ - query: { - equal: { _id: generateRowIdField([1, 2]) }, - }, - limit: 1, - }).toContain([ - { - idColumn1: 1, - idColumn2: 2, - }, - ]) - }) - }) - - isSql && - describe("primaryDisplay", () => { - beforeAll(async () => { - let toRelateTableId = await createTable({ - name: { - name: "name", - type: FieldType.STRING, - }, - }) - tableOrViewId = await createTableOrView({ - name: { - name: "name", - type: FieldType.STRING, - }, - link: { - name: "link", - type: FieldType.LINK, - relationshipType: RelationshipType.MANY_TO_ONE, - tableId: toRelateTableId, - fieldName: "link", - }, - }) - - const toRelateTable = await config.api.table.get(toRelateTableId) - await config.api.table.save({ - ...toRelateTable, - primaryDisplay: "link", - }) - const relatedRows = await Promise.all([ - config.api.row.save(toRelateTable._id!, { name: "related" }), - ]) - await config.api.row.save(tableOrViewId, { - name: "test", - link: relatedRows.map(row => row._id), - }) - }) - - it("should be able to query, primary display on related table shouldn't be used", async () => { - // this test makes sure that if a relationship has been specified as the primary display on a table - // it is ignored and another column is used instead - await expectQuery({}).toContain([ - { name: "test", link: [{ primaryDisplay: "related" }] }, - ]) - }) - }) - - describe("$and", () => { - beforeAll(async () => { - tableOrViewId = await createTableOrView({ - age: { name: "age", type: FieldType.NUMBER }, - name: { name: "name", type: FieldType.STRING }, - }) - await createRows([ - { age: 1, name: "Jane" }, - { age: 10, name: "Jack" }, - { age: 7, name: "Hanna" }, - { age: 8, name: "Jan" }, - ]) - }) - - it("successfully finds a row for one level condition", async () => { - await expectQuery({ - $and: { - conditions: [ - { equal: { age: 10 } }, - { equal: { name: "Jack" } }, - ], - }, - }).toContainExactly([{ age: 10, name: "Jack" }]) - }) - - it("successfully finds a row for one level with multiple conditions", async () => { - await expectQuery({ - $and: { - conditions: [ - { equal: { age: 10 } }, - { equal: { name: "Jack" } }, - ], - }, - }).toContainExactly([{ age: 10, name: "Jack" }]) - }) - - it("successfully finds multiple rows for one level with multiple conditions", async () => { - await expectQuery({ - $and: { - conditions: [ - { range: { age: { low: 1, high: 9 } } }, - { string: { name: "Ja" } }, - ], - }, - }).toContainExactly([ - { age: 1, name: "Jane" }, - { age: 8, name: "Jan" }, - ]) - }) - - it("successfully finds rows for nested filters", async () => { - await expectQuery({ - $and: { - conditions: [ - { - $and: { - conditions: [ - { - range: { age: { low: 1, high: 10 } }, - }, - { string: { name: "Ja" } }, - ], - }, - equal: { name: "Jane" }, - }, - ], - }, - }).toContainExactly([{ age: 1, name: "Jane" }]) - }) - - it("returns nothing when filtering out all data", async () => { - await expectQuery({ - $and: { - conditions: [ - { equal: { age: 7 } }, - { equal: { name: "Jack" } }, - ], - }, - }).toFindNothing() - }) - - !isInMemory && - it("validates conditions that are not objects", async () => { - await expect( - expectQuery({ - $and: { - conditions: [ - { equal: { age: 10 } }, - "invalidCondition" as any, - ], - }, - }).toFindNothing() - ).rejects.toThrow( - 'Invalid body - "query.$and.conditions[1]" must be of type object' - ) - }) - - !isInMemory && - it("validates $and without 
conditions", async () => { - await expect( - expectQuery({ - $and: { - conditions: [ - { equal: { age: 10 } }, - { - $and: { - conditions: undefined as any, - }, - }, - ], - }, - }).toFindNothing() - ).rejects.toThrow( - 'Invalid body - "query.$and.conditions[1].$and.conditions" is required' - ) - }) - - // onEmptyFilter cannot be sent to view searches - !isView && - it("returns no rows when onEmptyFilter set to none", async () => { - await expectSearch({ - query: { - onEmptyFilter: EmptyFilterOption.RETURN_NONE, - $and: { - conditions: [{ equal: { name: "" } }], - }, - }, - }).toFindNothing() - }) - - it("returns all rows when onEmptyFilter set to all", async () => { - await expectSearch({ - query: { - onEmptyFilter: EmptyFilterOption.RETURN_ALL, - $and: { - conditions: [{ equal: { name: "" } }], - }, - }, - }).toHaveLength(4) - }) - }) - - describe("$or", () => { - beforeAll(async () => { - tableOrViewId = await createTableOrView({ - age: { name: "age", type: FieldType.NUMBER }, - name: { name: "name", type: FieldType.STRING }, - }) - await createRows([ - { age: 1, name: "Jane" }, - { age: 10, name: "Jack" }, - { age: 7, name: "Hanna" }, - { age: 8, name: "Jan" }, - ]) - }) - - it("successfully finds a row for one level condition", async () => { - await expectQuery({ - $or: { - conditions: [ - { equal: { age: 7 } }, - { equal: { name: "Jack" } }, - ], - }, - }).toContainExactly([ - { age: 10, name: "Jack" }, - { age: 7, name: "Hanna" }, - ]) - }) - - it("successfully finds a row for one level with multiple conditions", async () => { - await expectQuery({ - $or: { - conditions: [ - { equal: { age: 7 } }, - { equal: { name: "Jack" } }, - ], - }, - }).toContainExactly([ - { age: 10, name: "Jack" }, - { age: 7, name: "Hanna" }, - ]) - }) - - it("successfully finds multiple rows for one level with multiple conditions", async () => { - await expectQuery({ - $or: { - conditions: [ - { range: { age: { low: 1, high: 9 } } }, - { string: { name: "Jan" } }, - ], - }, - }).toContainExactly([ - { age: 1, name: "Jane" }, - { age: 7, name: "Hanna" }, - { age: 8, name: "Jan" }, - ]) - }) - - it("successfully finds rows for nested filters", async () => { - await expectQuery({ - $or: { - conditions: [ - { - $or: { - conditions: [ - { - range: { age: { low: 1, high: 7 } }, - }, - { string: { name: "Jan" } }, - ], - }, - equal: { name: "Jane" }, - }, - ], - }, - }).toContainExactly([ - { age: 1, name: "Jane" }, - { age: 7, name: "Hanna" }, - { age: 8, name: "Jan" }, - ]) - }) - - it("returns nothing when filtering out all data", async () => { - await expectQuery({ - $or: { - conditions: [ - { equal: { age: 6 } }, - { equal: { name: "John" } }, - ], - }, - }).toFindNothing() - }) - - it("can nest $and under $or filters", async () => { - await expectQuery({ - $or: { - conditions: [ - { - $and: { - conditions: [ - { - range: { age: { low: 1, high: 8 } }, - }, - { equal: { name: "Jan" } }, - ], - }, - equal: { name: "Jane" }, - }, - ], - }, - }).toContainExactly([ - { age: 1, name: "Jane" }, - { age: 8, name: "Jan" }, - ]) - }) - - it("can nest $or under $and filters", async () => { - await expectQuery({ - $and: { - conditions: [ - { - $or: { - conditions: [ - { - range: { age: { low: 1, high: 8 } }, - }, - { equal: { name: "Jan" } }, - ], - }, - equal: { name: "Jane" }, - }, - ], - }, - }).toContainExactly([{ age: 1, name: "Jane" }]) - }) - - // onEmptyFilter cannot be sent to view searches - !isView && - it("returns no rows when onEmptyFilter set to none", async () => { - await expectSearch({ - query: { - 
onEmptyFilter: EmptyFilterOption.RETURN_NONE, - $or: { - conditions: [{ equal: { name: "" } }], - }, - }, - }).toFindNothing() - }) - - it("returns all rows when onEmptyFilter set to all", async () => { - await expectSearch({ - query: { - onEmptyFilter: EmptyFilterOption.RETURN_ALL, - $or: { - conditions: [{ equal: { name: "" } }], - }, - }, - }).toHaveLength(4) - }) - }) - - isSql && - describe("max related columns", () => { - let relatedRows: Row[] - - beforeAll(async () => { - const relatedSchema: TableSchema = {} - const row: Row = {} - for (let i = 0; i < 100; i++) { - const name = `column${i}` - relatedSchema[name] = { name, type: FieldType.NUMBER } - row[name] = i - } - const relatedTable = await createTable(relatedSchema) - tableOrViewId = await createTableOrView({ - name: { name: "name", type: FieldType.STRING }, - related1: { - type: FieldType.LINK, - name: "related1", - fieldName: "main1", - tableId: relatedTable, - relationshipType: RelationshipType.MANY_TO_MANY, - }, - }) - relatedRows = await Promise.all([ - config.api.row.save(relatedTable, row), - ]) - await config.api.row.save(tableOrViewId, { - name: "foo", - related1: [relatedRows[0]._id], - }) - }) - - it("retrieve the row with relationships", async () => { - await expectQuery({}).toContainExactly([ - { - name: "foo", - related1: [{ _id: relatedRows[0]._id }], - }, - ]) - }) - }) - - !isInternal && - describe("SQL injection", () => { - const badStrings = [ - "1; DROP TABLE %table_name%;", - "1; DELETE FROM %table_name%;", - "1; UPDATE %table_name% SET name = 'foo';", - "1; INSERT INTO %table_name% (name) VALUES ('foo');", - "' OR '1'='1' --", - "'; DROP TABLE %table_name%; --", - "' OR 1=1 --", - "' UNION SELECT null, null, null; --", - "' AND (SELECT COUNT(*) FROM %table_name%) > 0 --", - "\"; EXEC xp_cmdshell('dir'); --", - "\"' OR 'a'='a", - "OR 1=1;", - "'; SHUTDOWN --", - ] - - describe.each(badStrings)("bad string: %s", badStringTemplate => { - // The SQL that knex generates when you try to use a double quote in a - // field name is always invalid and never works, so we skip it for these - // tests. 
- const skipFieldNameCheck = - isOracle && badStringTemplate.includes('"') - - !skipFieldNameCheck && - it("should not allow SQL injection as a field name", async () => { - const tableOrViewId = await createTableOrView() - const table = await getTable(tableOrViewId) - const badString = badStringTemplate.replace( - /%table_name%/g, - table.name - ) - - await config.api.table.save({ - ...table, - schema: { - ...table.schema, - [badString]: { name: badString, type: FieldType.STRING }, + isInternal && + describe("no column error backwards compat", () => { + beforeAll(async () => { + tableOrViewId = await createTableOrView({ + name: { + name: "name", + type: FieldType.STRING, }, }) - - if (docIds.isViewId(tableOrViewId)) { - const view = await config.api.viewV2.get(tableOrViewId) - await config.api.viewV2.update({ - ...view, - schema: { - [badString]: { visible: true }, - }, - }) - } - - await config.api.row.save(tableOrViewId, { - [badString]: "foo", - }) - - await assertTableExists(table) - await assertTableNumRows(table, 1) - - const { rows } = await config.api.row.search( - tableOrViewId, - { query: {} }, - { status: 200 } - ) - - expect(rows).toHaveLength(1) - - await assertTableExists(table) - await assertTableNumRows(table, 1) }) - it("should not allow SQL injection as a field value", async () => { - const tableOrViewId = await createTableOrView({ - foo: { - name: "foo", + it("shouldn't error when column doesn't exist", async () => { + await expectSearch({ + query: { + string: { + "1:something": "a", + }, + }, + }).toMatch({ rows: [] }) + }) + }) + + describe("row counting", () => { + beforeAll(async () => { + tableOrViewId = await createTableOrView({ + name: { + name: "name", type: FieldType.STRING, }, }) - const table = await getTable(tableOrViewId) - const badString = badStringTemplate.replace( - /%table_name%/g, - table.name - ) + await createRows([{ name: "a" }, { name: "b" }]) + }) - await config.api.row.save(tableOrViewId, { foo: "foo" }) + it("should be able to count rows when option set", async () => { + await expectSearch({ + countRows: true, + query: { + notEmpty: { + name: true, + }, + }, + }).toMatch({ totalRows: 2, rows: expect.any(Array) }) + }) - await assertTableExists(table) - await assertTableNumRows(table, 1) - - const { rows } = await config.api.row.search( - tableOrViewId, - { query: { equal: { foo: badString } } }, - { status: 200 } - ) - - expect(rows).toBeEmpty() - await assertTableExists(table) - await assertTableNumRows(table, 1) + it("shouldn't count rows when option is not set", async () => { + await expectSearch({ + countRows: false, + query: { + notEmpty: { + name: true, + }, + }, + }).toNotHaveProperty(["totalRows"]) }) }) - }) + + describe("Invalid column definitions", () => { + beforeAll(async () => { + // need to create an invalid table - means ignoring typescript + tableOrViewId = await createTableOrView({ + // @ts-ignore + invalid: { + type: FieldType.STRING, + }, + name: { + name: "name", + type: FieldType.STRING, + }, + }) + await createRows([ + { name: "foo", invalid: "id1" }, + { name: "bar", invalid: "id2" }, + ]) + }) + + it("can get rows with all table data", async () => { + await expectSearch({ + query: {}, + }).toContain([ + { name: "foo", invalid: "id1" }, + { name: "bar", invalid: "id2" }, + ]) + }) + }) + + describe.each([ + "data_name_test", + "name_data_test", + "name_test_data_", + ])("special (%s) case", column => { + beforeAll(async () => { + tableOrViewId = await createTableOrView({ + [column]: { + name: column, + type: 
FieldType.STRING, + }, + }) + await createRows([{ [column]: "a" }, { [column]: "b" }]) + }) + + it("should be able to query a column with data_ in it", async () => { + await expectSearch({ + query: { + equal: { + [`1:${column}`]: "a", + }, + }, + }).toContainExactly([{ [column]: "a" }]) + }) + }) + + isInternal && + describe("sample data", () => { + beforeAll(async () => { + await config.api.application.addSampleData(config.appId!) + tableOrViewId = DEFAULT_EMPLOYEE_TABLE_SCHEMA._id! + rows = await config.api.row.fetch(tableOrViewId) + }) + + it("should be able to search sample data", async () => { + await expectSearch({ + query: {}, + }).toContain([ + { + "First Name": "Mandy", + }, + ]) + }) + }) + + describe.each([ + { + low: "2024-07-03T00:00:00.000Z", + high: "9999-00-00T00:00:00.000Z", + }, + { + low: "2024-07-03T00:00:00.000Z", + high: "9998-00-00T00:00:00.000Z", + }, + { + low: "0000-00-00T00:00:00.000Z", + high: "2024-07-04T00:00:00.000Z", + }, + { + low: "0001-00-00T00:00:00.000Z", + high: "2024-07-04T00:00:00.000Z", + }, + ])("date special cases", ({ low, high }) => { + const earlyDate = "2024-07-03T10:00:00.000Z", + laterDate = "2024-07-03T11:00:00.000Z" + beforeAll(async () => { + tableOrViewId = await createTableOrView({ + date: { + name: "date", + type: FieldType.DATETIME, + }, + }) + await createRows([{ date: earlyDate }, { date: laterDate }]) + }) + + it("should be able to handle a date search", async () => { + await expectSearch({ + query: { + range: { + "1:date": { low, high }, + }, + }, + }).toContainExactly([{ date: earlyDate }, { date: laterDate }]) + }) + }) + + describe.each([ + "名前", // Japanese for "name" + "Benutzer-ID", // German for "user ID", includes a hyphen + "numéro", // French for "number", includes an accent + "år", // Swedish for "year", includes a ring above + "naïve", // English word borrowed from French, includes an umlaut + "الاسم", // Arabic for "name" + "оплата", // Russian for "payment" + "पता", // Hindi for "address" + "用戶名", // Chinese for "username" + "çalışma_zamanı", // Turkish for "runtime", includes an underscore and a cedilla + "preço", // Portuguese for "price", includes a cedilla + "사용자명", // Korean for "username" + "usuario_ñoño", // Spanish, uses an underscore and includes "ñ" + "файл", // Bulgarian for "file" + "δεδομένα", // Greek for "data" + "geändert_am", // German for "modified on", includes an umlaut + "ব্যবহারকারীর_নাম", // Bengali for "user name", includes an underscore + "São_Paulo", // Portuguese, includes an underscore and a tilde + "età", // Italian for "age", includes an accent + "ชื่อผู้ใช้", // Thai for "username" + ])("non-ascii column name: %s", name => { + beforeAll(async () => { + tableOrViewId = await createTableOrView({ + [name]: { + name, + type: FieldType.STRING, + }, + }) + await createRows([{ [name]: "a" }, { [name]: "b" }]) + }) + + it("should be able to query a column with non-ascii characters", async () => { + await expectSearch({ + query: { + equal: { + [`1:${name}`]: "a", + }, + }, + }).toContainExactly([{ [name]: "a" }]) + }) + }) + + // This is currently not supported in external datasources, it produces SQL + // errors at time of writing. We supported it (potentially by accident) in + // Lucene, though, so we need to make sure it's supported in SQS as well. We + // found real cases in production of column names ending in a space. 
+ isInternal && + describe("space at end of column name", () => { + beforeAll(async () => { + tableOrViewId = await createTableOrView({ + "name ": { + name: "name ", + type: FieldType.STRING, + }, + }) + await createRows([{ ["name "]: "foo" }, { ["name "]: "bar" }]) + }) + + it("should be able to query a column that ends with a space", async () => { + await expectSearch({ + query: { + string: { + "name ": "foo", + }, + }, + }).toContainExactly([{ ["name "]: "foo" }]) + }) + + it("should be able to query a column that ends with a space using numeric notation", async () => { + await expectSearch({ + query: { + string: { + "1:name ": "foo", + }, + }, + }).toContainExactly([{ ["name "]: "foo" }]) + }) + }) + + isInternal && + describe("space at start of column name", () => { + beforeAll(async () => { + tableOrViewId = await createTableOrView({ + " name": { + name: " name", + type: FieldType.STRING, + }, + }) + await createRows([{ [" name"]: "foo" }, { [" name"]: "bar" }]) + }) + + it("should be able to query a column that starts with a space", async () => { + await expectSearch({ + query: { + string: { + " name": "foo", + }, + }, + }).toContainExactly([{ [" name"]: "foo" }]) + }) + + it("should be able to query a column that starts with a space using numeric notation", async () => { + await expectSearch({ + query: { + string: { + "1: name": "foo", + }, + }, + }).toContainExactly([{ [" name"]: "foo" }]) + }) + }) + + isInternal && + !isView && + describe("duplicate columns", () => { + beforeAll(async () => { + tableOrViewId = await createTableOrView({ + name: { + name: "name", + type: FieldType.STRING, + }, + }) + await context.doInAppContext(config.getAppId(), async () => { + const db = context.getAppDB() + const tableDoc = await db.get
(tableOrViewId) + tableDoc.schema.Name = { + name: "Name", + type: FieldType.STRING, + } + try { + // remove the SQLite definitions so that they can be rebuilt as part of the search + const sqliteDoc = await db.get(SQLITE_DESIGN_DOC_ID) + await db.remove(sqliteDoc) + } catch (err) { + // no-op + } + }) + await createRows([{ name: "foo", Name: "bar" }]) + }) + + it("should handle invalid duplicate column names", async () => { + await expectSearch({ + query: {}, + }).toContainExactly([{ name: "foo" }]) + }) + }) + + !isInMemory && + describe("search by _id", () => { + let row: Row + + beforeAll(async () => { + const toRelateTable = await createTable({ + name: { + name: "name", + type: FieldType.STRING, + }, + }) + tableOrViewId = await createTableOrView({ + name: { + name: "name", + type: FieldType.STRING, + }, + rel: { + name: "rel", + type: FieldType.LINK, + relationshipType: RelationshipType.MANY_TO_MANY, + tableId: toRelateTable, + fieldName: "rel", + }, + }) + const [row1, row2] = await Promise.all([ + config.api.row.save(toRelateTable, { name: "tag 1" }), + config.api.row.save(toRelateTable, { name: "tag 2" }), + ]) + row = await config.api.row.save(tableOrViewId, { + name: "product 1", + rel: [row1._id, row2._id], + }) + }) + + it("can filter by the row ID with limit 1", async () => { + await expectSearch({ + query: { + equal: { _id: row._id }, + }, + limit: 1, + }).toContainExactly([row]) + }) + }) + + !isInternal && + describe("search by composite key", () => { + beforeAll(async () => { + const table = await config.api.table.save( + tableForDatasource(datasource, { + schema: { + idColumn1: { + name: "idColumn1", + type: FieldType.NUMBER, + }, + idColumn2: { + name: "idColumn2", + type: FieldType.NUMBER, + }, + }, + primary: ["idColumn1", "idColumn2"], + }) + ) + tableOrViewId = table._id! 
+ await createRows([{ idColumn1: 1, idColumn2: 2 }]) + }) + + it("can filter by the row ID with limit 1", async () => { + await expectSearch({ + query: { + equal: { _id: generateRowIdField([1, 2]) }, + }, + limit: 1, + }).toContain([ + { + idColumn1: 1, + idColumn2: 2, + }, + ]) + }) + }) + + isSql && + describe("primaryDisplay", () => { + beforeAll(async () => { + let toRelateTableId = await createTable({ + name: { + name: "name", + type: FieldType.STRING, + }, + }) + tableOrViewId = await createTableOrView({ + name: { + name: "name", + type: FieldType.STRING, + }, + link: { + name: "link", + type: FieldType.LINK, + relationshipType: RelationshipType.MANY_TO_ONE, + tableId: toRelateTableId, + fieldName: "link", + }, + }) + + const toRelateTable = await config.api.table.get( + toRelateTableId + ) + await config.api.table.save({ + ...toRelateTable, + primaryDisplay: "link", + }) + const relatedRows = await Promise.all([ + config.api.row.save(toRelateTable._id!, { + name: "related", + }), + ]) + await config.api.row.save(tableOrViewId, { + name: "test", + link: relatedRows.map(row => row._id), + }) + }) + + it("should be able to query, primary display on related table shouldn't be used", async () => { + // this test makes sure that if a relationship has been specified as the primary display on a table + // it is ignored and another column is used instead + await expectQuery({}).toContain([ + { name: "test", link: [{ primaryDisplay: "related" }] }, + ]) + }) + }) + + describe("$and", () => { + beforeAll(async () => { + tableOrViewId = await createTableOrView({ + age: { name: "age", type: FieldType.NUMBER }, + name: { name: "name", type: FieldType.STRING }, + }) + await createRows([ + { age: 1, name: "Jane" }, + { age: 10, name: "Jack" }, + { age: 7, name: "Hanna" }, + { age: 8, name: "Jan" }, + ]) + }) + + it("successfully finds a row for one level condition", async () => { + await expectQuery({ + $and: { + conditions: [ + { equal: { age: 10 } }, + { equal: { name: "Jack" } }, + ], + }, + }).toContainExactly([{ age: 10, name: "Jack" }]) + }) + + it("successfully finds a row for one level with multiple conditions", async () => { + await expectQuery({ + $and: { + conditions: [ + { equal: { age: 10 } }, + { equal: { name: "Jack" } }, + ], + }, + }).toContainExactly([{ age: 10, name: "Jack" }]) + }) + + it("successfully finds multiple rows for one level with multiple conditions", async () => { + await expectQuery({ + $and: { + conditions: [ + { range: { age: { low: 1, high: 9 } } }, + { string: { name: "Ja" } }, + ], + }, + }).toContainExactly([ + { age: 1, name: "Jane" }, + { age: 8, name: "Jan" }, + ]) + }) + + it("successfully finds rows for nested filters", async () => { + await expectQuery({ + $and: { + conditions: [ + { + $and: { + conditions: [ + { + range: { age: { low: 1, high: 10 } }, + }, + { string: { name: "Ja" } }, + ], + }, + equal: { name: "Jane" }, + }, + ], + }, + }).toContainExactly([{ age: 1, name: "Jane" }]) + }) + + it("returns nothing when filtering out all data", async () => { + await expectQuery({ + $and: { + conditions: [ + { equal: { age: 7 } }, + { equal: { name: "Jack" } }, + ], + }, + }).toFindNothing() + }) + + !isInMemory && + it("validates conditions that are not objects", async () => { + await expect( + expectQuery({ + $and: { + conditions: [ + { equal: { age: 10 } }, + "invalidCondition" as any, + ], + }, + }).toFindNothing() + ).rejects.toThrow( + 'Invalid body - "query.$and.conditions[1]" must be of type object' + ) + }) + + !isInMemory && + it("validates $and 
without conditions", async () => { + await expect( + expectQuery({ + $and: { + conditions: [ + { equal: { age: 10 } }, + { + $and: { + conditions: undefined as any, + }, + }, + ], + }, + }).toFindNothing() + ).rejects.toThrow( + 'Invalid body - "query.$and.conditions[1].$and.conditions" is required' + ) + }) + + // onEmptyFilter cannot be sent to view searches + !isView && + it("returns no rows when onEmptyFilter set to none", async () => { + await expectSearch({ + query: { + onEmptyFilter: EmptyFilterOption.RETURN_NONE, + $and: { + conditions: [{ equal: { name: "" } }], + }, + }, + }).toFindNothing() + }) + + it("returns all rows when onEmptyFilter set to all", async () => { + await expectSearch({ + query: { + onEmptyFilter: EmptyFilterOption.RETURN_ALL, + $and: { + conditions: [{ equal: { name: "" } }], + }, + }, + }).toHaveLength(4) + }) + }) + + describe("$or", () => { + beforeAll(async () => { + tableOrViewId = await createTableOrView({ + age: { name: "age", type: FieldType.NUMBER }, + name: { name: "name", type: FieldType.STRING }, + }) + await createRows([ + { age: 1, name: "Jane" }, + { age: 10, name: "Jack" }, + { age: 7, name: "Hanna" }, + { age: 8, name: "Jan" }, + ]) + }) + + it("successfully finds a row for one level condition", async () => { + await expectQuery({ + $or: { + conditions: [ + { equal: { age: 7 } }, + { equal: { name: "Jack" } }, + ], + }, + }).toContainExactly([ + { age: 10, name: "Jack" }, + { age: 7, name: "Hanna" }, + ]) + }) + + it("successfully finds a row for one level with multiple conditions", async () => { + await expectQuery({ + $or: { + conditions: [ + { equal: { age: 7 } }, + { equal: { name: "Jack" } }, + ], + }, + }).toContainExactly([ + { age: 10, name: "Jack" }, + { age: 7, name: "Hanna" }, + ]) + }) + + it("successfully finds multiple rows for one level with multiple conditions", async () => { + await expectQuery({ + $or: { + conditions: [ + { range: { age: { low: 1, high: 9 } } }, + { string: { name: "Jan" } }, + ], + }, + }).toContainExactly([ + { age: 1, name: "Jane" }, + { age: 7, name: "Hanna" }, + { age: 8, name: "Jan" }, + ]) + }) + + it("successfully finds rows for nested filters", async () => { + await expectQuery({ + $or: { + conditions: [ + { + $or: { + conditions: [ + { + range: { age: { low: 1, high: 7 } }, + }, + { string: { name: "Jan" } }, + ], + }, + equal: { name: "Jane" }, + }, + ], + }, + }).toContainExactly([ + { age: 1, name: "Jane" }, + { age: 7, name: "Hanna" }, + { age: 8, name: "Jan" }, + ]) + }) + + it("returns nothing when filtering out all data", async () => { + await expectQuery({ + $or: { + conditions: [ + { equal: { age: 6 } }, + { equal: { name: "John" } }, + ], + }, + }).toFindNothing() + }) + + it("can nest $and under $or filters", async () => { + await expectQuery({ + $or: { + conditions: [ + { + $and: { + conditions: [ + { + range: { age: { low: 1, high: 8 } }, + }, + { equal: { name: "Jan" } }, + ], + }, + equal: { name: "Jane" }, + }, + ], + }, + }).toContainExactly([ + { age: 1, name: "Jane" }, + { age: 8, name: "Jan" }, + ]) + }) + + it("can nest $or under $and filters", async () => { + await expectQuery({ + $and: { + conditions: [ + { + $or: { + conditions: [ + { + range: { age: { low: 1, high: 8 } }, + }, + { equal: { name: "Jan" } }, + ], + }, + equal: { name: "Jane" }, + }, + ], + }, + }).toContainExactly([{ age: 1, name: "Jane" }]) + }) + + // onEmptyFilter cannot be sent to view searches + !isView && + it("returns no rows when onEmptyFilter set to none", async () => { + await expectSearch({ + query: 
{ + onEmptyFilter: EmptyFilterOption.RETURN_NONE, + $or: { + conditions: [{ equal: { name: "" } }], + }, + }, + }).toFindNothing() + }) + + it("returns all rows when onEmptyFilter set to all", async () => { + await expectSearch({ + query: { + onEmptyFilter: EmptyFilterOption.RETURN_ALL, + $or: { + conditions: [{ equal: { name: "" } }], + }, + }, + }).toHaveLength(4) + }) + }) + + isSql && + describe("max related columns", () => { + let relatedRows: Row[] + + beforeAll(async () => { + const relatedSchema: TableSchema = {} + const row: Row = {} + for (let i = 0; i < 100; i++) { + const name = `column${i}` + relatedSchema[name] = { name, type: FieldType.NUMBER } + row[name] = i + } + const relatedTable = await createTable(relatedSchema) + tableOrViewId = await createTableOrView({ + name: { name: "name", type: FieldType.STRING }, + related1: { + type: FieldType.LINK, + name: "related1", + fieldName: "main1", + tableId: relatedTable, + relationshipType: RelationshipType.MANY_TO_MANY, + }, + }) + relatedRows = await Promise.all([ + config.api.row.save(relatedTable, row), + ]) + await config.api.row.save(tableOrViewId, { + name: "foo", + related1: [relatedRows[0]._id], + }) + }) + + it("retrieve the row with relationships", async () => { + await expectQuery({}).toContainExactly([ + { + name: "foo", + related1: [{ _id: relatedRows[0]._id }], + }, + ]) + }) + }) + + !isInternal && + describe("SQL injection", () => { + const badStrings = [ + "1; DROP TABLE %table_name%;", + "1; DELETE FROM %table_name%;", + "1; UPDATE %table_name% SET name = 'foo';", + "1; INSERT INTO %table_name% (name) VALUES ('foo');", + "' OR '1'='1' --", + "'; DROP TABLE %table_name%; --", + "' OR 1=1 --", + "' UNION SELECT null, null, null; --", + "' AND (SELECT COUNT(*) FROM %table_name%) > 0 --", + "\"; EXEC xp_cmdshell('dir'); --", + "\"' OR 'a'='a", + "OR 1=1;", + "'; SHUTDOWN --", + ] + + describe.each(badStrings)( + "bad string: %s", + badStringTemplate => { + // The SQL that knex generates when you try to use a double quote in a + // field name is always invalid and never works, so we skip it for these + // tests. 
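+              // Descriptive note (derived from the condition below): the skip only
+              // applies on Oracle runs where the injected string contains a double quote.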
+ const skipFieldNameCheck = + isOracle && badStringTemplate.includes('"') + + !skipFieldNameCheck && + it("should not allow SQL injection as a field name", async () => { + const tableOrViewId = await createTableOrView() + const table = await getTable(tableOrViewId) + const badString = badStringTemplate.replace( + /%table_name%/g, + table.name + ) + + await config.api.table.save({ + ...table, + schema: { + ...table.schema, + [badString]: { + name: badString, + type: FieldType.STRING, + }, + }, + }) + + if (docIds.isViewId(tableOrViewId)) { + const view = await config.api.viewV2.get( + tableOrViewId + ) + await config.api.viewV2.update({ + ...view, + schema: { + [badString]: { visible: true }, + }, + }) + } + + await config.api.row.save(tableOrViewId, { + [badString]: "foo", + }) + + await assertTableExists(table) + await assertTableNumRows(table, 1) + + const { rows } = await config.api.row.search( + tableOrViewId, + { query: {} }, + { status: 200 } + ) + + expect(rows).toHaveLength(1) + + await assertTableExists(table) + await assertTableNumRows(table, 1) + }) + + it("should not allow SQL injection as a field value", async () => { + const tableOrViewId = await createTableOrView({ + foo: { + name: "foo", + type: FieldType.STRING, + }, + }) + const table = await getTable(tableOrViewId) + const badString = badStringTemplate.replace( + /%table_name%/g, + table.name + ) + + await config.api.row.save(tableOrViewId, { foo: "foo" }) + + await assertTableExists(table) + await assertTableNumRows(table, 1) + + const { rows } = await config.api.row.search( + tableOrViewId, + { query: { equal: { foo: badString } } }, + { status: 200 } + ) + + expect(rows).toBeEmpty() + await assertTableExists(table) + await assertTableNumRows(table, 1) + }) + } + ) + }) + } + ) }) - }) - } -) + } + ) +} diff --git a/packages/server/src/api/routes/tests/table.spec.ts b/packages/server/src/api/routes/tests/table.spec.ts index b9d8696714..8556a598c6 100644 --- a/packages/server/src/api/routes/tests/table.spec.ts +++ b/packages/server/src/api/routes/tests/table.spec.ts @@ -38,203 +38,758 @@ import timekeeper from "timekeeper" const { basicTable } = setup.structures const ISO_REGEX_PATTERN = /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$/ -datasourceDescribe( - { name: "/tables (%s)", exclude: [DatabaseName.MONGODB] }, - ({ config, dsProvider, isInternal, isOracle }) => { - let datasource: Datasource | undefined +const descriptions = datasourceDescribe({ exclude: [DatabaseName.MONGODB] }) - beforeAll(async () => { - const ds = await dsProvider() - datasource = ds.datasource - }) +if (descriptions.length) { + describe.each(descriptions)( + "/tables ($dbName)", + ({ config, dsProvider, isInternal, isOracle }) => { + let datasource: Datasource | undefined - describe("create", () => { - beforeEach(() => { - jest.clearAllMocks() + beforeAll(async () => { + const ds = await dsProvider() + datasource = ds.datasource }) - let names = [ - "alphanum", - "with spaces", - "with-dashes", - "with_underscores", - "with `backticks`", - ] - - if (!isOracle) { - names.push(`with "double quotes"`) - names.push(`with 'single quotes'`) - } - - it.each(names)("creates a table with name: %s", async name => { - const table = await config.api.table.save( - tableForDatasource(datasource, { name }) - ) - expect(table.name).toEqual(name) - expect(events.table.created).toHaveBeenCalledTimes(1) - expect(events.table.created).toHaveBeenCalledWith(table) - - const res = await config.api.table.get(table._id!) 
- expect(res.name).toEqual(name) - }) - - it("creates a table via data import", async () => { - const table: SaveTableRequest = basicTable() - table.rows = [{ name: "test-name", description: "test-desc" }] - - const res = await config.api.table.save(table) - - expect(events.table.created).toHaveBeenCalledTimes(1) - expect(events.table.created).toHaveBeenCalledWith(res) - expect(events.table.imported).toHaveBeenCalledTimes(1) - expect(events.table.imported).toHaveBeenCalledWith(res) - expect(events.rows.imported).toHaveBeenCalledTimes(1) - expect(events.rows.imported).toHaveBeenCalledWith(res, 1) - }) - - it("should not allow a column to have a default value and be required", async () => { - await config.api.table.save( - tableForDatasource(datasource, { - schema: { - name: { - name: "name", - type: FieldType.STRING, - default: "default", - constraints: { - presence: true, - }, - }, - }, - }), - { - status: 400, - body: { - message: - 'Cannot make field "name" required, it has a default value.', - }, - } - ) - }) - - it("should apply authorization to endpoint", async () => { - await checkBuilderEndpoint({ - config, - method: "POST", - url: `/api/tables`, - body: basicTable(), + describe("create", () => { + beforeEach(() => { + jest.clearAllMocks() }) - }) - it("does not persist the row fields that are not on the table schema", async () => { - const table: SaveTableRequest = basicTable() - table.rows = [ - { - name: "test-name", - description: "test-desc", - nonValid: "test-non-valid", - }, + let names = [ + "alphanum", + "with spaces", + "with-dashes", + "with_underscores", + "with `backticks`", ] - const res = await config.api.table.save(table) + if (!isOracle) { + names.push(`with "double quotes"`) + names.push(`with 'single quotes'`) + } - const persistedRows = await config.api.row.search(res._id!) + it.each(names)("creates a table with name: %s", async name => { + const table = await config.api.table.save( + tableForDatasource(datasource, { name }) + ) + expect(table.name).toEqual(name) + expect(events.table.created).toHaveBeenCalledTimes(1) + expect(events.table.created).toHaveBeenCalledWith(table) - expect(persistedRows.rows).toEqual([ - expect.objectContaining({ - name: "test-name", - description: "test-desc", - }), - ]) - expect(persistedRows.rows[0].nonValid).toBeUndefined() - }) + const res = await config.api.table.get(table._id!) + expect(res.name).toEqual(name) + }) - it.each( - isInternal ? 
PROTECTED_INTERNAL_COLUMNS : PROTECTED_EXTERNAL_COLUMNS - )( - "cannot use protected column names (%s) while importing a table", - async columnName => { + it("creates a table via data import", async () => { + const table: SaveTableRequest = basicTable() + table.rows = [{ name: "test-name", description: "test-desc" }] + + const res = await config.api.table.save(table) + + expect(events.table.created).toHaveBeenCalledTimes(1) + expect(events.table.created).toHaveBeenCalledWith(res) + expect(events.table.imported).toHaveBeenCalledTimes(1) + expect(events.table.imported).toHaveBeenCalledWith(res) + expect(events.rows.imported).toHaveBeenCalledTimes(1) + expect(events.rows.imported).toHaveBeenCalledWith(res, 1) + }) + + it("should not allow a column to have a default value and be required", async () => { + await config.api.table.save( + tableForDatasource(datasource, { + schema: { + name: { + name: "name", + type: FieldType.STRING, + default: "default", + constraints: { + presence: true, + }, + }, + }, + }), + { + status: 400, + body: { + message: + 'Cannot make field "name" required, it has a default value.', + }, + } + ) + }) + + it("should apply authorization to endpoint", async () => { + await checkBuilderEndpoint({ + config, + method: "POST", + url: `/api/tables`, + body: basicTable(), + }) + }) + + it("does not persist the row fields that are not on the table schema", async () => { const table: SaveTableRequest = basicTable() table.rows = [ { name: "test-name", description: "test-desc", + nonValid: "test-non-valid", }, ] - await config.api.table.save( - { - ...table, - schema: { - ...table.schema, - [columnName]: { - name: columnName, - type: FieldType.STRING, + const res = await config.api.table.save(table) + + const persistedRows = await config.api.row.search(res._id!) + + expect(persistedRows.rows).toEqual([ + expect.objectContaining({ + name: "test-name", + description: "test-desc", + }), + ]) + expect(persistedRows.rows[0].nonValid).toBeUndefined() + }) + + it.each( + isInternal ? 
PROTECTED_INTERNAL_COLUMNS : PROTECTED_EXTERNAL_COLUMNS + )( + "cannot use protected column names (%s) while importing a table", + async columnName => { + const table: SaveTableRequest = basicTable() + table.rows = [ + { + name: "test-name", + description: "test-desc", + }, + ] + + await config.api.table.save( + { + ...table, + schema: { + ...table.schema, + [columnName]: { + name: columnName, + type: FieldType.STRING, + }, }, }, - }, - { - status: 400, - body: { - message: `Column(s) "${columnName}" are duplicated - check for other columns with these name (case in-sensitive)`, + { status: 400, - }, - } - ) - } - ) - }) - - describe("permissions", () => { - it("get the base permissions for the table", async () => { - const table = await config.api.table.save( - tableForDatasource(datasource, { - schema: { - name: { - type: FieldType.STRING, - name: "name", - }, - }, - }) + body: { + message: `Column(s) "${columnName}" are duplicated - check for other columns with these name (case in-sensitive)`, + status: 400, + }, + } + ) + } ) + }) - // get the explicit permissions - const { permissions } = await config.api.permission.get(table._id!, { - status: 200, - }) - const explicitPermissions = { - role: "ADMIN", - permissionType: "EXPLICIT", - } - expect(permissions.write).toEqual(explicitPermissions) - expect(permissions.read).toEqual(explicitPermissions) - - // revoke the explicit permissions - for (let level of [PermissionLevel.WRITE, PermissionLevel.READ]) { - await config.api.permission.revoke( - { - roleId: permissions[level].role, - resourceId: table._id!, - level, - }, - { status: 200 } + describe("permissions", () => { + it("get the base permissions for the table", async () => { + const table = await config.api.table.save( + tableForDatasource(datasource, { + schema: { + name: { + type: FieldType.STRING, + name: "name", + }, + }, + }) ) - } - // check base permissions - const { permissions: basePermissions } = - await config.api.permission.get(table._id!, { + // get the explicit permissions + const { permissions } = await config.api.permission.get(table._id!, { status: 200, }) - const basePerms = { role: "BASIC", permissionType: "BASE" } - expect(basePermissions.write).toEqual(basePerms) - expect(basePermissions.read).toEqual(basePerms) - }) - }) + const explicitPermissions = { + role: "ADMIN", + permissionType: "EXPLICIT", + } + expect(permissions.write).toEqual(explicitPermissions) + expect(permissions.read).toEqual(explicitPermissions) - describe("update", () => { - it("updates a table", async () => { - const table = await config.api.table.save( - tableForDatasource(datasource, { + // revoke the explicit permissions + for (let level of [PermissionLevel.WRITE, PermissionLevel.READ]) { + await config.api.permission.revoke( + { + roleId: permissions[level].role, + resourceId: table._id!, + level, + }, + { status: 200 } + ) + } + + // check base permissions + const { permissions: basePermissions } = + await config.api.permission.get(table._id!, { + status: 200, + }) + const basePerms = { role: "BASIC", permissionType: "BASE" } + expect(basePermissions.write).toEqual(basePerms) + expect(basePermissions.read).toEqual(basePerms) + }) + }) + + describe("update", () => { + it("updates a table", async () => { + const table = await config.api.table.save( + tableForDatasource(datasource, { + schema: { + name: { + type: FieldType.STRING, + name: "name", + constraints: { + type: "string", + }, + }, + }, + }) + ) + + const updatedTable = await config.api.table.save({ + ...table, + name: 
generator.guid(), + }) + + expect(events.table.updated).toHaveBeenCalledTimes(1) + expect(events.table.updated).toHaveBeenCalledWith(updatedTable) + }) + + it("updates all the row fields for a table when a schema key is renamed", async () => { + const testTable = await config.api.table.save(basicTable(datasource)) + await config.createLegacyView({ + name: "TestView", + field: "Price", + calculation: ViewCalculation.STATISTICS, + tableId: testTable._id!, + schema: {}, + filters: [], + }) + + const testRow = await config.api.row.save(testTable._id!, { + name: "test", + }) + + const { name, ...otherColumns } = testTable.schema + const updatedTable = await config.api.table.save({ + ...testTable, + _rename: { + old: "name", + updated: "updatedName", + }, + schema: { + ...otherColumns, + updatedName: { + ...name, + name: "updatedName", + }, + }, + }) + + expect(updatedTable.name).toEqual(testTable.name) + + const res = await config.api.row.get(testTable._id!, testRow._id!) + expect(res.updatedName).toEqual("test") + expect(res.name).toBeUndefined() + }) + + isInternal && + it("updates only the passed fields", async () => { + await timekeeper.withFreeze(new Date(2021, 1, 1), async () => { + const table = await config.api.table.save( + tableForDatasource(datasource, { + schema: { + autoId: { + name: "id", + type: FieldType.NUMBER, + subtype: AutoFieldSubType.AUTO_ID, + autocolumn: true, + constraints: { + type: "number", + presence: false, + }, + }, + }, + }) + ) + + const newName = generator.guid() + + const updatedTable = await config.api.table.save({ + ...table, + name: newName, + }) + + let expected: Table = { + ...table, + name: newName, + _id: expect.any(String), + } + if (isInternal) { + expected._rev = expect.stringMatching(/^2-.+/) + } + + expect(updatedTable).toEqual(expect.objectContaining(expected)) + + const persistedTable = await config.api.table.get( + updatedTable._id! 
+ ) + expected = { + ...table, + name: newName, + _id: updatedTable._id, + } + if (datasource?.isSQL) { + expected.sql = true + } + if (isInternal) { + expected._rev = expect.stringMatching(/^2-.+/) + } + expect(persistedTable).toEqual(expect.objectContaining(expected)) + }) + }) + + describe("user table", () => { + isInternal && + it("should add roleId and email field when adjusting user table schema", async () => { + const table = await config.api.table.save({ + ...basicTable(datasource), + _id: "ta_users", + }) + expect(table.schema.email).toBeDefined() + expect(table.schema.roleId).toBeDefined() + }) + }) + + describe("default field validation", () => { + it("should error if an existing column is set to required and has a default value", async () => { + const table = await config.api.table.save( + tableForDatasource(datasource, { + schema: { + name: { + name: "name", + type: FieldType.STRING, + default: "default", + }, + }, + }) + ) + + await config.api.table.save( + { + ...table, + schema: { + ...table.schema, + name: { + name: "name", + type: FieldType.STRING, + default: "default", + constraints: { + presence: true, + }, + }, + }, + }, + { + status: 400, + body: { + message: + 'Cannot make field "name" required, it has a default value.', + }, + } + ) + }) + + it("should error if an existing column is given a default value and is required", async () => { + const table = await config.api.table.save( + tableForDatasource(datasource, { + schema: { + name: { + name: "name", + type: FieldType.STRING, + constraints: { + presence: true, + }, + }, + }, + }) + ) + + await config.api.table.save( + { + ...table, + schema: { + ...table.schema, + name: { + name: "name", + type: FieldType.STRING, + default: "default", + constraints: { + presence: true, + }, + }, + }, + }, + { + status: 400, + body: { + message: + 'Cannot make field "name" required, it has a default value.', + }, + } + ) + }) + + it("should be able to set an existing column to have a default value if it's not required", async () => { + const table = await config.api.table.save( + tableForDatasource(datasource, { + schema: { + name: { + name: "name", + type: FieldType.STRING, + }, + }, + }) + ) + + await config.api.table.save( + { + ...table, + schema: { + ...table.schema, + name: { + name: "name", + type: FieldType.STRING, + default: "default", + }, + }, + }, + { status: 200 } + ) + }) + + it("should be able to remove a default value if the column is not required", async () => { + const table = await config.api.table.save( + tableForDatasource(datasource, { + schema: { + name: { + name: "name", + type: FieldType.STRING, + default: "default", + }, + }, + }) + ) + + await config.api.table.save( + { + ...table, + schema: { + ...table.schema, + name: { + name: "name", + type: FieldType.STRING, + }, + }, + }, + { status: 200 } + ) + }) + }) + + describe("external table validation", () => { + !isInternal && + it("should error if column is of type auto", async () => { + const table = basicTable(datasource) + await config.api.table.save( + { + ...table, + schema: { + ...table.schema, + auto: { + name: "auto", + autocolumn: true, + type: FieldType.AUTO, + subtype: AutoFieldSubType.AUTO_ID, + }, + }, + }, + { + status: 400, + body: { + message: `Column "auto" has type "${FieldType.AUTO}" - this is not supported.`, + }, + } + ) + }) + + !isInternal && + it("should error if column has auto subtype", async () => { + const table = basicTable(datasource) + await config.api.table.save( + { + ...table, + schema: { + ...table.schema, + auto: { + 
name: "auto", + autocolumn: true, + type: FieldType.NUMBER, + subtype: AutoFieldSubType.AUTO_ID, + }, + }, + }, + { + status: 400, + body: { + message: `Column "auto" has subtype "${AutoFieldSubType.AUTO_ID}" - this is not supported.`, + }, + } + ) + }) + }) + + isInternal && + it("shouldn't allow duplicate column names", async () => { + const saveTableRequest: SaveTableRequest = { + ...basicTable(), + } + saveTableRequest.schema["Type"] = { + type: FieldType.STRING, + name: "Type", + } + // allow the "Type" column - internal columns aren't case sensitive + await config.api.table.save(saveTableRequest, { + status: 200, + }) + saveTableRequest.schema.foo = { + type: FieldType.STRING, + name: "foo", + } + saveTableRequest.schema.FOO = { + type: FieldType.STRING, + name: "FOO", + } + + await config.api.table.save(saveTableRequest, { + status: 400, + body: { + message: + 'Column(s) "foo" are duplicated - check for other columns with these name (case in-sensitive)', + }, + }) + }) + + it("should add a new column for an internal DB table", async () => { + const saveTableRequest: SaveTableRequest = { + ...basicTable(), + } + + const response = await config.api.table.save(saveTableRequest) + + const expectedResponse = { + ...saveTableRequest, + _rev: expect.stringMatching(/^\d-.+/), + _id: expect.stringMatching(/^ta_.+/), + createdAt: expect.stringMatching(ISO_REGEX_PATTERN), + updatedAt: expect.stringMatching(ISO_REGEX_PATTERN), + views: {}, + } + expect(response).toEqual(expectedResponse) + }) + }) + + describe("import", () => { + it("imports rows successfully", async () => { + const name = generator.guid() + const table = await config.api.table.save( + basicTable(datasource, { name }) + ) + const importRequest = { + schema: table.schema, + rows: [{ name: "test-name", description: "test-desc" }], + } + + jest.clearAllMocks() + + await config.api.table.import(table._id!, importRequest) + + expect(events.table.created).not.toHaveBeenCalled() + expect(events.rows.imported).toHaveBeenCalledTimes(1) + expect(events.rows.imported).toHaveBeenCalledWith( + expect.objectContaining({ + name, + _id: table._id, + }), + 1 + ) + }) + }) + + describe("fetch", () => { + let testTable: Table + + beforeEach(async () => { + testTable = await config.api.table.save( + basicTable(datasource, { name: generator.guid() }) + ) + }) + + it("returns all tables", async () => { + const res = await config.api.table.fetch() + const table = res.find(t => t._id === testTable._id) + expect(table).toBeDefined() + expect(table!.name).toEqual(testTable.name) + expect(table!.type).toEqual("table") + expect(table!.sourceType).toEqual(testTable.sourceType) + }) + + it("should apply authorization to endpoint", async () => { + await checkBuilderEndpoint({ + config, + method: "GET", + url: `/api/tables`, + }) + }) + + it("should enrich the view schemas", async () => { + const viewV2 = await config.api.viewV2.create({ + tableId: testTable._id!, + name: generator.guid(), + }) + const legacyView = await config.api.legacyView.save({ + tableId: testTable._id!, + name: generator.guid(), + filters: [], + schema: {}, + }) + + const res = await config.api.table.fetch() + + const table = res.find(t => t._id === testTable._id) + expect(table).toBeDefined() + expect(table!.views![viewV2.name]).toBeDefined() + + const expectedViewV2: ViewV2Enriched = { + ...viewV2, + schema: { + description: { + constraints: { + type: "string", + }, + name: "description", + type: FieldType.STRING, + visible: false, + }, + name: { + constraints: { + type: "string", + 
}, + name: "name", + type: FieldType.STRING, + visible: false, + }, + }, + } + + if (!isInternal) { + expectedViewV2.schema!.id = { + name: "id", + type: FieldType.NUMBER, + visible: false, + autocolumn: true, + } + } + + expect(table!.views![viewV2.name!]).toEqual(expectedViewV2) + + if (isInternal) { + expect(table!.views![legacyView.name!]).toBeDefined() + expect(table!.views![legacyView.name!]).toEqual({ + ...legacyView, + schema: { + description: { + constraints: { + type: "string", + }, + name: "description", + type: "string", + }, + name: { + constraints: { + type: "string", + }, + name: "name", + type: "string", + }, + }, + }) + } + }) + }) + + describe("get", () => { + it("returns a table", async () => { + const table = await config.api.table.save( + basicTable(datasource, { name: generator.guid() }) + ) + const res = await config.api.table.get(table._id!) + expect(res).toEqual(expect.objectContaining(table)) + }) + }) + + describe("indexing", () => { + it("should be able to create a table with indexes", async () => { + await context.doInAppContext(config.getAppId(), async () => { + const db = context.getAppDB() + const indexCount = (await db.getIndexes()).total_rows + const table = basicTable() + table.indexes = ["name"] + const res = await config.api.table.save(table) + expect(res._id).toBeDefined() + expect(res._rev).toBeDefined() + expect((await db.getIndexes()).total_rows).toEqual(indexCount + 1) + // update index to see what happens + table.indexes = ["name", "description"] + await config.api.table.save({ + ...table, + _id: res._id, + _rev: res._rev, + }) + // shouldn't have created a new index + expect((await db.getIndexes()).total_rows).toEqual(indexCount + 1) + }) + }) + }) + + describe("destroy", () => { + let testTable: Table + + beforeEach(async () => { + testTable = await config.createTable() + }) + + it("returns a success response when a table is deleted.", async () => { + await config.api.table.destroy(testTable._id!, testTable._rev!, { + body: { message: `Table ${testTable._id} deleted.` }, + }) + expect(events.table.deleted).toHaveBeenCalledTimes(1) + expect(events.table.deleted).toHaveBeenCalledWith( + expect.objectContaining({ + ...testTable, + tableId: testTable._id, + }) + ) + }) + + it("deletes linked references to the table after deletion", async () => { + const linkedTable = await config.createTable({ + name: "LinkedTable", + type: "table", schema: { name: { type: FieldType.STRING, @@ -243,68 +798,299 @@ datasourceDescribe( type: "string", }, }, + TestTable: { + type: FieldType.LINK, + relationshipType: RelationshipType.ONE_TO_MANY, + name: "TestTable", + fieldName: "TestTable", + tableId: testTable._id!, + constraints: { + type: "array", + }, + }, }, }) - ) - const updatedTable = await config.api.table.save({ - ...table, - name: generator.guid(), + await config.api.table.destroy(testTable._id!, testTable._rev!, { + body: { message: `Table ${testTable._id} deleted.` }, + }) + const dependentTable = await config.api.table.get(linkedTable._id!) 
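+        // the link column referencing the deleted table should have been removed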
+ expect(dependentTable.schema.TestTable).not.toBeDefined() }) - expect(events.table.updated).toHaveBeenCalledTimes(1) - expect(events.table.updated).toHaveBeenCalledWith(updatedTable) + it("should apply authorization to endpoint", async () => { + await checkBuilderEndpoint({ + config, + method: "DELETE", + url: `/api/tables/${testTable._id}/${testTable._rev}`, + }) + }) }) - it("updates all the row fields for a table when a schema key is renamed", async () => { - const testTable = await config.api.table.save(basicTable(datasource)) - await config.createLegacyView({ - name: "TestView", - field: "Price", - calculation: ViewCalculation.STATISTICS, - tableId: testTable._id!, - schema: {}, - filters: [], + describe("migrate", () => { + let users: User[] + beforeAll(async () => { + users = await Promise.all([ + config.createUser({ email: `${uuid.v4()}@example.com` }), + config.createUser({ email: `${uuid.v4()}@example.com` }), + config.createUser({ email: `${uuid.v4()}@example.com` }), + ]) }) - const testRow = await config.api.row.save(testTable._id!, { - name: "test", - }) - - const { name, ...otherColumns } = testTable.schema - const updatedTable = await config.api.table.save({ - ...testTable, - _rename: { - old: "name", - updated: "updatedName", - }, - schema: { - ...otherColumns, - updatedName: { - ...name, - name: "updatedName", + it("should successfully migrate a one-to-many user relationship to a user column", async () => { + const table = await config.api.table.save({ + name: "table", + type: "table", + sourceId: INTERNAL_TABLE_SOURCE_ID, + sourceType: TableSourceType.INTERNAL, + schema: { + "user relationship": { + type: FieldType.LINK, + fieldName: "test", + name: "user relationship", + constraints: { + type: "array", + presence: false, + }, + relationshipType: RelationshipType.ONE_TO_MANY, + tableId: InternalTable.USER_METADATA, + }, }, - }, + }) + + const rows = await Promise.all( + users.map(u => + config.api.row.save(table._id!, { "user relationship": [u] }) + ) + ) + + await config.api.table.migrate(table._id!, { + oldColumn: "user relationship", + newColumn: "user column", + }) + + const migratedTable = await config.api.table.get(table._id!) + expect(migratedTable.schema["user column"]).toEqual({ + name: "user column", + type: FieldType.BB_REFERENCE_SINGLE, + subtype: BBReferenceFieldSubType.USER, + }) + expect(migratedTable.schema["user relationship"]).not.toBeDefined() + + const migratedRows = await config.api.row.fetch(table._id!) + + rows.sort((a, b) => a._id!.localeCompare(b._id!)) + migratedRows.sort((a, b) => a._id!.localeCompare(b._id!)) + + for (const [i, row] of rows.entries()) { + const migratedRow = migratedRows[i] + expect(migratedRow["user column"]).toBeDefined() + expect(migratedRow["user relationship"]).not.toBeDefined() + expect(row["user relationship"][0]._id).toEqual( + migratedRow["user column"]._id + ) + } }) - expect(updatedTable.name).toEqual(testTable.name) + it("should succeed when the row is created from the other side of the relationship", async () => { + // We found a bug just after releasing this feature where if the row was created from the + // users table, not the table linking to it, the migration would succeed but lose the data. + // This happened because the order of the documents in the link was reversed. 
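+          // This test reproduces that path by patching the user metadata rows below,
+          // so the link is written from the users side rather than from this table.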
+ const table = await config.api.table.save({ + name: "table", + type: "table", + sourceId: INTERNAL_TABLE_SOURCE_ID, + sourceType: TableSourceType.INTERNAL, + schema: { + "user relationship": { + type: FieldType.LINK, + fieldName: "test", + name: "user relationship", + constraints: { + type: "array", + presence: false, + }, + relationshipType: RelationshipType.MANY_TO_ONE, + tableId: InternalTable.USER_METADATA, + }, + }, + }) - const res = await config.api.row.get(testTable._id!, testRow._id!) - expect(res.updatedName).toEqual("test") - expect(res.name).toBeUndefined() - }) + let testRow = await config.api.row.save(table._id!, {}) - isInternal && - it("updates only the passed fields", async () => { - await timekeeper.withFreeze(new Date(2021, 1, 1), async () => { - const table = await config.api.table.save( + await Promise.all( + users.map(u => + config.api.row.patch(InternalTable.USER_METADATA, { + tableId: InternalTable.USER_METADATA, + _rev: u._rev!, + _id: u._id!, + test: [testRow], + }) + ) + ) + + await config.api.table.migrate(table._id!, { + oldColumn: "user relationship", + newColumn: "user column", + }) + + const migratedTable = await config.api.table.get(table._id!) + expect(migratedTable.schema["user column"]).toEqual({ + name: "user column", + type: FieldType.BB_REFERENCE, + subtype: BBReferenceFieldSubType.USER, + constraints: { + type: "array", + }, + }) + expect(migratedTable.schema["user relationship"]).not.toBeDefined() + + const migratedRow = await config.api.row.get(table._id!, testRow._id!) + + expect(migratedRow["user column"]).toBeDefined() + expect(migratedRow["user relationship"]).not.toBeDefined() + expect(migratedRow["user column"]).toHaveLength(3) + expect(migratedRow["user column"].map((u: Row) => u._id)).toEqual( + expect.arrayContaining(users.map(u => u._id)) + ) + }) + + it("should successfully migrate a many-to-many user relationship to a users column", async () => { + const table = await config.api.table.save({ + name: "table", + type: "table", + sourceId: INTERNAL_TABLE_SOURCE_ID, + sourceType: TableSourceType.INTERNAL, + schema: { + "user relationship": { + type: FieldType.LINK, + fieldName: "test", + name: "user relationship", + constraints: { + type: "array", + presence: false, + }, + relationshipType: RelationshipType.MANY_TO_MANY, + tableId: InternalTable.USER_METADATA, + }, + }, + }) + + const row1 = await config.api.row.save(table._id!, { + "user relationship": [users[0], users[1]], + }) + + const row2 = await config.api.row.save(table._id!, { + "user relationship": [users[1], users[2]], + }) + + await config.api.table.migrate(table._id!, { + oldColumn: "user relationship", + newColumn: "user column", + }) + + const migratedTable = await config.api.table.get(table._id!) + expect(migratedTable.schema["user column"]).toEqual({ + name: "user column", + type: FieldType.BB_REFERENCE, + subtype: BBReferenceFieldSubType.USER, + constraints: { + type: "array", + }, + }) + expect(migratedTable.schema["user relationship"]).not.toBeDefined() + + const row1Migrated = await config.api.row.get(table._id!, row1._id!) + expect(row1Migrated["user relationship"]).not.toBeDefined() + expect(row1Migrated["user column"].map((r: Row) => r._id)).toEqual( + expect.arrayContaining([users[0]._id, users[1]._id]) + ) + + const row2Migrated = await config.api.row.get(table._id!, row2._id!) 
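+        // the second row should likewise only expose the users via the new column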
+ expect(row2Migrated["user relationship"]).not.toBeDefined() + expect(row2Migrated["user column"].map((r: Row) => r._id)).toEqual( + expect.arrayContaining([users[1]._id, users[2]._id]) + ) + }) + + it("should successfully migrate a many-to-one user relationship to a users column", async () => { + const table = await config.api.table.save({ + name: "table", + type: "table", + sourceId: INTERNAL_TABLE_SOURCE_ID, + sourceType: TableSourceType.INTERNAL, + schema: { + "user relationship": { + type: FieldType.LINK, + fieldName: "test", + name: "user relationship", + constraints: { + type: "array", + presence: false, + }, + relationshipType: RelationshipType.MANY_TO_ONE, + tableId: InternalTable.USER_METADATA, + }, + }, + }) + + const row1 = await config.api.row.save(table._id!, { + "user relationship": [users[0], users[1]], + }) + + const row2 = await config.api.row.save(table._id!, { + "user relationship": [users[2]], + }) + + await config.api.table.migrate(table._id!, { + oldColumn: "user relationship", + newColumn: "user column", + }) + + const migratedTable = await config.api.table.get(table._id!) + expect(migratedTable.schema["user column"]).toEqual({ + name: "user column", + type: FieldType.BB_REFERENCE, + subtype: BBReferenceFieldSubType.USER, + constraints: { + type: "array", + }, + }) + expect(migratedTable.schema["user relationship"]).not.toBeDefined() + + const row1Migrated = await config.api.row.get(table._id!, row1._id!) + expect(row1Migrated["user relationship"]).not.toBeDefined() + expect(row1Migrated["user column"].map((r: Row) => r._id)).toEqual( + expect.arrayContaining([users[0]._id, users[1]._id]) + ) + + const row2Migrated = await config.api.row.get(table._id!, row2._id!) + expect(row2Migrated["user relationship"]).not.toBeDefined() + expect(row2Migrated["user column"].map((r: Row) => r._id)).toEqual([ + users[2]._id, + ]) + }) + + describe("unhappy paths", () => { + let table: Table + beforeAll(async () => { + table = await config.api.table.save( tableForDatasource(datasource, { schema: { - autoId: { - name: "id", + "user relationship": { + type: FieldType.LINK, + fieldName: "test", + name: "user relationship", + constraints: { + type: "array", + presence: false, + }, + relationshipType: RelationshipType.MANY_TO_ONE, + tableId: InternalTable.USER_METADATA, + }, + num: { type: FieldType.NUMBER, - subtype: AutoFieldSubType.AUTO_ID, - autocolumn: true, + name: "num", constraints: { type: "number", presence: false, @@ -313,1077 +1099,238 @@ datasourceDescribe( }, }) ) - - const newName = generator.guid() - - const updatedTable = await config.api.table.save({ - ...table, - name: newName, - }) - - let expected: Table = { - ...table, - name: newName, - _id: expect.any(String), - } - if (isInternal) { - expected._rev = expect.stringMatching(/^2-.+/) - } - - expect(updatedTable).toEqual(expect.objectContaining(expected)) - - const persistedTable = await config.api.table.get(updatedTable._id!) 
- expected = { - ...table, - name: newName, - _id: updatedTable._id, - } - if (datasource?.isSQL) { - expected.sql = true - } - if (isInternal) { - expected._rev = expect.stringMatching(/^2-.+/) - } - expect(persistedTable).toEqual(expect.objectContaining(expected)) }) - }) - describe("user table", () => { - isInternal && - it("should add roleId and email field when adjusting user table schema", async () => { - const table = await config.api.table.save({ - ...basicTable(datasource), - _id: "ta_users", - }) - expect(table.schema.email).toBeDefined() - expect(table.schema.roleId).toBeDefined() - }) - }) - - describe("default field validation", () => { - it("should error if an existing column is set to required and has a default value", async () => { - const table = await config.api.table.save( - tableForDatasource(datasource, { - schema: { - name: { - name: "name", - type: FieldType.STRING, - default: "default", - }, - }, - }) - ) - - await config.api.table.save( - { - ...table, - schema: { - ...table.schema, - name: { - name: "name", - type: FieldType.STRING, - default: "default", - constraints: { - presence: true, - }, - }, - }, - }, - { - status: 400, - body: { - message: - 'Cannot make field "name" required, it has a default value.', - }, - } - ) - }) - - it("should error if an existing column is given a default value and is required", async () => { - const table = await config.api.table.save( - tableForDatasource(datasource, { - schema: { - name: { - name: "name", - type: FieldType.STRING, - constraints: { - presence: true, - }, - }, - }, - }) - ) - - await config.api.table.save( - { - ...table, - schema: { - ...table.schema, - name: { - name: "name", - type: FieldType.STRING, - default: "default", - constraints: { - presence: true, - }, - }, - }, - }, - { - status: 400, - body: { - message: - 'Cannot make field "name" required, it has a default value.', - }, - } - ) - }) - - it("should be able to set an existing column to have a default value if it's not required", async () => { - const table = await config.api.table.save( - tableForDatasource(datasource, { - schema: { - name: { - name: "name", - type: FieldType.STRING, - }, - }, - }) - ) - - await config.api.table.save( - { - ...table, - schema: { - ...table.schema, - name: { - name: "name", - type: FieldType.STRING, - default: "default", - }, - }, - }, - { status: 200 } - ) - }) - - it("should be able to remove a default value if the column is not required", async () => { - const table = await config.api.table.save( - tableForDatasource(datasource, { - schema: { - name: { - name: "name", - type: FieldType.STRING, - default: "default", - }, - }, - }) - ) - - await config.api.table.save( - { - ...table, - schema: { - ...table.schema, - name: { - name: "name", - type: FieldType.STRING, - }, - }, - }, - { status: 200 } - ) - }) - }) - - describe("external table validation", () => { - !isInternal && - it("should error if column is of type auto", async () => { - const table = basicTable(datasource) - await config.api.table.save( + it("should fail if the new column name is blank", async () => { + await config.api.table.migrate( + table._id!, { - ...table, - schema: { - ...table.schema, - auto: { - name: "auto", - autocolumn: true, - type: FieldType.AUTO, - subtype: AutoFieldSubType.AUTO_ID, - }, - }, + oldColumn: "user relationship", + newColumn: "", }, - { - status: 400, - body: { - message: `Column "auto" has type "${FieldType.AUTO}" - this is not supported.`, - }, - } + { status: 400 } ) }) - !isInternal && - it("should error if column 
has auto subtype", async () => { - const table = basicTable(datasource) - await config.api.table.save( + it("should fail if the new column name is a reserved name", async () => { + await config.api.table.migrate( + table._id!, { - ...table, - schema: { - ...table.schema, - auto: { - name: "auto", - autocolumn: true, - type: FieldType.NUMBER, - subtype: AutoFieldSubType.AUTO_ID, - }, - }, + oldColumn: "user relationship", + newColumn: "_id", }, - { - status: 400, - body: { - message: `Column "auto" has subtype "${AutoFieldSubType.AUTO_ID}" - this is not supported.`, - }, - } + { status: 400 } ) }) - }) - isInternal && - it("shouldn't allow duplicate column names", async () => { - const saveTableRequest: SaveTableRequest = { - ...basicTable(), - } - saveTableRequest.schema["Type"] = { - type: FieldType.STRING, - name: "Type", - } - // allow the "Type" column - internal columns aren't case sensitive - await config.api.table.save(saveTableRequest, { - status: 200, + it("should fail if the new column name is the same as an existing column", async () => { + await config.api.table.migrate( + table._id!, + { + oldColumn: "user relationship", + newColumn: "num", + }, + { status: 400 } + ) }) - saveTableRequest.schema.foo = { type: FieldType.STRING, name: "foo" } - saveTableRequest.schema.FOO = { type: FieldType.STRING, name: "FOO" } - await config.api.table.save(saveTableRequest, { - status: 400, - body: { - message: - 'Column(s) "foo" are duplicated - check for other columns with these name (case in-sensitive)', - }, + it("should fail if the old column name isn't a column in the table", async () => { + await config.api.table.migrate( + table._id!, + { + oldColumn: "not a column", + newColumn: "new column", + }, + { status: 400 } + ) }) }) - - it("should add a new column for an internal DB table", async () => { - const saveTableRequest: SaveTableRequest = { - ...basicTable(), - } - - const response = await config.api.table.save(saveTableRequest) - - const expectedResponse = { - ...saveTableRequest, - _rev: expect.stringMatching(/^\d-.+/), - _id: expect.stringMatching(/^ta_.+/), - createdAt: expect.stringMatching(ISO_REGEX_PATTERN), - updatedAt: expect.stringMatching(ISO_REGEX_PATTERN), - views: {}, - } - expect(response).toEqual(expectedResponse) - }) - }) - - describe("import", () => { - it("imports rows successfully", async () => { - const name = generator.guid() - const table = await config.api.table.save( - basicTable(datasource, { name }) - ) - const importRequest = { - schema: table.schema, - rows: [{ name: "test-name", description: "test-desc" }], - } - - jest.clearAllMocks() - - await config.api.table.import(table._id!, importRequest) - - expect(events.table.created).not.toHaveBeenCalled() - expect(events.rows.imported).toHaveBeenCalledTimes(1) - expect(events.rows.imported).toHaveBeenCalledWith( - expect.objectContaining({ - name, - _id: table._id, - }), - 1 - ) - }) - }) - - describe("fetch", () => { - let testTable: Table - - beforeEach(async () => { - testTable = await config.api.table.save( - basicTable(datasource, { name: generator.guid() }) - ) }) - it("returns all tables", async () => { - const res = await config.api.table.fetch() - const table = res.find(t => t._id === testTable._id) - expect(table).toBeDefined() - expect(table!.name).toEqual(testTable.name) - expect(table!.type).toEqual("table") - expect(table!.sourceType).toEqual(testTable.sourceType) - }) - - it("should apply authorization to endpoint", async () => { - await checkBuilderEndpoint({ - config, - method: "GET", - 
url: `/api/tables`, - }) - }) - - it("should enrich the view schemas", async () => { - const viewV2 = await config.api.viewV2.create({ - tableId: testTable._id!, - name: generator.guid(), - }) - const legacyView = await config.api.legacyView.save({ - tableId: testTable._id!, - name: generator.guid(), - filters: [], - schema: {}, - }) - - const res = await config.api.table.fetch() - - const table = res.find(t => t._id === testTable._id) - expect(table).toBeDefined() - expect(table!.views![viewV2.name]).toBeDefined() - - const expectedViewV2: ViewV2Enriched = { - ...viewV2, - schema: { - description: { - constraints: { - type: "string", - }, - name: "description", - type: FieldType.STRING, - visible: false, - }, - name: { - constraints: { - type: "string", - }, - name: "name", - type: FieldType.STRING, - visible: false, - }, - }, - } - - if (!isInternal) { - expectedViewV2.schema!.id = { - name: "id", - type: FieldType.NUMBER, - visible: false, - autocolumn: true, - } - } - - expect(table!.views![viewV2.name!]).toEqual(expectedViewV2) - - if (isInternal) { - expect(table!.views![legacyView.name!]).toBeDefined() - expect(table!.views![legacyView.name!]).toEqual({ - ...legacyView, - schema: { - description: { - constraints: { - type: "string", - }, - name: "description", - type: "string", - }, - name: { - constraints: { - type: "string", - }, - name: "name", - type: "string", - }, - }, - }) - } - }) - }) - - describe("get", () => { - it("returns a table", async () => { - const table = await config.api.table.save( - basicTable(datasource, { name: generator.guid() }) - ) - const res = await config.api.table.get(table._id!) - expect(res).toEqual(expect.objectContaining(table)) - }) - }) - - describe("indexing", () => { - it("should be able to create a table with indexes", async () => { - await context.doInAppContext(config.getAppId(), async () => { - const db = context.getAppDB() - const indexCount = (await db.getIndexes()).total_rows - const table = basicTable() - table.indexes = ["name"] - const res = await config.api.table.save(table) - expect(res._id).toBeDefined() - expect(res._rev).toBeDefined() - expect((await db.getIndexes()).total_rows).toEqual(indexCount + 1) - // update index to see what happens - table.indexes = ["name", "description"] - await config.api.table.save({ - ...table, - _id: res._id, - _rev: res._rev, - }) - // shouldn't have created a new index - expect((await db.getIndexes()).total_rows).toEqual(indexCount + 1) - }) - }) - }) - - describe("destroy", () => { - let testTable: Table - - beforeEach(async () => { - testTable = await config.createTable() - }) - - it("returns a success response when a table is deleted.", async () => { - await config.api.table.destroy(testTable._id!, testTable._rev!, { - body: { message: `Table ${testTable._id} deleted.` }, - }) - expect(events.table.deleted).toHaveBeenCalledTimes(1) - expect(events.table.deleted).toHaveBeenCalledWith( - expect.objectContaining({ - ...testTable, - tableId: testTable._id, - }) - ) - }) - - it("deletes linked references to the table after deletion", async () => { - const linkedTable = await config.createTable({ - name: "LinkedTable", - type: "table", - schema: { - name: { - type: FieldType.STRING, - name: "name", - constraints: { - type: "string", - }, - }, - TestTable: { - type: FieldType.LINK, - relationshipType: RelationshipType.ONE_TO_MANY, - name: "TestTable", - fieldName: "TestTable", - tableId: testTable._id!, - constraints: { - type: "array", - }, - }, - }, - }) - - await 
config.api.table.destroy(testTable._id!, testTable._rev!, { - body: { message: `Table ${testTable._id} deleted.` }, - }) - const dependentTable = await config.api.table.get(linkedTable._id!) - expect(dependentTable.schema.TestTable).not.toBeDefined() - }) - - it("should apply authorization to endpoint", async () => { - await checkBuilderEndpoint({ - config, - method: "DELETE", - url: `/api/tables/${testTable._id}/${testTable._rev}`, - }) - }) - }) - - describe("migrate", () => { - let users: User[] - beforeAll(async () => { - users = await Promise.all([ - config.createUser({ email: `${uuid.v4()}@example.com` }), - config.createUser({ email: `${uuid.v4()}@example.com` }), - config.createUser({ email: `${uuid.v4()}@example.com` }), - ]) - }) - - it("should successfully migrate a one-to-many user relationship to a user column", async () => { - const table = await config.api.table.save({ - name: "table", - type: "table", - sourceId: INTERNAL_TABLE_SOURCE_ID, - sourceType: TableSourceType.INTERNAL, - schema: { - "user relationship": { - type: FieldType.LINK, - fieldName: "test", - name: "user relationship", - constraints: { - type: "array", - presence: false, - }, - relationshipType: RelationshipType.ONE_TO_MANY, - tableId: InternalTable.USER_METADATA, - }, - }, - }) - - const rows = await Promise.all( - users.map(u => - config.api.row.save(table._id!, { "user relationship": [u] }) - ) - ) - - await config.api.table.migrate(table._id!, { - oldColumn: "user relationship", - newColumn: "user column", - }) - - const migratedTable = await config.api.table.get(table._id!) - expect(migratedTable.schema["user column"]).toEqual({ - name: "user column", - type: FieldType.BB_REFERENCE_SINGLE, - subtype: BBReferenceFieldSubType.USER, - }) - expect(migratedTable.schema["user relationship"]).not.toBeDefined() - - const migratedRows = await config.api.row.fetch(table._id!) - - rows.sort((a, b) => a._id!.localeCompare(b._id!)) - migratedRows.sort((a, b) => a._id!.localeCompare(b._id!)) - - for (const [i, row] of rows.entries()) { - const migratedRow = migratedRows[i] - expect(migratedRow["user column"]).toBeDefined() - expect(migratedRow["user relationship"]).not.toBeDefined() - expect(row["user relationship"][0]._id).toEqual( - migratedRow["user column"]._id - ) - } - }) - - it("should succeed when the row is created from the other side of the relationship", async () => { - // We found a bug just after releasing this feature where if the row was created from the - // users table, not the table linking to it, the migration would succeed but lose the data. - // This happened because the order of the documents in the link was reversed. - const table = await config.api.table.save({ - name: "table", - type: "table", - sourceId: INTERNAL_TABLE_SOURCE_ID, - sourceType: TableSourceType.INTERNAL, - schema: { - "user relationship": { - type: FieldType.LINK, - fieldName: "test", - name: "user relationship", - constraints: { - type: "array", - presence: false, - }, - relationshipType: RelationshipType.MANY_TO_ONE, - tableId: InternalTable.USER_METADATA, - }, - }, - }) - - let testRow = await config.api.row.save(table._id!, {}) - - await Promise.all( - users.map(u => - config.api.row.patch(InternalTable.USER_METADATA, { - tableId: InternalTable.USER_METADATA, - _rev: u._rev!, - _id: u._id!, - test: [testRow], - }) - ) - ) - - await config.api.table.migrate(table._id!, { - oldColumn: "user relationship", - newColumn: "user column", - }) - - const migratedTable = await config.api.table.get(table._id!) 
- expect(migratedTable.schema["user column"]).toEqual({ - name: "user column", - type: FieldType.BB_REFERENCE, - subtype: BBReferenceFieldSubType.USER, - constraints: { - type: "array", - }, - }) - expect(migratedTable.schema["user relationship"]).not.toBeDefined() - - const migratedRow = await config.api.row.get(table._id!, testRow._id!) - - expect(migratedRow["user column"]).toBeDefined() - expect(migratedRow["user relationship"]).not.toBeDefined() - expect(migratedRow["user column"]).toHaveLength(3) - expect(migratedRow["user column"].map((u: Row) => u._id)).toEqual( - expect.arrayContaining(users.map(u => u._id)) - ) - }) - - it("should successfully migrate a many-to-many user relationship to a users column", async () => { - const table = await config.api.table.save({ - name: "table", - type: "table", - sourceId: INTERNAL_TABLE_SOURCE_ID, - sourceType: TableSourceType.INTERNAL, - schema: { - "user relationship": { - type: FieldType.LINK, - fieldName: "test", - name: "user relationship", - constraints: { - type: "array", - presence: false, - }, - relationshipType: RelationshipType.MANY_TO_MANY, - tableId: InternalTable.USER_METADATA, - }, - }, - }) - - const row1 = await config.api.row.save(table._id!, { - "user relationship": [users[0], users[1]], - }) - - const row2 = await config.api.row.save(table._id!, { - "user relationship": [users[1], users[2]], - }) - - await config.api.table.migrate(table._id!, { - oldColumn: "user relationship", - newColumn: "user column", - }) - - const migratedTable = await config.api.table.get(table._id!) - expect(migratedTable.schema["user column"]).toEqual({ - name: "user column", - type: FieldType.BB_REFERENCE, - subtype: BBReferenceFieldSubType.USER, - constraints: { - type: "array", - }, - }) - expect(migratedTable.schema["user relationship"]).not.toBeDefined() - - const row1Migrated = await config.api.row.get(table._id!, row1._id!) - expect(row1Migrated["user relationship"]).not.toBeDefined() - expect(row1Migrated["user column"].map((r: Row) => r._id)).toEqual( - expect.arrayContaining([users[0]._id, users[1]._id]) - ) - - const row2Migrated = await config.api.row.get(table._id!, row2._id!) - expect(row2Migrated["user relationship"]).not.toBeDefined() - expect(row2Migrated["user column"].map((r: Row) => r._id)).toEqual( - expect.arrayContaining([users[1]._id, users[2]._id]) - ) - }) - - it("should successfully migrate a many-to-one user relationship to a users column", async () => { - const table = await config.api.table.save({ - name: "table", - type: "table", - sourceId: INTERNAL_TABLE_SOURCE_ID, - sourceType: TableSourceType.INTERNAL, - schema: { - "user relationship": { - type: FieldType.LINK, - fieldName: "test", - name: "user relationship", - constraints: { - type: "array", - presence: false, - }, - relationshipType: RelationshipType.MANY_TO_ONE, - tableId: InternalTable.USER_METADATA, - }, - }, - }) - - const row1 = await config.api.row.save(table._id!, { - "user relationship": [users[0], users[1]], - }) - - const row2 = await config.api.row.save(table._id!, { - "user relationship": [users[2]], - }) - - await config.api.table.migrate(table._id!, { - oldColumn: "user relationship", - newColumn: "user column", - }) - - const migratedTable = await config.api.table.get(table._id!) 
- expect(migratedTable.schema["user column"]).toEqual({ - name: "user column", - type: FieldType.BB_REFERENCE, - subtype: BBReferenceFieldSubType.USER, - constraints: { - type: "array", - }, - }) - expect(migratedTable.schema["user relationship"]).not.toBeDefined() - - const row1Migrated = await config.api.row.get(table._id!, row1._id!) - expect(row1Migrated["user relationship"]).not.toBeDefined() - expect(row1Migrated["user column"].map((r: Row) => r._id)).toEqual( - expect.arrayContaining([users[0]._id, users[1]._id]) - ) - - const row2Migrated = await config.api.row.get(table._id!, row2._id!) - expect(row2Migrated["user relationship"]).not.toBeDefined() - expect(row2Migrated["user column"].map((r: Row) => r._id)).toEqual([ - users[2]._id, - ]) - }) - - describe("unhappy paths", () => { - let table: Table - beforeAll(async () => { - table = await config.api.table.save( - tableForDatasource(datasource, { - schema: { - "user relationship": { - type: FieldType.LINK, - fieldName: "test", - name: "user relationship", - constraints: { - type: "array", - presence: false, - }, - relationshipType: RelationshipType.MANY_TO_ONE, - tableId: InternalTable.USER_METADATA, - }, - num: { - type: FieldType.NUMBER, - name: "num", - constraints: { - type: "number", - presence: false, - }, - }, - }, - }) - ) - }) - - it("should fail if the new column name is blank", async () => { - await config.api.table.migrate( - table._id!, - { - oldColumn: "user relationship", - newColumn: "", - }, - { status: 400 } - ) - }) - - it("should fail if the new column name is a reserved name", async () => { - await config.api.table.migrate( - table._id!, - { - oldColumn: "user relationship", - newColumn: "_id", - }, - { status: 400 } - ) - }) - - it("should fail if the new column name is the same as an existing column", async () => { - await config.api.table.migrate( - table._id!, - { - oldColumn: "user relationship", - newColumn: "num", - }, - { status: 400 } - ) - }) - - it("should fail if the old column name isn't a column in the table", async () => { - await config.api.table.migrate( - table._id!, - { - oldColumn: "not a column", - newColumn: "new column", - }, - { status: 400 } - ) - }) - }) - }) - - describe.each([ - [ - RowExportFormat.CSV, - (val: any) => JSON.stringify(val).replace(/"/g, "'"), - ], - [RowExportFormat.JSON, (val: any) => val], - ])("import validation (%s)", (_, userParser) => { - const basicSchema: TableSchema = { - id: { - type: FieldType.NUMBER, - name: "id", - }, - name: { - type: FieldType.STRING, - name: "name", - }, - } - - const importCases: [ - string, - ( - rows: Row[], - schema: TableSchema - ) => Promise - ][] = [ + describe.each([ [ - "validateNewTableImport", - async (rows: Row[], schema: TableSchema) => { - const result = await config.api.table.validateNewTableImport({ - rows, - schema, - }) - return result - }, + RowExportFormat.CSV, + (val: any) => JSON.stringify(val).replace(/"/g, "'"), ], - [ - "validateExistingTableImport", - async (rows: Row[], schema: TableSchema) => { - const table = await config.api.table.save( - tableForDatasource(datasource, { - primary: ["id"], + [RowExportFormat.JSON, (val: any) => val], + ])("import validation (%s)", (_, userParser) => { + const basicSchema: TableSchema = { + id: { + type: FieldType.NUMBER, + name: "id", + }, + name: { + type: FieldType.STRING, + name: "name", + }, + } + + const importCases: [ + string, + ( + rows: Row[], + schema: TableSchema + ) => Promise + ][] = [ + [ + "validateNewTableImport", + async (rows: Row[], schema: TableSchema) 
=> { + const result = await config.api.table.validateNewTableImport({ + rows, schema, }) + return result + }, + ], + [ + "validateExistingTableImport", + async (rows: Row[], schema: TableSchema) => { + const table = await config.api.table.save( + tableForDatasource(datasource, { + primary: ["id"], + schema, + }) + ) + const result = await config.api.table.validateExistingTableImport( + { + tableId: table._id, + rows, + } + ) + return result + }, + ], + ] + + describe.each(importCases)("%s", (_, testDelegate) => { + it("validates basic imports", async () => { + const result = await testDelegate( + [{ id: generator.natural(), name: generator.first() }], + basicSchema ) - const result = await config.api.table.validateExistingTableImport({ - tableId: table._id, - rows, - }) - return result - }, - ], - ] - describe.each(importCases)("%s", (_, testDelegate) => { - it("validates basic imports", async () => { - const result = await testDelegate( - [{ id: generator.natural(), name: generator.first() }], - basicSchema - ) - - expect(result).toEqual({ - allValid: true, - errors: {}, - invalidColumns: [], - schemaValidation: { - id: true, - name: true, - }, - }) - }) - - it.each( - isInternal ? PROTECTED_INTERNAL_COLUMNS : PROTECTED_EXTERNAL_COLUMNS - )("don't allow protected names in schema (%s)", async columnName => { - const result = await config.api.table.validateNewTableImport({ - rows: [ - { - id: generator.natural(), - name: generator.first(), - [columnName]: generator.word(), + expect(result).toEqual({ + allValid: true, + errors: {}, + invalidColumns: [], + schemaValidation: { + id: true, + name: true, }, - ], - schema: { - ...basicSchema, - }, + }) }) - expect(result).toEqual({ - allValid: false, - errors: { - [columnName]: `${columnName} is a protected column name`, - }, - invalidColumns: [], - schemaValidation: { - id: true, - name: true, - [columnName]: false, - }, - }) - }) - - it("does not allow imports without rows", async () => { - const result = await testDelegate([], basicSchema) - - expect(result).toEqual({ - allValid: false, - errors: {}, - invalidColumns: [], - schemaValidation: {}, - }) - }) - - it("validates imports with some empty rows", async () => { - const result = await testDelegate( - [{}, { id: generator.natural(), name: generator.first() }, {}], - basicSchema - ) - - expect(result).toEqual({ - allValid: true, - errors: {}, - invalidColumns: [], - schemaValidation: { - id: true, - name: true, - }, - }) - }) - - isInternal && it.each( isInternal ? 
PROTECTED_INTERNAL_COLUMNS : PROTECTED_EXTERNAL_COLUMNS - )( - "don't allow protected names in the rows (%s)", - async columnName => { - const result = await config.api.table.validateNewTableImport({ - rows: [ - { - id: generator.natural(), - name: generator.first(), + )("don't allow protected names in schema (%s)", async columnName => { + const result = await config.api.table.validateNewTableImport({ + rows: [ + { + id: generator.natural(), + name: generator.first(), + [columnName]: generator.word(), + }, + ], + schema: { + ...basicSchema, + }, + }) + + expect(result).toEqual({ + allValid: false, + errors: { + [columnName]: `${columnName} is a protected column name`, + }, + invalidColumns: [], + schemaValidation: { + id: true, + name: true, + [columnName]: false, + }, + }) + }) + + it("does not allow imports without rows", async () => { + const result = await testDelegate([], basicSchema) + + expect(result).toEqual({ + allValid: false, + errors: {}, + invalidColumns: [], + schemaValidation: {}, + }) + }) + + it("validates imports with some empty rows", async () => { + const result = await testDelegate( + [{}, { id: generator.natural(), name: generator.first() }, {}], + basicSchema + ) + + expect(result).toEqual({ + allValid: true, + errors: {}, + invalidColumns: [], + schemaValidation: { + id: true, + name: true, + }, + }) + }) + + isInternal && + it.each( + isInternal + ? PROTECTED_INTERNAL_COLUMNS + : PROTECTED_EXTERNAL_COLUMNS + )( + "don't allow protected names in the rows (%s)", + async columnName => { + const result = await config.api.table.validateNewTableImport({ + rows: [ + { + id: generator.natural(), + name: generator.first(), + }, + ], + schema: { + ...basicSchema, + [columnName]: { + name: columnName, + type: FieldType.STRING, + }, }, - ], - schema: { - ...basicSchema, - [columnName]: { - name: columnName, - type: FieldType.STRING, + }) + + expect(result).toEqual({ + allValid: false, + errors: { + [columnName]: `${columnName} is a protected column name`, }, - }, - }) + invalidColumns: [], + schemaValidation: { + id: true, + name: true, + [columnName]: false, + }, + }) + } + ) - expect(result).toEqual({ - allValid: false, - errors: { - [columnName]: `${columnName} is a protected column name`, - }, - invalidColumns: [], - schemaValidation: { - id: true, - name: true, - [columnName]: false, - }, - }) - } - ) - - it("validates required fields and valid rows", async () => { - const schema: TableSchema = { - ...basicSchema, - name: { - type: FieldType.STRING, - name: "name", - constraints: { presence: true }, - }, - } - - const result = await testDelegate( - [ - { id: generator.natural(), name: generator.first() }, - { id: generator.natural(), name: generator.first() }, - ], - schema - ) - - expect(result).toEqual({ - allValid: true, - errors: {}, - invalidColumns: [], - schemaValidation: { - id: true, - name: true, - }, - }) - }) - - it("validates required fields and non-valid rows", async () => { - const schema: TableSchema = { - ...basicSchema, - name: { - type: FieldType.STRING, - name: "name", - constraints: { presence: true }, - }, - } - - const result = await testDelegate( - [ - { id: generator.natural(), name: generator.first() }, - { id: generator.natural(), name: "" }, - ], - schema - ) - - expect(result).toEqual({ - allValid: false, - errors: {}, - invalidColumns: [], - schemaValidation: { - id: true, - name: false, - }, - }) - }) - - describe("bb references", () => { - const getUserValues = () => ({ - _id: docIds.generateGlobalUserID(), - primaryDisplay: 
generator.first(), - email: generator.email({}), - }) - - it("can validate user column imports", async () => { + it("validates required fields and valid rows", async () => { const schema: TableSchema = { ...basicSchema, - user: { - type: FieldType.BB_REFERENCE_SINGLE, - subtype: BBReferenceFieldSubType.USER, - name: "user", + name: { + type: FieldType.STRING, + name: "name", + constraints: { presence: true }, }, } const result = await testDelegate( [ - { - id: generator.natural(), - name: generator.first(), - user: userParser(getUserValues()), - }, + { id: generator.natural(), name: generator.first() }, + { id: generator.natural(), name: generator.first() }, ], schema ) @@ -1395,33 +1342,24 @@ datasourceDescribe( schemaValidation: { id: true, name: true, - user: true, }, }) }) - it("can validate user column imports with invalid data", async () => { + it("validates required fields and non-valid rows", async () => { const schema: TableSchema = { ...basicSchema, - user: { - type: FieldType.BB_REFERENCE_SINGLE, - subtype: BBReferenceFieldSubType.USER, - name: "user", + name: { + type: FieldType.STRING, + name: "name", + constraints: { presence: true }, }, } const result = await testDelegate( [ - { - id: generator.natural(), - name: generator.first(), - user: userParser(getUserValues()), - }, - { - id: generator.natural(), - name: generator.first(), - user: "no valid user data", - }, + { id: generator.natural(), name: generator.first() }, + { id: generator.natural(), name: "" }, ], schema ) @@ -1432,84 +1370,164 @@ datasourceDescribe( invalidColumns: [], schemaValidation: { id: true, - name: true, - user: false, + name: false, }, }) }) - it("can validate users column imports", async () => { - const schema: TableSchema = { - ...basicSchema, - user: { - type: FieldType.BB_REFERENCE, - subtype: BBReferenceFieldSubType.USER, - name: "user", - externalType: "array", - }, - } + describe("bb references", () => { + const getUserValues = () => ({ + _id: docIds.generateGlobalUserID(), + primaryDisplay: generator.first(), + email: generator.email({}), + }) - const result = await testDelegate( - [ - { - id: generator.natural(), - name: generator.first(), - user: userParser([ - getUserValues(), - getUserValues(), - getUserValues(), - ]), + it("can validate user column imports", async () => { + const schema: TableSchema = { + ...basicSchema, + user: { + type: FieldType.BB_REFERENCE_SINGLE, + subtype: BBReferenceFieldSubType.USER, + name: "user", }, - ], - schema - ) + } - expect(result).toEqual({ - allValid: true, - errors: {}, - invalidColumns: [], - schemaValidation: { - id: true, - name: true, - user: true, - }, + const result = await testDelegate( + [ + { + id: generator.natural(), + name: generator.first(), + user: userParser(getUserValues()), + }, + ], + schema + ) + + expect(result).toEqual({ + allValid: true, + errors: {}, + invalidColumns: [], + schemaValidation: { + id: true, + name: true, + user: true, + }, + }) + }) + + it("can validate user column imports with invalid data", async () => { + const schema: TableSchema = { + ...basicSchema, + user: { + type: FieldType.BB_REFERENCE_SINGLE, + subtype: BBReferenceFieldSubType.USER, + name: "user", + }, + } + + const result = await testDelegate( + [ + { + id: generator.natural(), + name: generator.first(), + user: userParser(getUserValues()), + }, + { + id: generator.natural(), + name: generator.first(), + user: "no valid user data", + }, + ], + schema + ) + + expect(result).toEqual({ + allValid: false, + errors: {}, + invalidColumns: [], + 
schemaValidation: { + id: true, + name: true, + user: false, + }, + }) + }) + + it("can validate users column imports", async () => { + const schema: TableSchema = { + ...basicSchema, + user: { + type: FieldType.BB_REFERENCE, + subtype: BBReferenceFieldSubType.USER, + name: "user", + externalType: "array", + }, + } + + const result = await testDelegate( + [ + { + id: generator.natural(), + name: generator.first(), + user: userParser([ + getUserValues(), + getUserValues(), + getUserValues(), + ]), + }, + ], + schema + ) + + expect(result).toEqual({ + allValid: true, + errors: {}, + invalidColumns: [], + schemaValidation: { + id: true, + name: true, + user: true, + }, + }) }) }) }) - }) - describe("validateExistingTableImport", () => { - isInternal && - it("can reimport _id fields for internal tables", async () => { - const table = await config.api.table.save( - tableForDatasource(datasource, { - primary: ["id"], - schema: basicSchema, - }) - ) - const result = await config.api.table.validateExistingTableImport({ - tableId: table._id, - rows: [ + describe("validateExistingTableImport", () => { + isInternal && + it("can reimport _id fields for internal tables", async () => { + const table = await config.api.table.save( + tableForDatasource(datasource, { + primary: ["id"], + schema: basicSchema, + }) + ) + const result = await config.api.table.validateExistingTableImport( { - _id: docIds.generateRowID(table._id!), - id: generator.natural(), - name: generator.first(), - }, - ], - }) + tableId: table._id, + rows: [ + { + _id: docIds.generateRowID(table._id!), + id: generator.natural(), + name: generator.first(), + }, + ], + } + ) - expect(result).toEqual({ - allValid: true, - errors: {}, - invalidColumns: [], - schemaValidation: { - _id: true, - id: true, - name: true, - }, + expect(result).toEqual({ + allValid: true, + errors: {}, + invalidColumns: [], + schemaValidation: { + _id: true, + id: true, + name: true, + }, + }) }) - }) + }) }) - }) - } -) + } + ) +} diff --git a/packages/server/src/api/routes/tests/viewV2.spec.ts b/packages/server/src/api/routes/tests/viewV2.spec.ts index 63d315cea9..6e82395e19 100644 --- a/packages/server/src/api/routes/tests/viewV2.spec.ts +++ b/packages/server/src/api/routes/tests/viewV2.spec.ts @@ -44,158 +44,163 @@ import merge from "lodash/merge" import { quotas } from "@budibase/pro" import { db, roles, context } from "@budibase/backend-core" -datasourceDescribe( - { name: "/v2/views (%s)", exclude: [DatabaseName.MONGODB] }, - ({ config, isInternal, dsProvider }) => { - let table: Table - let rawDatasource: Datasource | undefined - let datasource: Datasource | undefined +const descriptions = datasourceDescribe({ exclude: [DatabaseName.MONGODB] }) - function saveTableRequest( - ...overrides: Partial>[] - ): SaveTableRequest { - const req: SaveTableRequest = { - name: generator.guid().replaceAll("-", "").substring(0, 16), - type: "table", - sourceType: datasource - ? TableSourceType.EXTERNAL - : TableSourceType.INTERNAL, - sourceId: datasource ? datasource._id! 
: INTERNAL_TABLE_SOURCE_ID, - primary: ["id"], - schema: { - id: { - type: FieldType.NUMBER, - name: "id", - autocolumn: true, - constraints: { - presence: true, +if (descriptions.length) { + describe.each(descriptions)( + "/v2/views ($dbName)", + ({ config, isInternal, dsProvider }) => { + let table: Table + let rawDatasource: Datasource | undefined + let datasource: Datasource | undefined + + function saveTableRequest( + ...overrides: Partial>[] + ): SaveTableRequest { + const req: SaveTableRequest = { + name: generator.guid().replaceAll("-", "").substring(0, 16), + type: "table", + sourceType: datasource + ? TableSourceType.EXTERNAL + : TableSourceType.INTERNAL, + sourceId: datasource ? datasource._id! : INTERNAL_TABLE_SOURCE_ID, + primary: ["id"], + schema: { + id: { + type: FieldType.NUMBER, + name: "id", + autocolumn: true, + constraints: { + presence: true, + }, }, }, - }, + } + return merge(req, ...overrides) } - return merge(req, ...overrides) - } - function priceTable(): SaveTableRequest { - return saveTableRequest({ - schema: { - Price: { - type: FieldType.NUMBER, - name: "Price", - constraints: {}, - }, - Category: { - type: FieldType.STRING, - name: "Category", - constraints: { - type: "string", + function priceTable(): SaveTableRequest { + return saveTableRequest({ + schema: { + Price: { + type: FieldType.NUMBER, + name: "Price", + constraints: {}, + }, + Category: { + type: FieldType.STRING, + name: "Category", + constraints: { + type: "string", + }, }, }, - }, - }) - } - - beforeAll(async () => { - await config.init() - - const ds = await dsProvider() - rawDatasource = ds.rawDatasource - datasource = ds.datasource - table = await config.api.table.save(priceTable()) - }) - - beforeEach(() => { - jest.clearAllMocks() - mocks.licenses.useCloudFree() - }) - - describe("view crud", () => { - describe("create", () => { - it("persist the view when the view is successfully created", async () => { - const newView: CreateViewRequest = { - name: generator.name(), - tableId: table._id!, - schema: { - id: { visible: true }, - }, - } - const res = await config.api.viewV2.create(newView) - - expect(res).toEqual({ - ...newView, - id: expect.stringMatching(new RegExp(`${table._id!}_`)), - version: 2, - }) }) + } - it("can persist views with all fields", async () => { - const newView: Required> = { - name: generator.name(), - tableId: table._id!, - primaryDisplay: "id", - queryUI: { - groups: [ - { - filters: [ + beforeAll(async () => { + await config.init() + + const ds = await dsProvider() + rawDatasource = ds.rawDatasource + datasource = ds.datasource + table = await config.api.table.save(priceTable()) + }) + + beforeEach(() => { + jest.clearAllMocks() + mocks.licenses.useCloudFree() + }) + + describe("view crud", () => { + describe("create", () => { + it("persist the view when the view is successfully created", async () => { + const newView: CreateViewRequest = { + name: generator.name(), + tableId: table._id!, + schema: { + id: { visible: true }, + }, + } + const res = await config.api.viewV2.create(newView) + + expect(res).toEqual({ + ...newView, + id: expect.stringMatching(new RegExp(`${table._id!}_`)), + version: 2, + }) + }) + + it("can persist views with all fields", async () => { + const newView: Required> = + { + name: generator.name(), + tableId: table._id!, + primaryDisplay: "id", + queryUI: { + groups: [ { - operator: BasicOperator.EQUAL, - field: "field", - value: "value", - }, - ], - }, - ], - }, - sort: { - field: "fieldToSort", - order: SortOrder.DESCENDING, - type: 
SortType.STRING, - }, - schema: { - id: { visible: true }, - Price: { - visible: true, - }, - }, - } - const res = await config.api.viewV2.create(newView) - - const expected: ViewV2 = { - ...newView, - schema: { - id: { visible: true }, - Price: { - visible: true, - }, - }, - query: { - onEmptyFilter: EmptyFilterOption.RETURN_ALL, - $and: { - conditions: [ - { - $and: { - conditions: [ + filters: [ { - equal: { - field: "value", - }, + operator: BasicOperator.EQUAL, + field: "field", + value: "value", }, ], }, + ], + }, + sort: { + field: "fieldToSort", + order: SortOrder.DESCENDING, + type: SortType.STRING, + }, + schema: { + id: { visible: true }, + Price: { + visible: true, }, - ], + }, + } + const res = await config.api.viewV2.create(newView) + + const expected: ViewV2 = { + ...newView, + schema: { + id: { visible: true }, + Price: { + visible: true, + }, }, - }, - id: expect.any(String), - version: 2, - } + query: { + onEmptyFilter: EmptyFilterOption.RETURN_ALL, + $and: { + conditions: [ + { + $and: { + conditions: [ + { + equal: { + field: "value", + }, + }, + ], + }, + }, + ], + }, + }, + id: expect.any(String), + version: 2, + } - expect(res).toEqual(expected) - }) + expect(res).toEqual(expected) + }) - it("can create a view with just a query field, no queryUI, for backwards compatibility", async () => { - const newView: Required> = - { + it("can create a view with just a query field, no queryUI, for backwards compatibility", async () => { + const newView: Required< + Omit + > = { name: generator.name(), tableId: table._id!, primaryDisplay: "id", @@ -218,180 +223,194 @@ datasourceDescribe( }, }, } - const res = await config.api.viewV2.create(newView) + const res = await config.api.viewV2.create(newView) - const expected: ViewV2 = { - ...newView, - schema: { - id: { visible: true }, - Price: { - visible: true, - }, - }, - queryUI: { - logicalOperator: UILogicalOperator.ALL, - onEmptyFilter: EmptyFilterOption.RETURN_ALL, - groups: [ - { - logicalOperator: UILogicalOperator.ALL, - filters: [ - { - operator: BasicOperator.EQUAL, - field: "field", - value: "value", - }, - ], + const expected: ViewV2 = { + ...newView, + schema: { + id: { visible: true }, + Price: { + visible: true, }, - ], - }, - id: expect.any(String), - version: 2, - } - - expect(res).toEqual(expected) - }) - - it("persist only UI schema overrides", async () => { - const newView: CreateViewRequest = { - name: generator.name(), - tableId: table._id!, - schema: { - id: { - name: "id", - type: FieldType.NUMBER, - visible: true, }, - Price: { - name: "Price", - type: FieldType.NUMBER, - visible: true, - order: 1, - width: 100, - }, - Category: { - name: "Category", - type: FieldType.STRING, - visible: false, - icon: "ic", - }, - } as ViewV2Schema, - } - - const createdView = await config.api.viewV2.create(newView) - - expect(createdView).toEqual({ - ...newView, - schema: { - id: { visible: true }, - Price: { - visible: true, - order: 1, - width: 100, - }, - Category: { - visible: false, - icon: "ic", - }, - }, - id: createdView.id, - version: 2, - }) - }) - - it("will not throw an exception if the schema is 'deleting' non UI fields", async () => { - const newView: CreateViewRequest = { - name: generator.name(), - tableId: table._id!, - schema: { - id: { - name: "id", - type: FieldType.NUMBER, - autocolumn: true, - visible: true, - }, - Price: { - name: "Price", - type: FieldType.NUMBER, - visible: true, - }, - Category: { - name: "Category", - type: FieldType.STRING, - }, - } as ViewV2Schema, - } - - await 
config.api.viewV2.create(newView, { - status: 201, - }) - }) - - it("does not persist non-visible fields", async () => { - const newView: CreateViewRequest = { - name: generator.name(), - tableId: table._id!, - primaryDisplay: "id", - schema: { - id: { visible: true }, - Price: { visible: true }, - Category: { visible: false }, - }, - } - const res = await config.api.viewV2.create(newView) - - expect(res).toEqual({ - ...newView, - schema: { - id: { visible: true }, - Price: { visible: true }, - Category: { visible: false }, - }, - id: expect.any(String), - version: 2, - }) - }) - - it("throws bad request when the schema fields are not valid", async () => { - const newView: CreateViewRequest = { - name: generator.name(), - tableId: table._id!, - schema: { - id: { visible: true }, - nonExisting: { - visible: true, - }, - }, - } - await config.api.viewV2.create(newView, { - status: 400, - body: { - message: - 'Field "nonExisting" is not valid for the requested table', - }, - }) - }) - - describe("readonly fields", () => { - it("readonly fields are persisted", async () => { - const table = await config.api.table.save( - saveTableRequest({ - schema: { - name: { - name: "name", - type: FieldType.STRING, + queryUI: { + logicalOperator: UILogicalOperator.ALL, + onEmptyFilter: EmptyFilterOption.RETURN_ALL, + groups: [ + { + logicalOperator: UILogicalOperator.ALL, + filters: [ + { + operator: BasicOperator.EQUAL, + field: "field", + value: "value", + }, + ], }, - description: { - name: "description", - type: FieldType.STRING, - }, - }, - }) - ) + ], + }, + id: expect.any(String), + version: 2, + } + expect(res).toEqual(expected) + }) + + it("persist only UI schema overrides", async () => { const newView: CreateViewRequest = { name: generator.name(), tableId: table._id!, schema: { + id: { + name: "id", + type: FieldType.NUMBER, + visible: true, + }, + Price: { + name: "Price", + type: FieldType.NUMBER, + visible: true, + order: 1, + width: 100, + }, + Category: { + name: "Category", + type: FieldType.STRING, + visible: false, + icon: "ic", + }, + } as ViewV2Schema, + } + + const createdView = await config.api.viewV2.create(newView) + + expect(createdView).toEqual({ + ...newView, + schema: { + id: { visible: true }, + Price: { + visible: true, + order: 1, + width: 100, + }, + Category: { + visible: false, + icon: "ic", + }, + }, + id: createdView.id, + version: 2, + }) + }) + + it("will not throw an exception if the schema is 'deleting' non UI fields", async () => { + const newView: CreateViewRequest = { + name: generator.name(), + tableId: table._id!, + schema: { + id: { + name: "id", + type: FieldType.NUMBER, + autocolumn: true, + visible: true, + }, + Price: { + name: "Price", + type: FieldType.NUMBER, + visible: true, + }, + Category: { + name: "Category", + type: FieldType.STRING, + }, + } as ViewV2Schema, + } + + await config.api.viewV2.create(newView, { + status: 201, + }) + }) + + it("does not persist non-visible fields", async () => { + const newView: CreateViewRequest = { + name: generator.name(), + tableId: table._id!, + primaryDisplay: "id", + schema: { + id: { visible: true }, + Price: { visible: true }, + Category: { visible: false }, + }, + } + const res = await config.api.viewV2.create(newView) + + expect(res).toEqual({ + ...newView, + schema: { + id: { visible: true }, + Price: { visible: true }, + Category: { visible: false }, + }, + id: expect.any(String), + version: 2, + }) + }) + + it("throws bad request when the schema fields are not valid", async () => { + const newView: 
CreateViewRequest = { + name: generator.name(), + tableId: table._id!, + schema: { + id: { visible: true }, + nonExisting: { + visible: true, + }, + }, + } + await config.api.viewV2.create(newView, { + status: 400, + body: { + message: + 'Field "nonExisting" is not valid for the requested table', + }, + }) + }) + + describe("readonly fields", () => { + it("readonly fields are persisted", async () => { + const table = await config.api.table.save( + saveTableRequest({ + schema: { + name: { + name: "name", + type: FieldType.STRING, + }, + description: { + name: "description", + type: FieldType.STRING, + }, + }, + }) + ) + + const newView: CreateViewRequest = { + name: generator.name(), + tableId: table._id!, + schema: { + id: { visible: true }, + name: { + visible: true, + readonly: true, + }, + description: { + visible: true, + readonly: true, + }, + }, + } + + const res = await config.api.viewV2.create(newView) + expect(res.schema).toEqual({ id: { visible: true }, name: { visible: true, @@ -401,63 +420,122 @@ datasourceDescribe( visible: true, readonly: true, }, - }, - } - - const res = await config.api.viewV2.create(newView) - expect(res.schema).toEqual({ - id: { visible: true }, - name: { - visible: true, - readonly: true, - }, - description: { - visible: true, - readonly: true, - }, - }) - }) - - it("required fields cannot be marked as readonly", async () => { - const table = await config.api.table.save( - saveTableRequest({ - schema: { - name: { - name: "name", - type: FieldType.STRING, - constraints: { presence: true }, - }, - description: { - name: "description", - type: FieldType.STRING, - }, - }, }) - ) + }) - const newView: CreateViewRequest = { - name: generator.name(), - tableId: table._id!, - schema: { - id: { visible: true }, - name: { - visible: true, - readonly: true, + it("required fields cannot be marked as readonly", async () => { + const table = await config.api.table.save( + saveTableRequest({ + schema: { + name: { + name: "name", + type: FieldType.STRING, + constraints: { presence: true }, + }, + description: { + name: "description", + type: FieldType.STRING, + }, + }, + }) + ) + + const newView: CreateViewRequest = { + name: generator.name(), + tableId: table._id!, + schema: { + id: { visible: true }, + name: { + visible: true, + readonly: true, + }, }, - }, - } + } - await config.api.viewV2.create(newView, { - status: 400, - body: { - message: - 'You can\'t make "name" readonly because it is a required field.', + await config.api.viewV2.create(newView, { status: 400, - }, + body: { + message: + 'You can\'t make "name" readonly because it is a required field.', + status: 400, + }, + }) + }) + + it("readonly fields must be visible", async () => { + const table = await config.api.table.save( + saveTableRequest({ + schema: { + name: { + name: "name", + type: FieldType.STRING, + }, + description: { + name: "description", + type: FieldType.STRING, + }, + }, + }) + ) + + const newView: CreateViewRequest = { + name: generator.name(), + tableId: table._id!, + schema: { + id: { visible: true }, + name: { + visible: false, + readonly: true, + }, + }, + } + + await config.api.viewV2.create(newView, { + status: 400, + body: { + message: + 'Field "name" must be visible if you want to make it readonly', + status: 400, + }, + }) + }) + + it("readonly fields can be used on free license", async () => { + mocks.licenses.useCloudFree() + const table = await config.api.table.save( + saveTableRequest({ + schema: { + name: { + name: "name", + type: FieldType.STRING, + }, + description: { + 
name: "description", + type: FieldType.STRING, + }, + }, + }) + ) + + const newView: CreateViewRequest = { + name: generator.name(), + tableId: table._id!, + schema: { + id: { visible: true }, + name: { + visible: true, + readonly: true, + }, + }, + } + + await config.api.viewV2.create(newView, { + status: 201, + }) }) }) - it("readonly fields must be visible", async () => { + it("display fields must be visible", async () => { const table = await config.api.table.save( saveTableRequest({ schema: { @@ -476,11 +554,11 @@ datasourceDescribe( const newView: CreateViewRequest = { name: generator.name(), tableId: table._id!, + primaryDisplay: "name", schema: { id: { visible: true }, name: { visible: false, - readonly: true, }, }, } @@ -489,14 +567,13 @@ datasourceDescribe( status: 400, body: { message: - 'Field "name" must be visible if you want to make it readonly', + 'You can\'t hide "name" because it is the display column.', status: 400, }, }) }) - it("readonly fields can be used on free license", async () => { - mocks.licenses.useCloudFree() + it("display fields can be readonly", async () => { const table = await config.api.table.save( saveTableRequest({ schema: { @@ -515,6 +592,7 @@ datasourceDescribe( const newView: CreateViewRequest = { name: generator.name(), tableId: table._id!, + primaryDisplay: "name", schema: { id: { visible: true }, name: { @@ -528,134 +606,9 @@ datasourceDescribe( status: 201, }) }) - }) - it("display fields must be visible", async () => { - const table = await config.api.table.save( - saveTableRequest({ - schema: { - name: { - name: "name", - type: FieldType.STRING, - }, - description: { - name: "description", - type: FieldType.STRING, - }, - }, - }) - ) - - const newView: CreateViewRequest = { - name: generator.name(), - tableId: table._id!, - primaryDisplay: "name", - schema: { - id: { visible: true }, - name: { - visible: false, - }, - }, - } - - await config.api.viewV2.create(newView, { - status: 400, - body: { - message: - 'You can\'t hide "name" because it is the display column.', - status: 400, - }, - }) - }) - - it("display fields can be readonly", async () => { - const table = await config.api.table.save( - saveTableRequest({ - schema: { - name: { - name: "name", - type: FieldType.STRING, - }, - description: { - name: "description", - type: FieldType.STRING, - }, - }, - }) - ) - - const newView: CreateViewRequest = { - name: generator.name(), - tableId: table._id!, - primaryDisplay: "name", - schema: { - id: { visible: true }, - name: { - visible: true, - readonly: true, - }, - }, - } - - await config.api.viewV2.create(newView, { - status: 201, - }) - }) - - it("can create a view with calculation fields", async () => { - let view = await config.api.viewV2.create({ - tableId: table._id!, - name: generator.guid(), - type: ViewV2Type.CALCULATION, - schema: { - sum: { - visible: true, - calculationType: CalculationType.SUM, - field: "Price", - }, - }, - }) - - expect(Object.keys(view.schema!)).toHaveLength(1) - - let sum = view.schema!.sum as NumericCalculationFieldMetadata - expect(sum).toBeDefined() - expect(sum.calculationType).toEqual(CalculationType.SUM) - expect(sum.field).toEqual("Price") - - view = await config.api.viewV2.get(view.id) - sum = view.schema!.sum as NumericCalculationFieldMetadata - expect(sum).toBeDefined() - expect(sum.calculationType).toEqual(CalculationType.SUM) - expect(sum.field).toEqual("Price") - }) - - it("cannot create a view with calculation fields unless it has the right type", async () => { - await config.api.viewV2.create( - 
{ - tableId: table._id!, - name: generator.guid(), - schema: { - sum: { - visible: true, - calculationType: CalculationType.SUM, - field: "Price", - }, - }, - }, - { - status: 400, - body: { - message: - "Calculation fields are not allowed in non-calculation views", - }, - } - ) - }) - - it("cannot create a calculation view with more than 5 aggregations", async () => { - await config.api.viewV2.create( - { + it("can create a view with calculation fields", async () => { + let view = await config.api.viewV2.create({ tableId: table._id!, name: generator.guid(), type: ViewV2Type.CALCULATION, @@ -665,622 +618,33 @@ datasourceDescribe( calculationType: CalculationType.SUM, field: "Price", }, - count: { - visible: true, - calculationType: CalculationType.COUNT, - field: "Price", - }, - countDistinct: { - visible: true, - calculationType: CalculationType.COUNT, - distinct: true, - field: "Price", - }, - min: { - visible: true, - calculationType: CalculationType.MIN, - field: "Price", - }, - max: { - visible: true, - calculationType: CalculationType.MAX, - field: "Price", - }, - avg: { - visible: true, - calculationType: CalculationType.AVG, - field: "Price", - }, - }, - }, - { - status: 400, - body: { - message: - "Calculation views can only have a maximum of 5 fields", - }, - } - ) - }) - - it("cannot create a calculation view with duplicate calculations", async () => { - await config.api.viewV2.create( - { - tableId: table._id!, - name: generator.guid(), - type: ViewV2Type.CALCULATION, - schema: { - sum: { - visible: true, - calculationType: CalculationType.SUM, - field: "Price", - }, - sum2: { - visible: true, - calculationType: CalculationType.SUM, - field: "Price", - }, - }, - }, - { - status: 400, - body: { - message: - 'Duplicate calculation on field "Price", calculation type "sum"', - }, - } - ) - }) - - it("finds duplicate counts", async () => { - await config.api.viewV2.create( - { - tableId: table._id!, - name: generator.guid(), - type: ViewV2Type.CALCULATION, - schema: { - count: { - visible: true, - calculationType: CalculationType.COUNT, - field: "Price", - }, - count2: { - visible: true, - calculationType: CalculationType.COUNT, - field: "Price", - }, - }, - }, - { - status: 400, - body: { - message: - 'Duplicate calculation on field "Price", calculation type "count"', - }, - } - ) - }) - - it("finds duplicate count distincts", async () => { - await config.api.viewV2.create( - { - tableId: table._id!, - name: generator.guid(), - type: ViewV2Type.CALCULATION, - schema: { - count: { - visible: true, - calculationType: CalculationType.COUNT, - distinct: true, - field: "Price", - }, - count2: { - visible: true, - calculationType: CalculationType.COUNT, - distinct: true, - field: "Price", - }, - }, - }, - { - status: 400, - body: { - message: - 'Duplicate calculation on field "Price", calculation type "count distinct"', - }, - } - ) - }) - - it("does not confuse counts and count distincts in the duplicate check", async () => { - await config.api.viewV2.create({ - tableId: table._id!, - name: generator.guid(), - type: ViewV2Type.CALCULATION, - schema: { - count: { - visible: true, - calculationType: CalculationType.COUNT, - field: "Price", - }, - count2: { - visible: true, - calculationType: CalculationType.COUNT, - distinct: true, - field: "Price", - }, - }, - }) - }) - - it("does not confuse counts on different fields in the duplicate check", async () => { - await config.api.viewV2.create({ - tableId: table._id!, - name: generator.guid(), - type: ViewV2Type.CALCULATION, - schema: { - 
count: { - visible: true, - calculationType: CalculationType.COUNT, - field: "Price", - }, - count2: { - visible: true, - calculationType: CalculationType.COUNT, - field: "Category", - }, - }, - }) - }) - - it("does not get confused when a calculation field shadows a basic one", async () => { - const table = await config.api.table.save( - saveTableRequest({ - schema: { - age: { - name: "age", - type: FieldType.NUMBER, - }, }, }) - ) - await config.api.row.bulkImport(table._id!, { - rows: [{ age: 1 }, { age: 2 }, { age: 3 }], + expect(Object.keys(view.schema!)).toHaveLength(1) + + let sum = view.schema!.sum as NumericCalculationFieldMetadata + expect(sum).toBeDefined() + expect(sum.calculationType).toEqual(CalculationType.SUM) + expect(sum.field).toEqual("Price") + + view = await config.api.viewV2.get(view.id) + sum = view.schema!.sum as NumericCalculationFieldMetadata + expect(sum).toBeDefined() + expect(sum.calculationType).toEqual(CalculationType.SUM) + expect(sum.field).toEqual("Price") }) - const view = await config.api.viewV2.create({ - tableId: table._id!, - name: generator.guid(), - type: ViewV2Type.CALCULATION, - schema: { - age: { - visible: true, - calculationType: CalculationType.SUM, - field: "age", - }, - }, - }) - - const { rows } = await config.api.row.search(view.id) - expect(rows).toHaveLength(1) - expect(rows[0].age).toEqual(6) - }) - - // We don't allow the creation of tables with most JsonTypes when using - // external datasources. - isInternal && - it("cannot use complex types as group-by fields", async () => { - for (const type of JsonTypes) { - const field = { name: "field", type } as FieldSchema - const table = await config.api.table.save( - saveTableRequest({ schema: { field } }) - ) - await config.api.viewV2.create( - { - tableId: table._id!, - name: generator.guid(), - type: ViewV2Type.CALCULATION, - schema: { - field: { visible: true }, - }, - }, - { - status: 400, - body: { - message: `Grouping by fields of type "${type}" is not supported`, - }, - } - ) - } - }) - - isInternal && - it("shouldn't trigger a complex type check on a group by field if field is invisible", async () => { - const table = await config.api.table.save( - saveTableRequest({ - schema: { - field: { - name: "field", - type: FieldType.JSON, - }, - }, - }) - ) - + it("cannot create a view with calculation fields unless it has the right type", async () => { await config.api.viewV2.create( { tableId: table._id!, name: generator.guid(), - type: ViewV2Type.CALCULATION, schema: { - field: { visible: false }, - }, - }, - { - status: 201, - } - ) - }) - }) - - describe("update", () => { - let view: ViewV2 - let table: Table - - beforeEach(async () => { - table = await config.api.table.save(priceTable()) - - view = await config.api.viewV2.create({ - tableId: table._id!, - name: generator.guid(), - schema: { - id: { visible: true }, - }, - }) - }) - - it("can update an existing view data", async () => { - const tableId = table._id! - await config.api.viewV2.update({ - ...view, - query: [ - { - operator: BasicOperator.EQUAL, - field: "newField", - value: "thatValue", - }, - ], - }) - - const expected: ViewV2 = { - ...view, - query: [ - { - operator: BasicOperator.EQUAL, - field: "newField", - value: "thatValue", - }, - ], - // Should also update queryUI because query was not previously set. 
-            queryUI: {
-              onEmptyFilter: EmptyFilterOption.RETURN_ALL,
-              logicalOperator: UILogicalOperator.ALL,
-              groups: [
-                {
-                  logicalOperator: UILogicalOperator.ALL,
-                  filters: [
-                    {
-                      operator: BasicOperator.EQUAL,
-                      field: "newField",
-                      value: "thatValue",
-                    },
-                  ],
-                },
-              ],
-            },
-            schema: expect.anything(),
-          }
-
-          expect((await config.api.table.get(tableId)).views).toEqual({
-            [view.name]: expected,
-          })
-        })
-
-        it("can update all fields", async () => {
-          const tableId = table._id!
-
-          const updatedData: Required<
-            Omit
-          > = {
-            version: view.version,
-            id: view.id,
-            tableId,
-            name: view.name,
-            primaryDisplay: "Price",
-            query: [
-              {
-                operator: BasicOperator.EQUAL,
-                field: "newField",
-                value: "newValue",
-              },
-            ],
-            sort: {
-              field: generator.word(),
-              order: SortOrder.DESCENDING,
-              type: SortType.STRING,
-            },
-            schema: {
-              id: { visible: true },
-              Category: {
-                visible: false,
-              },
-              Price: {
-                visible: true,
-                readonly: true,
-              },
-            },
-          }
-          await config.api.viewV2.update(updatedData)
-
-          const expected: ViewV2 = {
-            ...updatedData,
-            // queryUI gets generated from query
-            queryUI: {
-              logicalOperator: UILogicalOperator.ALL,
-              onEmptyFilter: EmptyFilterOption.RETURN_ALL,
-              groups: [
-                {
-                  logicalOperator: UILogicalOperator.ALL,
-                  filters: [
-                    {
-                      operator: BasicOperator.EQUAL,
-                      field: "newField",
-                      value: "newValue",
-                    },
-                  ],
-                },
-              ],
-            },
-            schema: {
-              ...table.schema,
-              id: expect.objectContaining({
-                visible: true,
-              }),
-              Category: expect.objectContaining({
-                visible: false,
-              }),
-              Price: expect.objectContaining({
-                visible: true,
-                readonly: true,
-              }),
-            },
-          }
-
-          expect((await config.api.table.get(tableId)).views).toEqual({
-            [view.name]: expected,
-          })
-        })
-
-        it("can update an existing view name", async () => {
-          const tableId = table._id!
-          const newName = generator.guid()
-          await config.api.viewV2.update({ ...view, name: newName })
-
-          expect(await config.api.table.get(tableId)).toEqual(
-            expect.objectContaining({
-              views: {
-                [newName]: {
-                  ...view,
-                  name: newName,
-                  schema: expect.anything(),
-                },
-              },
-            })
-          )
-        })
-
-        it("cannot update an unexisting views nor edit ids", async () => {
-          const tableId = table._id!
-          await config.api.viewV2.update(
-            { ...view, id: generator.guid() },
-            { status: 404 }
-          )
-
-          expect(await config.api.table.get(tableId)).toEqual(
-            expect.objectContaining({
-              views: {
-                [view.name]: {
-                  ...view,
-                  schema: expect.anything(),
-                },
-              },
-            })
-          )
-        })
-
-        it("cannot update views with the wrong tableId", async () => {
-          const tableId = table._id!
- await config.api.viewV2.update( - { - ...view, - tableId: generator.guid(), - query: [ - { - operator: BasicOperator.EQUAL, - field: "newField", - value: "thatValue", - }, - ], - }, - { status: 404 } - ) - - expect(await config.api.table.get(tableId)).toEqual( - expect.objectContaining({ - views: { - [view.name]: { - ...view, - schema: expect.anything(), - }, - }, - }) - ) - }) - - isInternal && - it("cannot update views v1", async () => { - const viewV1 = await config.api.legacyView.save({ - tableId: table._id!, - name: generator.guid(), - filters: [], - schema: {}, - }) - - await config.api.viewV2.update(viewV1 as unknown as ViewV2, { - status: 400, - body: { - message: "Only views V2 can be updated", - status: 400, - }, - }) - }) - - it("cannot update the a view with unmatching ids between url and body", async () => { - const anotherView = await config.api.viewV2.create({ - tableId: table._id!, - name: generator.guid(), - schema: { - id: { visible: true }, - }, - }) - const result = await config - .request!.put(`/api/v2/views/${anotherView.id}`) - .send(view) - .set(config.defaultHeaders()) - .expect("Content-Type", /json/) - .expect(400) - - expect(result.body).toEqual({ - message: "View id does not match between the body and the uri path", - status: 400, - }) - }) - - it("updates only UI schema overrides", async () => { - const updatedView = await config.api.viewV2.update({ - ...view, - schema: { - ...view.schema, - Price: { - name: "Price", - type: FieldType.NUMBER, - visible: true, - order: 1, - width: 100, - }, - Category: { - name: "Category", - type: FieldType.STRING, - visible: false, - icon: "ic", - }, - } as ViewV2Schema, - }) - - expect(updatedView).toEqual({ - ...view, - schema: { - id: { visible: true }, - Price: { - visible: true, - order: 1, - width: 100, - }, - Category: { visible: false, icon: "ic" }, - }, - id: view.id, - version: 2, - }) - }) - - it("will not throw an exception if the schema is 'deleting' non UI fields", async () => { - await config.api.viewV2.update( - { - ...view, - schema: { - ...view.schema, - Price: { - name: "Price", - type: FieldType.NUMBER, - visible: true, - }, - Category: { - name: "Category", - type: FieldType.STRING, - }, - } as ViewV2Schema, - }, - { - status: 200, - } - ) - }) - - it("cannot update view type after creation", async () => { - const view = await config.api.viewV2.create({ - tableId: table._id!, - name: generator.guid(), - schema: { - id: { visible: true }, - Price: { - visible: true, - }, - }, - }) - - await config.api.viewV2.update( - { - ...view, - type: ViewV2Type.CALCULATION, - }, - { - status: 400, - body: { - message: "Cannot update view type after creation", - }, - } - ) - }) - - isInternal && - it("updating schema will only validate modified field", async () => { - let view = await config.api.viewV2.create({ - tableId: table._id!, - name: generator.guid(), - schema: { - id: { visible: true }, - Price: { - visible: true, - }, - Category: { visible: true }, - }, - }) - - // Update the view to an invalid state - const tableToUpdate = await config.api.table.get(table._id!) 
- ;(tableToUpdate.views![view.name] as ViewV2).schema!.id.visible = - false - await db.getDB(config.appId!).put(tableToUpdate) - - view = await config.api.viewV2.get(view.id) - await config.api.viewV2.update( - { - ...view, - schema: { - ...view.schema, - Price: { - visible: false, + sum: { + visible: true, + calculationType: CalculationType.SUM, + field: "Price", }, }, }, @@ -1288,172 +652,196 @@ datasourceDescribe( status: 400, body: { message: - 'You can\'t hide "id" because it is a required field.', - status: 400, + "Calculation fields are not allowed in non-calculation views", }, } ) }) - it("can update queryUI field and query gets regenerated", async () => { - await config.api.viewV2.update({ - ...view, - queryUI: { - groups: [ - { - filters: [ - { - operator: BasicOperator.EQUAL, - field: "field", - value: "value", - }, - ], - }, - ], - }, - }) - - let updatedView = await config.api.viewV2.get(view.id) - let expected: SearchFilters = { - onEmptyFilter: EmptyFilterOption.RETURN_ALL, - $and: { - conditions: [ - { - $and: { - conditions: [ - { - equal: { field: "value" }, - }, - ], - }, - }, - ], - }, - } - expect(updatedView.query).toEqual(expected) - - await config.api.viewV2.update({ - ...updatedView, - queryUI: { - groups: [ - { - filters: [ - { - operator: BasicOperator.EQUAL, - field: "newField", - value: "newValue", - }, - ], - }, - ], - }, - }) - - updatedView = await config.api.viewV2.get(view.id) - expected = { - onEmptyFilter: EmptyFilterOption.RETURN_ALL, - $and: { - conditions: [ - { - $and: { - conditions: [ - { - equal: { newField: "newValue" }, - }, - ], - }, - }, - ], - }, - } - expect(updatedView.query).toEqual(expected) - }) - - it("can delete either query and it will get regenerated from queryUI", async () => { - await config.api.viewV2.update({ - ...view, - query: [ + it("cannot create a calculation view with more than 5 aggregations", async () => { + await config.api.viewV2.create( { - operator: BasicOperator.EQUAL, - field: "field", - value: "value", - }, - ], - }) - - let updatedView = await config.api.viewV2.get(view.id) - expect(updatedView.queryUI).toBeDefined() - - await config.api.viewV2.update({ - ...updatedView, - query: undefined, - }) - - updatedView = await config.api.viewV2.get(view.id) - expect(updatedView.query).toBeDefined() - }) - - // This is because the conversion from queryUI -> query loses data, so you - // can't accurately reproduce the original queryUI from the query. If - // query is a LegacyFilter[] we allow it, because for Budibase v3 - // everything in the db had query set to a LegacyFilter[], and there's no - // loss of information converting from a LegacyFilter[] to a - // UISearchFilter. But we convert to a SearchFilters and that can't be - // accurately converted to a UISearchFilter. 
- it("can't regenerate queryUI from a query once it has been generated from a queryUI", async () => { - await config.api.viewV2.update({ - ...view, - queryUI: { - groups: [ - { - filters: [ - { - operator: BasicOperator.EQUAL, - field: "field", - value: "value", - }, - ], + tableId: table._id!, + name: generator.guid(), + type: ViewV2Type.CALCULATION, + schema: { + sum: { + visible: true, + calculationType: CalculationType.SUM, + field: "Price", + }, + count: { + visible: true, + calculationType: CalculationType.COUNT, + field: "Price", + }, + countDistinct: { + visible: true, + calculationType: CalculationType.COUNT, + distinct: true, + field: "Price", + }, + min: { + visible: true, + calculationType: CalculationType.MIN, + field: "Price", + }, + max: { + visible: true, + calculationType: CalculationType.MAX, + field: "Price", + }, + avg: { + visible: true, + calculationType: CalculationType.AVG, + field: "Price", + }, }, - ], - }, + }, + { + status: 400, + body: { + message: + "Calculation views can only have a maximum of 5 fields", + }, + } + ) }) - let updatedView = await config.api.viewV2.get(view.id) - expect(updatedView.query).toBeDefined() - - await config.api.viewV2.update( - { - ...updatedView, - queryUI: undefined, - }, - { - status: 400, - body: { - message: "view is missing queryUI field", + it("cannot create a calculation view with duplicate calculations", async () => { + await config.api.viewV2.create( + { + tableId: table._id!, + name: generator.guid(), + type: ViewV2Type.CALCULATION, + schema: { + sum: { + visible: true, + calculationType: CalculationType.SUM, + field: "Price", + }, + sum2: { + visible: true, + calculationType: CalculationType.SUM, + field: "Price", + }, + }, }, - } - ) - }) + { + status: 400, + body: { + message: + 'Duplicate calculation on field "Price", calculation type "sum"', + }, + } + ) + }) - describe("calculation views", () => { - let table: Table - let view: ViewV2 + it("finds duplicate counts", async () => { + await config.api.viewV2.create( + { + tableId: table._id!, + name: generator.guid(), + type: ViewV2Type.CALCULATION, + schema: { + count: { + visible: true, + calculationType: CalculationType.COUNT, + field: "Price", + }, + count2: { + visible: true, + calculationType: CalculationType.COUNT, + field: "Price", + }, + }, + }, + { + status: 400, + body: { + message: + 'Duplicate calculation on field "Price", calculation type "count"', + }, + } + ) + }) - beforeEach(async () => { - table = await config.api.table.save( + it("finds duplicate count distincts", async () => { + await config.api.viewV2.create( + { + tableId: table._id!, + name: generator.guid(), + type: ViewV2Type.CALCULATION, + schema: { + count: { + visible: true, + calculationType: CalculationType.COUNT, + distinct: true, + field: "Price", + }, + count2: { + visible: true, + calculationType: CalculationType.COUNT, + distinct: true, + field: "Price", + }, + }, + }, + { + status: 400, + body: { + message: + 'Duplicate calculation on field "Price", calculation type "count distinct"', + }, + } + ) + }) + + it("does not confuse counts and count distincts in the duplicate check", async () => { + await config.api.viewV2.create({ + tableId: table._id!, + name: generator.guid(), + type: ViewV2Type.CALCULATION, + schema: { + count: { + visible: true, + calculationType: CalculationType.COUNT, + field: "Price", + }, + count2: { + visible: true, + calculationType: CalculationType.COUNT, + distinct: true, + field: "Price", + }, + }, + }) + }) + + it("does not confuse counts on different fields 
in the duplicate check", async () => { + await config.api.viewV2.create({ + tableId: table._id!, + name: generator.guid(), + type: ViewV2Type.CALCULATION, + schema: { + count: { + visible: true, + calculationType: CalculationType.COUNT, + field: "Price", + }, + count2: { + visible: true, + calculationType: CalculationType.COUNT, + field: "Category", + }, + }, + }) + }) + + it("does not get confused when a calculation field shadows a basic one", async () => { + const table = await config.api.table.save( saveTableRequest({ schema: { - name: { - name: "name", - type: FieldType.STRING, - constraints: { - presence: true, - }, - }, - country: { - name: "country", - type: FieldType.STRING, - }, age: { name: "age", type: FieldType.NUMBER, @@ -1462,14 +850,15 @@ datasourceDescribe( }) ) - view = await config.api.viewV2.create({ + await config.api.row.bulkImport(table._id!, { + rows: [{ age: 1 }, { age: 2 }, { age: 3 }], + }) + + const view = await config.api.viewV2.create({ tableId: table._id!, name: generator.guid(), type: ViewV2Type.CALCULATION, schema: { - country: { - visible: true, - }, age: { visible: true, calculationType: CalculationType.SUM, @@ -1478,774 +867,1200 @@ datasourceDescribe( }, }) - await config.api.row.bulkImport(table._id!, { - rows: [ + const { rows } = await config.api.row.search(view.id) + expect(rows).toHaveLength(1) + expect(rows[0].age).toEqual(6) + }) + + // We don't allow the creation of tables with most JsonTypes when using + // external datasources. + isInternal && + it("cannot use complex types as group-by fields", async () => { + for (const type of JsonTypes) { + const field = { name: "field", type } as FieldSchema + const table = await config.api.table.save( + saveTableRequest({ schema: { field } }) + ) + await config.api.viewV2.create( + { + tableId: table._id!, + name: generator.guid(), + type: ViewV2Type.CALCULATION, + schema: { + field: { visible: true }, + }, + }, + { + status: 400, + body: { + message: `Grouping by fields of type "${type}" is not supported`, + }, + } + ) + } + }) + + isInternal && + it("shouldn't trigger a complex type check on a group by field if field is invisible", async () => { + const table = await config.api.table.save( + saveTableRequest({ + schema: { + field: { + name: "field", + type: FieldType.JSON, + }, + }, + }) + ) + + await config.api.viewV2.create( { - name: "Steve", - age: 30, - country: "UK", + tableId: table._id!, + name: generator.guid(), + type: ViewV2Type.CALCULATION, + schema: { + field: { visible: false }, + }, }, { - name: "Jane", - age: 31, - country: "UK", - }, + status: 201, + } + ) + }) + }) + + describe("update", () => { + let view: ViewV2 + let table: Table + + beforeEach(async () => { + table = await config.api.table.save(priceTable()) + + view = await config.api.viewV2.create({ + tableId: table._id!, + name: generator.guid(), + schema: { + id: { visible: true }, + }, + }) + }) + + it("can update an existing view data", async () => { + const tableId = table._id! 
+ await config.api.viewV2.update({ + ...view, + query: [ { - name: "Ruari", - age: 32, - country: "USA", - }, - { - name: "Alice", - age: 33, - country: "USA", + operator: BasicOperator.EQUAL, + field: "newField", + value: "thatValue", }, ], }) - }) - it("returns the expected rows prior to modification", async () => { - const { rows } = await config.api.row.search(view.id) - expect(rows).toHaveLength(2) - expect(rows).toEqual( - expect.arrayContaining([ + const expected: ViewV2 = { + ...view, + query: [ { - country: "USA", - age: 65, + operator: BasicOperator.EQUAL, + field: "newField", + value: "thatValue", }, - { - country: "UK", - age: 61, - }, - ]) - ) - }) - - it("can remove a group by field", async () => { - delete view.schema!.country - await config.api.viewV2.update(view) - - const { rows } = await config.api.row.search(view.id) - expect(rows).toHaveLength(1) - expect(rows).toEqual( - expect.arrayContaining([ - { - age: 126, - }, - ]) - ) - }) - - it("can remove a calculation field", async () => { - delete view.schema!.age - await config.api.viewV2.update(view) - - const { rows } = await config.api.row.search(view.id) - expect(rows).toHaveLength(4) - - // Because the removal of the calculation field actually makes this - // no longer a calculation view, these rows will now have _id and - // _rev fields. - expect(rows).toEqual( - expect.arrayContaining([ - expect.objectContaining({ country: "UK" }), - expect.objectContaining({ country: "UK" }), - expect.objectContaining({ country: "USA" }), - expect.objectContaining({ country: "USA" }), - ]) - ) - }) - - it("can add a new group by field", async () => { - view.schema!.name = { visible: true } - await config.api.viewV2.update(view) - - const { rows } = await config.api.row.search(view.id) - expect(rows).toHaveLength(4) - expect(rows).toEqual( - expect.arrayContaining([ - { - name: "Steve", - age: 30, - country: "UK", - }, - { - name: "Jane", - age: 31, - country: "UK", - }, - { - name: "Ruari", - age: 32, - country: "USA", - }, - { - name: "Alice", - age: 33, - country: "USA", - }, - ]) - ) - }) - - it("can add a new group by field that is invisible, even if required on the table", async () => { - view.schema!.name = { visible: false } - await config.api.viewV2.update(view) - - const { rows } = await config.api.row.search(view.id) - expect(rows).toHaveLength(2) - expect(rows).toEqual( - expect.arrayContaining([ - { - country: "USA", - age: 65, - }, - { - country: "UK", - age: 61, - }, - ]) - ) - }) - - it("can add a new calculation field", async () => { - view.schema!.count = { - visible: true, - calculationType: CalculationType.COUNT, - field: "age", - } - await config.api.viewV2.update(view) - - const { rows } = await config.api.row.search(view.id) - expect(rows).toHaveLength(2) - expect(rows).toEqual( - expect.arrayContaining([ - { - country: "USA", - age: 65, - count: 2, - }, - { - country: "UK", - age: 61, - count: 2, - }, - ]) - ) - }) - }) - }) - - describe("delete", () => { - let view: ViewV2 - - beforeAll(async () => { - view = await config.api.viewV2.create({ - tableId: table._id!, - name: generator.guid(), - schema: { - id: { visible: true }, - }, - }) - }) - - it("can delete an existing view", async () => { - const tableId = table._id! 
- const getPersistedView = async () => - (await config.api.table.get(tableId)).views![view.name] - - expect(await getPersistedView()).toBeDefined() - - await config.api.viewV2.delete(view.id) - - expect(await getPersistedView()).toBeUndefined() - }) - }) - - describe.each([ - ["from view api", (view: ViewV2) => config.api.viewV2.get(view.id)], - [ - "from table", - async (view: ViewV2) => { - const table = await config.api.table.get(view.tableId) - return table.views![view.name] as ViewV2 - }, - ], - ])("read (%s)", (_, getDelegate) => { - let table: Table - let tableId: string - - beforeEach(async () => { - table = await config.api.table.save( - saveTableRequest({ - schema: { - one: { - type: FieldType.STRING, - name: "one", - }, - two: { - type: FieldType.STRING, - name: "two", - }, - three: { - type: FieldType.STRING, - name: "three", - }, - }, - }) - ) - tableId = table._id! - }) - - it("retrieves the view data with the enriched schema", async () => { - const view = await config.api.viewV2.create({ - tableId, - name: generator.guid(), - schema: { - id: { visible: true }, - one: { visible: true }, - two: { visible: true }, - }, - }) - - expect(await getDelegate(view)).toEqual({ - ...view, - schema: { - id: { ...table.schema["id"], visible: true }, - one: { ...table.schema["one"], visible: true }, - two: { ...table.schema["two"], visible: true }, - three: { ...table.schema["three"], visible: false }, - }, - }) - }) - - it("does not include columns removed from the table", async () => { - const view = await config.api.viewV2.create({ - tableId, - name: generator.guid(), - schema: { - id: { visible: true }, - one: { visible: true }, - two: { visible: true }, - }, - }) - const table = await config.api.table.get(tableId) - const { one: _, ...newSchema } = table.schema - await config.api.table.save({ ...table, schema: newSchema }) - - expect(await getDelegate(view)).toEqual({ - ...view, - schema: { - id: { ...table.schema["id"], visible: true }, - two: { ...table.schema["two"], visible: true }, - three: { ...table.schema["three"], visible: false }, - }, - }) - }) - - it("does not include columns hidden from the table", async () => { - const view = await config.api.viewV2.create({ - tableId, - name: generator.guid(), - schema: { - id: { visible: true }, - one: { visible: true }, - two: { visible: true }, - }, - }) - const table = await config.api.table.get(tableId) - await config.api.table.save({ - ...table, - schema: { - ...table.schema, - two: { ...table.schema["two"], visible: false }, - }, - }) - - expect(await getDelegate(view)).toEqual({ - ...view, - schema: { - id: { ...table.schema["id"], visible: true }, - one: { ...table.schema["one"], visible: true }, - three: { ...table.schema["three"], visible: false }, - }, - }) - }) - - it("should be able to fetch readonly config after downgrades", async () => { - const res = await config.api.viewV2.create({ - name: generator.name(), - tableId: table._id!, - schema: { - id: { visible: true }, - one: { visible: true, readonly: true }, - }, - }) - - mocks.licenses.useCloudFree() - const view = await getDelegate(res) - expect(view.schema?.one).toEqual( - expect.objectContaining({ visible: true, readonly: true }) - ) - }) - - it("should fill in the queryUI field if it's missing", async () => { - const res = await config.api.viewV2.create({ - name: generator.name(), - tableId: tableId, - query: [ - { - operator: BasicOperator.EQUAL, - field: "one", - value: "1", - }, - ], - schema: { - id: { visible: true }, - one: { visible: true }, - }, - }) - - 
const table = await config.api.table.get(tableId) - const rawView = table.views![res.name] as ViewV2 - delete rawView.queryUI - - await context.doInAppContext(config.getAppId(), async () => { - const db = context.getAppDB() - - if (!rawDatasource) { - await db.put(table) - } else { - const ds = await config.api.datasource.get(datasource!._id!) - ds.entities![table.name] = table - const updatedDs = { - ...rawDatasource, - _id: ds._id, - _rev: ds._rev, - entities: ds.entities, - } - await db.put(updatedDs) - } - }) - - const view = await getDelegate(res) - const expected: UISearchFilter = { - onEmptyFilter: EmptyFilterOption.RETURN_ALL, - logicalOperator: UILogicalOperator.ALL, - groups: [ - { + ], + // Should also update queryUI because query was not previously set. + queryUI: { + onEmptyFilter: EmptyFilterOption.RETURN_ALL, logicalOperator: UILogicalOperator.ALL, - filters: [ + groups: [ { - operator: BasicOperator.EQUAL, - field: "one", - value: "1", + logicalOperator: UILogicalOperator.ALL, + filters: [ + { + operator: BasicOperator.EQUAL, + field: "newField", + value: "thatValue", + }, + ], }, ], }, - ], - } - expect(view.queryUI).toEqual(expected) - }) - }) + schema: expect.anything(), + } - describe("updating table schema", () => { - describe("existing columns changed to required", () => { - beforeEach(async () => { - table = await config.api.table.save( - saveTableRequest({ - schema: { - id: { - name: "id", - type: FieldType.NUMBER, - autocolumn: true, - }, - name: { - name: "name", - type: FieldType.STRING, - }, - }, - }) - ) - }) - - it("allows updating when no views constrains the field", async () => { - await config.api.viewV2.create({ - name: "view a", - tableId: table._id!, - schema: { - id: { visible: true }, - name: { visible: true }, - }, + expect((await config.api.table.get(tableId)).views).toEqual({ + [view.name]: expected, }) - - table = await config.api.table.get(table._id!) - await config.api.table.save( - { - ...table, - schema: { - ...table.schema, - name: { - name: "name", - type: FieldType.STRING, - constraints: { presence: { allowEmpty: false } }, - }, - }, - }, - { status: 200 } - ) }) - it("rejects if field is readonly in any view", async () => { - await config.api.viewV2.create({ - name: "view a", - tableId: table._id!, + it("can update all fields", async () => { + const tableId = table._id! + + const updatedData: Required< + Omit + > = { + version: view.version, + id: view.id, + tableId, + name: view.name, + primaryDisplay: "Price", + query: [ + { + operator: BasicOperator.EQUAL, + field: "newField", + value: "newValue", + }, + ], + sort: { + field: generator.word(), + order: SortOrder.DESCENDING, + type: SortType.STRING, + }, schema: { id: { visible: true }, - name: { + Category: { + visible: false, + }, + Price: { visible: true, readonly: true, }, }, - }) + } + await config.api.viewV2.update(updatedData) - table = await config.api.table.get(table._id!) 
- await config.api.table.save( - { - ...table, - schema: { - ...table.schema, - name: { - name: "name", - type: FieldType.STRING, - constraints: { presence: true }, + const expected: ViewV2 = { + ...updatedData, + // queryUI gets generated from query + queryUI: { + logicalOperator: UILogicalOperator.ALL, + onEmptyFilter: EmptyFilterOption.RETURN_ALL, + groups: [ + { + logicalOperator: UILogicalOperator.ALL, + filters: [ + { + operator: BasicOperator.EQUAL, + field: "newField", + value: "newValue", + }, + ], + }, + ], + }, + schema: { + ...table.schema, + id: expect.objectContaining({ + visible: true, + }), + Category: expect.objectContaining({ + visible: false, + }), + Price: expect.objectContaining({ + visible: true, + readonly: true, + }), + }, + } + + expect((await config.api.table.get(tableId)).views).toEqual({ + [view.name]: expected, + }) + }) + + it("can update an existing view name", async () => { + const tableId = table._id! + const newName = generator.guid() + await config.api.viewV2.update({ ...view, name: newName }) + + expect(await config.api.table.get(tableId)).toEqual( + expect.objectContaining({ + views: { + [newName]: { + ...view, + name: newName, + schema: expect.anything(), }, }, + }) + ) + }) + + it("cannot update an unexisting views nor edit ids", async () => { + const tableId = table._id! + await config.api.viewV2.update( + { ...view, id: generator.guid() }, + { status: 404 } + ) + + expect(await config.api.table.get(tableId)).toEqual( + expect.objectContaining({ + views: { + [view.name]: { + ...view, + schema: expect.anything(), + }, + }, + }) + ) + }) + + it("cannot update views with the wrong tableId", async () => { + const tableId = table._id! + await config.api.viewV2.update( + { + ...view, + tableId: generator.guid(), + query: [ + { + operator: BasicOperator.EQUAL, + field: "newField", + value: "thatValue", + }, + ], + }, + { status: 404 } + ) + + expect(await config.api.table.get(tableId)).toEqual( + expect.objectContaining({ + views: { + [view.name]: { + ...view, + schema: expect.anything(), + }, + }, + }) + ) + }) + + isInternal && + it("cannot update views v1", async () => { + const viewV1 = await config.api.legacyView.save({ + tableId: table._id!, + name: generator.guid(), + filters: [], + schema: {}, + }) + + await config.api.viewV2.update(viewV1 as unknown as ViewV2, { + status: 400, + body: { + message: "Only views V2 can be updated", + status: 400, + }, + }) + }) + + it("cannot update the a view with unmatching ids between url and body", async () => { + const anotherView = await config.api.viewV2.create({ + tableId: table._id!, + name: generator.guid(), + schema: { + id: { visible: true }, + }, + }) + const result = await config + .request!.put(`/api/v2/views/${anotherView.id}`) + .send(view) + .set(config.defaultHeaders()) + .expect("Content-Type", /json/) + .expect(400) + + expect(result.body).toEqual({ + message: + "View id does not match between the body and the uri path", + status: 400, + }) + }) + + it("updates only UI schema overrides", async () => { + const updatedView = await config.api.viewV2.update({ + ...view, + schema: { + ...view.schema, + Price: { + name: "Price", + type: FieldType.NUMBER, + visible: true, + order: 1, + width: 100, + }, + Category: { + name: "Category", + type: FieldType.STRING, + visible: false, + icon: "ic", + }, + } as ViewV2Schema, + }) + + expect(updatedView).toEqual({ + ...view, + schema: { + id: { visible: true }, + Price: { + visible: true, + order: 1, + width: 100, + }, + Category: { visible: false, icon: "ic" }, 
+ }, + id: view.id, + version: 2, + }) + }) + + it("will not throw an exception if the schema is 'deleting' non UI fields", async () => { + await config.api.viewV2.update( + { + ...view, + schema: { + ...view.schema, + Price: { + name: "Price", + type: FieldType.NUMBER, + visible: true, + }, + Category: { + name: "Category", + type: FieldType.STRING, + }, + } as ViewV2Schema, + }, + { + status: 200, + } + ) + }) + + it("cannot update view type after creation", async () => { + const view = await config.api.viewV2.create({ + tableId: table._id!, + name: generator.guid(), + schema: { + id: { visible: true }, + Price: { + visible: true, + }, + }, + }) + + await config.api.viewV2.update( + { + ...view, + type: ViewV2Type.CALCULATION, }, { status: 400, body: { - status: 400, - message: - 'To make field "name" required, this field must be present and writable in views: view a.', + message: "Cannot update view type after creation", }, } ) }) - it("rejects if field is hidden in any view", async () => { - await config.api.viewV2.create({ - name: "view a", - tableId: table._id!, - schema: { id: { visible: true } }, - }) - - table = await config.api.table.get(table._id!) - await config.api.table.save( - { - ...table, + isInternal && + it("updating schema will only validate modified field", async () => { + let view = await config.api.viewV2.create({ + tableId: table._id!, + name: generator.guid(), schema: { - ...table.schema, - name: { - name: "name", - type: FieldType.STRING, - constraints: { presence: true }, + id: { visible: true }, + Price: { + visible: true, + }, + Category: { visible: true }, + }, + }) + + // Update the view to an invalid state + const tableToUpdate = await config.api.table.get(table._id!) + ;(tableToUpdate.views![view.name] as ViewV2).schema!.id.visible = + false + await db.getDB(config.appId!).put(tableToUpdate) + + view = await config.api.viewV2.get(view.id) + await config.api.viewV2.update( + { + ...view, + schema: { + ...view.schema, + Price: { + visible: false, + }, }, }, + { + status: 400, + body: { + message: + 'You can\'t hide "id" because it is a required field.', + status: 400, + }, + } + ) + }) + + it("can update queryUI field and query gets regenerated", async () => { + await config.api.viewV2.update({ + ...view, + queryUI: { + groups: [ + { + filters: [ + { + operator: BasicOperator.EQUAL, + field: "field", + value: "value", + }, + ], + }, + ], + }, + }) + + let updatedView = await config.api.viewV2.get(view.id) + let expected: SearchFilters = { + onEmptyFilter: EmptyFilterOption.RETURN_ALL, + $and: { + conditions: [ + { + $and: { + conditions: [ + { + equal: { field: "value" }, + }, + ], + }, + }, + ], + }, + } + expect(updatedView.query).toEqual(expected) + + await config.api.viewV2.update({ + ...updatedView, + queryUI: { + groups: [ + { + filters: [ + { + operator: BasicOperator.EQUAL, + field: "newField", + value: "newValue", + }, + ], + }, + ], + }, + }) + + updatedView = await config.api.viewV2.get(view.id) + expected = { + onEmptyFilter: EmptyFilterOption.RETURN_ALL, + $and: { + conditions: [ + { + $and: { + conditions: [ + { + equal: { newField: "newValue" }, + }, + ], + }, + }, + ], + }, + } + expect(updatedView.query).toEqual(expected) + }) + + it("can delete either query and it will get regenerated from queryUI", async () => { + await config.api.viewV2.update({ + ...view, + query: [ + { + operator: BasicOperator.EQUAL, + field: "field", + value: "value", + }, + ], + }) + + let updatedView = await config.api.viewV2.get(view.id) + 
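+ // Only `query` was set in the update above, so reading the view back
+ // should show the server-derived `queryUI` (the same behaviour covered by
+ // the "should fill in the queryUI field if it's missing" read test).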
expect(updatedView.queryUI).toBeDefined() + + await config.api.viewV2.update({ + ...updatedView, + query: undefined, + }) + + updatedView = await config.api.viewV2.get(view.id) + expect(updatedView.query).toBeDefined() + }) + + // This is because the conversion from queryUI -> query loses data, so you + // can't accurately reproduce the original queryUI from the query. If + // query is a LegacyFilter[] we allow it, because for Budibase v3 + // everything in the db had query set to a LegacyFilter[], and there's no + // loss of information converting from a LegacyFilter[] to a + // UISearchFilter. But we convert to a SearchFilters and that can't be + // accurately converted to a UISearchFilter. + it("can't regenerate queryUI from a query once it has been generated from a queryUI", async () => { + await config.api.viewV2.update({ + ...view, + queryUI: { + groups: [ + { + filters: [ + { + operator: BasicOperator.EQUAL, + field: "field", + value: "value", + }, + ], + }, + ], + }, + }) + + let updatedView = await config.api.viewV2.get(view.id) + expect(updatedView.query).toBeDefined() + + await config.api.viewV2.update( + { + ...updatedView, + queryUI: undefined, }, { status: 400, body: { - status: 400, - message: - 'To make field "name" required, this field must be present and writable in views: view a.', + message: "view is missing queryUI field", }, } ) }) + + describe("calculation views", () => { + let table: Table + let view: ViewV2 + + beforeEach(async () => { + table = await config.api.table.save( + saveTableRequest({ + schema: { + name: { + name: "name", + type: FieldType.STRING, + constraints: { + presence: true, + }, + }, + country: { + name: "country", + type: FieldType.STRING, + }, + age: { + name: "age", + type: FieldType.NUMBER, + }, + }, + }) + ) + + view = await config.api.viewV2.create({ + tableId: table._id!, + name: generator.guid(), + type: ViewV2Type.CALCULATION, + schema: { + country: { + visible: true, + }, + age: { + visible: true, + calculationType: CalculationType.SUM, + field: "age", + }, + }, + }) + + await config.api.row.bulkImport(table._id!, { + rows: [ + { + name: "Steve", + age: 30, + country: "UK", + }, + { + name: "Jane", + age: 31, + country: "UK", + }, + { + name: "Ruari", + age: 32, + country: "USA", + }, + { + name: "Alice", + age: 33, + country: "USA", + }, + ], + }) + }) + + it("returns the expected rows prior to modification", async () => { + const { rows } = await config.api.row.search(view.id) + expect(rows).toHaveLength(2) + expect(rows).toEqual( + expect.arrayContaining([ + { + country: "USA", + age: 65, + }, + { + country: "UK", + age: 61, + }, + ]) + ) + }) + + it("can remove a group by field", async () => { + delete view.schema!.country + await config.api.viewV2.update(view) + + const { rows } = await config.api.row.search(view.id) + expect(rows).toHaveLength(1) + expect(rows).toEqual( + expect.arrayContaining([ + { + age: 126, + }, + ]) + ) + }) + + it("can remove a calculation field", async () => { + delete view.schema!.age + await config.api.viewV2.update(view) + + const { rows } = await config.api.row.search(view.id) + expect(rows).toHaveLength(4) + + // Because the removal of the calculation field actually makes this + // no longer a calculation view, these rows will now have _id and + // _rev fields. 
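+ // That is also why the assertions below use expect.objectContaining on
+ // the country field instead of matching the whole rows exactly, unlike
+ // the other calculation-view tests in this block.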
+ expect(rows).toEqual( + expect.arrayContaining([ + expect.objectContaining({ country: "UK" }), + expect.objectContaining({ country: "UK" }), + expect.objectContaining({ country: "USA" }), + expect.objectContaining({ country: "USA" }), + ]) + ) + }) + + it("can add a new group by field", async () => { + view.schema!.name = { visible: true } + await config.api.viewV2.update(view) + + const { rows } = await config.api.row.search(view.id) + expect(rows).toHaveLength(4) + expect(rows).toEqual( + expect.arrayContaining([ + { + name: "Steve", + age: 30, + country: "UK", + }, + { + name: "Jane", + age: 31, + country: "UK", + }, + { + name: "Ruari", + age: 32, + country: "USA", + }, + { + name: "Alice", + age: 33, + country: "USA", + }, + ]) + ) + }) + + it("can add a new group by field that is invisible, even if required on the table", async () => { + view.schema!.name = { visible: false } + await config.api.viewV2.update(view) + + const { rows } = await config.api.row.search(view.id) + expect(rows).toHaveLength(2) + expect(rows).toEqual( + expect.arrayContaining([ + { + country: "USA", + age: 65, + }, + { + country: "UK", + age: 61, + }, + ]) + ) + }) + + it("can add a new calculation field", async () => { + view.schema!.count = { + visible: true, + calculationType: CalculationType.COUNT, + field: "age", + } + await config.api.viewV2.update(view) + + const { rows } = await config.api.row.search(view.id) + expect(rows).toHaveLength(2) + expect(rows).toEqual( + expect.arrayContaining([ + { + country: "USA", + age: 65, + count: 2, + }, + { + country: "UK", + age: 61, + count: 2, + }, + ]) + ) + }) + }) }) - describe("foreign relationship columns", () => { - const createAuxTable = () => - config.api.table.save( + describe("delete", () => { + let view: ViewV2 + + beforeAll(async () => { + view = await config.api.viewV2.create({ + tableId: table._id!, + name: generator.guid(), + schema: { + id: { visible: true }, + }, + }) + }) + + it("can delete an existing view", async () => { + const tableId = table._id! + const getPersistedView = async () => + (await config.api.table.get(tableId)).views![view.name] + + expect(await getPersistedView()).toBeDefined() + + await config.api.viewV2.delete(view.id) + + expect(await getPersistedView()).toBeUndefined() + }) + }) + + describe.each([ + ["from view api", (view: ViewV2) => config.api.viewV2.get(view.id)], + [ + "from table", + async (view: ViewV2) => { + const table = await config.api.table.get(view.tableId) + return table.views![view.name] as ViewV2 + }, + ], + ])("read (%s)", (_, getDelegate) => { + let table: Table + let tableId: string + + beforeEach(async () => { + table = await config.api.table.save( saveTableRequest({ - primaryDisplay: "name", schema: { - name: { name: "name", type: FieldType.STRING }, - age: { name: "age", type: FieldType.NUMBER }, + one: { + type: FieldType.STRING, + name: "one", + }, + two: { + type: FieldType.STRING, + name: "two", + }, + three: { + type: FieldType.STRING, + name: "three", + }, }, }) ) + tableId = table._id! 
+ }) - const createMainTable = async ( - links: { - name: string - tableId: string - fk: string - }[] - ) => { - const table = await config.api.table.save( - saveTableRequest({ - schema: {}, - }) - ) + it("retrieves the view data with the enriched schema", async () => { + const view = await config.api.viewV2.create({ + tableId, + name: generator.guid(), + schema: { + id: { visible: true }, + one: { visible: true }, + two: { visible: true }, + }, + }) + + expect(await getDelegate(view)).toEqual({ + ...view, + schema: { + id: { ...table.schema["id"], visible: true }, + one: { ...table.schema["one"], visible: true }, + two: { ...table.schema["two"], visible: true }, + three: { ...table.schema["three"], visible: false }, + }, + }) + }) + + it("does not include columns removed from the table", async () => { + const view = await config.api.viewV2.create({ + tableId, + name: generator.guid(), + schema: { + id: { visible: true }, + one: { visible: true }, + two: { visible: true }, + }, + }) + const table = await config.api.table.get(tableId) + const { one: _, ...newSchema } = table.schema + await config.api.table.save({ ...table, schema: newSchema }) + + expect(await getDelegate(view)).toEqual({ + ...view, + schema: { + id: { ...table.schema["id"], visible: true }, + two: { ...table.schema["two"], visible: true }, + three: { ...table.schema["three"], visible: false }, + }, + }) + }) + + it("does not include columns hidden from the table", async () => { + const view = await config.api.viewV2.create({ + tableId, + name: generator.guid(), + schema: { + id: { visible: true }, + one: { visible: true }, + two: { visible: true }, + }, + }) + const table = await config.api.table.get(tableId) await config.api.table.save({ ...table, schema: { ...table.schema, - ...links.reduce((acc, c) => { - acc[c.name] = { - name: c.name, - relationshipType: RelationshipType.ONE_TO_MANY, - type: FieldType.LINK, - tableId: c.tableId, - fieldName: c.fk, - constraints: { type: "array" }, - } - return acc - }, {}), + two: { ...table.schema["two"], visible: false }, }, }) - return table - } - const createView = async (tableId: string, schema: ViewV2Schema) => - await config.api.viewV2.create({ - name: generator.guid(), - tableId, - schema, + expect(await getDelegate(view)).toEqual({ + ...view, + schema: { + id: { ...table.schema["id"], visible: true }, + one: { ...table.schema["one"], visible: true }, + three: { ...table.schema["three"], visible: false }, + }, + }) + }) + + it("should be able to fetch readonly config after downgrades", async () => { + const res = await config.api.viewV2.create({ + name: generator.name(), + tableId: table._id!, + schema: { + id: { visible: true }, + one: { visible: true, readonly: true }, + }, }) - const renameColumn = async (table: Table, renaming: RenameColumn) => { - const newSchema = { ...table.schema } - newSchema[renaming.updated] = { - ...table.schema[renaming.old], - name: renaming.updated, + mocks.licenses.useCloudFree() + const view = await getDelegate(res) + expect(view.schema?.one).toEqual( + expect.objectContaining({ visible: true, readonly: true }) + ) + }) + + it("should fill in the queryUI field if it's missing", async () => { + const res = await config.api.viewV2.create({ + name: generator.name(), + tableId: tableId, + query: [ + { + operator: BasicOperator.EQUAL, + field: "one", + value: "1", + }, + ], + schema: { + id: { visible: true }, + one: { visible: true }, + }, + }) + + const table = await config.api.table.get(tableId) + const rawView = table.views![res.name] as ViewV2 
+ delete rawView.queryUI + + await context.doInAppContext(config.getAppId(), async () => { + const db = context.getAppDB() + + if (!rawDatasource) { + await db.put(table) + } else { + const ds = await config.api.datasource.get(datasource!._id!) + ds.entities![table.name] = table + const updatedDs = { + ...rawDatasource, + _id: ds._id, + _rev: ds._rev, + entities: ds.entities, + } + await db.put(updatedDs) + } + }) + + const view = await getDelegate(res) + const expected: UISearchFilter = { + onEmptyFilter: EmptyFilterOption.RETURN_ALL, + logicalOperator: UILogicalOperator.ALL, + groups: [ + { + logicalOperator: UILogicalOperator.ALL, + filters: [ + { + operator: BasicOperator.EQUAL, + field: "one", + value: "1", + }, + ], + }, + ], } - delete newSchema[renaming.old] + expect(view.queryUI).toEqual(expected) + }) + }) - await config.api.table.save({ - ...table, - schema: newSchema, - _rename: renaming, - }) - } - - it("updating a column will update link columns configuration", async () => { - let auxTable = await createAuxTable() - - const table = await createMainTable([ - { name: "aux", tableId: auxTable._id!, fk: "fk_aux" }, - ]) - // Refetch auxTable - auxTable = await config.api.table.get(auxTable._id!) - - const view = await createView(table._id!, { - aux: { - visible: true, - columns: { - name: { visible: true, readonly: true }, - age: { visible: true, readonly: true }, - }, - }, - }) - - await renameColumn(auxTable, { old: "age", updated: "dob" }) - - const updatedView = await config.api.viewV2.get(view.id) - expect(updatedView).toEqual( - expect.objectContaining({ - schema: expect.objectContaining({ - aux: expect.objectContaining({ - columns: { - id: expect.objectContaining({ - visible: false, - readonly: false, - }), - name: expect.objectContaining({ - visible: true, - readonly: true, - }), - dob: expect.objectContaining({ - visible: true, - readonly: true, - }), + describe("updating table schema", () => { + describe("existing columns changed to required", () => { + beforeEach(async () => { + table = await config.api.table.save( + saveTableRequest({ + schema: { + id: { + name: "id", + type: FieldType.NUMBER, + autocolumn: true, }, - }), - }), + name: { + name: "name", + type: FieldType.STRING, + }, + }, + }) + ) + }) + + it("allows updating when no views constrains the field", async () => { + await config.api.viewV2.create({ + name: "view a", + tableId: table._id!, + schema: { + id: { visible: true }, + name: { visible: true }, + }, }) - ) + + table = await config.api.table.get(table._id!) + await config.api.table.save( + { + ...table, + schema: { + ...table.schema, + name: { + name: "name", + type: FieldType.STRING, + constraints: { presence: { allowEmpty: false } }, + }, + }, + }, + { status: 200 } + ) + }) + + it("rejects if field is readonly in any view", async () => { + await config.api.viewV2.create({ + name: "view a", + tableId: table._id!, + schema: { + id: { visible: true }, + name: { + visible: true, + readonly: true, + }, + }, + }) + + table = await config.api.table.get(table._id!) 
+ await config.api.table.save( + { + ...table, + schema: { + ...table.schema, + name: { + name: "name", + type: FieldType.STRING, + constraints: { presence: true }, + }, + }, + }, + { + status: 400, + body: { + status: 400, + message: + 'To make field "name" required, this field must be present and writable in views: view a.', + }, + } + ) + }) + + it("rejects if field is hidden in any view", async () => { + await config.api.viewV2.create({ + name: "view a", + tableId: table._id!, + schema: { id: { visible: true } }, + }) + + table = await config.api.table.get(table._id!) + await config.api.table.save( + { + ...table, + schema: { + ...table.schema, + name: { + name: "name", + type: FieldType.STRING, + constraints: { presence: true }, + }, + }, + }, + { + status: 400, + body: { + status: 400, + message: + 'To make field "name" required, this field must be present and writable in views: view a.', + }, + } + ) + }) }) - it("handles multiple fields using the same table", async () => { - let auxTable = await createAuxTable() + describe("foreign relationship columns", () => { + const createAuxTable = () => + config.api.table.save( + saveTableRequest({ + primaryDisplay: "name", + schema: { + name: { name: "name", type: FieldType.STRING }, + age: { name: "age", type: FieldType.NUMBER }, + }, + }) + ) - const table = await createMainTable([ - { name: "aux", tableId: auxTable._id!, fk: "fk_aux" }, - { name: "aux2", tableId: auxTable._id!, fk: "fk_aux2" }, - ]) - // Refetch auxTable - auxTable = await config.api.table.get(auxTable._id!) - - const view = await createView(table._id!, { - aux: { - visible: true, - columns: { - name: { visible: true, readonly: true }, - age: { visible: true, readonly: true }, + const createMainTable = async ( + links: { + name: string + tableId: string + fk: string + }[] + ) => { + const table = await config.api.table.save( + saveTableRequest({ + schema: {}, + }) + ) + await config.api.table.save({ + ...table, + schema: { + ...table.schema, + ...links.reduce((acc, c) => { + acc[c.name] = { + name: c.name, + relationshipType: RelationshipType.ONE_TO_MANY, + type: FieldType.LINK, + tableId: c.tableId, + fieldName: c.fk, + constraints: { type: "array" }, + } + return acc + }, {}), }, - }, - aux2: { - visible: true, - columns: { - name: { visible: true, readonly: true }, - age: { visible: true, readonly: true }, - }, - }, - }) - - await renameColumn(auxTable, { old: "age", updated: "dob" }) - - const updatedView = await config.api.viewV2.get(view.id) - expect(updatedView).toEqual( - expect.objectContaining({ - schema: expect.objectContaining({ - aux: expect.objectContaining({ - columns: { - id: expect.objectContaining({ - visible: false, - readonly: false, - }), - name: expect.objectContaining({ - visible: true, - readonly: true, - }), - dob: expect.objectContaining({ - visible: true, - readonly: true, - }), - }, - }), - aux2: expect.objectContaining({ - columns: { - id: expect.objectContaining({ - visible: false, - readonly: false, - }), - name: expect.objectContaining({ - visible: true, - readonly: true, - }), - dob: expect.objectContaining({ - visible: true, - readonly: true, - }), - }, - }), - }), }) - ) - }) - - it("does not rename columns with the same name but from other tables", async () => { - let auxTable = await createAuxTable() - let aux2Table = await createAuxTable() - - const table = await createMainTable([ - { name: "aux", tableId: auxTable._id!, fk: "fk_aux" }, - { name: "aux2", tableId: aux2Table._id!, fk: "fk_aux2" }, - ]) - - // Refetch auxTable - auxTable 
= await config.api.table.get(auxTable._id!) - - const view = await createView(table._id!, { - aux: { - visible: true, - columns: { - name: { visible: true, readonly: true }, - }, - }, - aux2: { - visible: true, - columns: { - name: { visible: true, readonly: true }, - }, - }, - }) - - await renameColumn(auxTable, { old: "name", updated: "fullName" }) - - const updatedView = await config.api.viewV2.get(view.id) - expect(updatedView).toEqual( - expect.objectContaining({ - schema: expect.objectContaining({ - aux: expect.objectContaining({ - columns: { - id: expect.objectContaining({ - visible: false, - readonly: false, - }), - fullName: expect.objectContaining({ - visible: true, - readonly: true, - }), - age: expect.objectContaining({ - visible: false, - readonly: false, - }), - }, - }), - aux2: expect.objectContaining({ - columns: { - id: expect.objectContaining({ - visible: false, - readonly: false, - }), - name: expect.objectContaining({ - visible: true, - readonly: true, - }), - age: expect.objectContaining({ - visible: false, - readonly: false, - }), - }, - }), - }), - }) - ) - }) - - it("updates all views references", async () => { - let auxTable = await createAuxTable() - - const table1 = await createMainTable([ - { name: "aux", tableId: auxTable._id!, fk: "fk_aux_table1" }, - ]) - const table2 = await createMainTable([ - { name: "aux", tableId: auxTable._id!, fk: "fk_aux_table2" }, - ]) - - // Refetch auxTable - auxTable = await config.api.table.get(auxTable._id!) - - const viewSchema = { - aux: { - visible: true, - columns: { - name: { visible: true, readonly: true }, - age: { visible: true, readonly: true }, - }, - }, + return table } - const view1 = await createView(table1._id!, viewSchema) - const view2 = await createView(table1._id!, viewSchema) - const view3 = await createView(table2._id!, viewSchema) - await renameColumn(auxTable, { old: "age", updated: "dob" }) + const createView = async (tableId: string, schema: ViewV2Schema) => + await config.api.viewV2.create({ + name: generator.guid(), + tableId, + schema, + }) + + const renameColumn = async ( + table: Table, + renaming: RenameColumn + ) => { + const newSchema = { ...table.schema } + newSchema[renaming.updated] = { + ...table.schema[renaming.old], + name: renaming.updated, + } + delete newSchema[renaming.old] + + await config.api.table.save({ + ...table, + schema: newSchema, + _rename: renaming, + }) + } + + it("updating a column will update link columns configuration", async () => { + let auxTable = await createAuxTable() + + const table = await createMainTable([ + { name: "aux", tableId: auxTable._id!, fk: "fk_aux" }, + ]) + // Refetch auxTable + auxTable = await config.api.table.get(auxTable._id!) + + const view = await createView(table._id!, { + aux: { + visible: true, + columns: { + name: { visible: true, readonly: true }, + age: { visible: true, readonly: true }, + }, + }, + }) + + await renameColumn(auxTable, { old: "age", updated: "dob" }) - for (const view of [view1, view2, view3]) { const updatedView = await config.api.viewV2.get(view.id) expect(updatedView).toEqual( expect.objectContaining({ @@ -2269,74 +2084,605 @@ datasourceDescribe( }), }) ) - } + }) + + it("handles multiple fields using the same table", async () => { + let auxTable = await createAuxTable() + + const table = await createMainTable([ + { name: "aux", tableId: auxTable._id!, fk: "fk_aux" }, + { name: "aux2", tableId: auxTable._id!, fk: "fk_aux2" }, + ]) + // Refetch auxTable + auxTable = await config.api.table.get(auxTable._id!) 
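+ // (Presumably required because creating the link columns above also
+ // updates auxTable's schema and _rev, so saving the stale copy during
+ // renameColumn below would conflict.)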
+ + const view = await createView(table._id!, { + aux: { + visible: true, + columns: { + name: { visible: true, readonly: true }, + age: { visible: true, readonly: true }, + }, + }, + aux2: { + visible: true, + columns: { + name: { visible: true, readonly: true }, + age: { visible: true, readonly: true }, + }, + }, + }) + + await renameColumn(auxTable, { old: "age", updated: "dob" }) + + const updatedView = await config.api.viewV2.get(view.id) + expect(updatedView).toEqual( + expect.objectContaining({ + schema: expect.objectContaining({ + aux: expect.objectContaining({ + columns: { + id: expect.objectContaining({ + visible: false, + readonly: false, + }), + name: expect.objectContaining({ + visible: true, + readonly: true, + }), + dob: expect.objectContaining({ + visible: true, + readonly: true, + }), + }, + }), + aux2: expect.objectContaining({ + columns: { + id: expect.objectContaining({ + visible: false, + readonly: false, + }), + name: expect.objectContaining({ + visible: true, + readonly: true, + }), + dob: expect.objectContaining({ + visible: true, + readonly: true, + }), + }, + }), + }), + }) + ) + }) + + it("does not rename columns with the same name but from other tables", async () => { + let auxTable = await createAuxTable() + let aux2Table = await createAuxTable() + + const table = await createMainTable([ + { name: "aux", tableId: auxTable._id!, fk: "fk_aux" }, + { name: "aux2", tableId: aux2Table._id!, fk: "fk_aux2" }, + ]) + + // Refetch auxTable + auxTable = await config.api.table.get(auxTable._id!) + + const view = await createView(table._id!, { + aux: { + visible: true, + columns: { + name: { visible: true, readonly: true }, + }, + }, + aux2: { + visible: true, + columns: { + name: { visible: true, readonly: true }, + }, + }, + }) + + await renameColumn(auxTable, { old: "name", updated: "fullName" }) + + const updatedView = await config.api.viewV2.get(view.id) + expect(updatedView).toEqual( + expect.objectContaining({ + schema: expect.objectContaining({ + aux: expect.objectContaining({ + columns: { + id: expect.objectContaining({ + visible: false, + readonly: false, + }), + fullName: expect.objectContaining({ + visible: true, + readonly: true, + }), + age: expect.objectContaining({ + visible: false, + readonly: false, + }), + }, + }), + aux2: expect.objectContaining({ + columns: { + id: expect.objectContaining({ + visible: false, + readonly: false, + }), + name: expect.objectContaining({ + visible: true, + readonly: true, + }), + age: expect.objectContaining({ + visible: false, + readonly: false, + }), + }, + }), + }), + }) + ) + }) + + it("updates all views references", async () => { + let auxTable = await createAuxTable() + + const table1 = await createMainTable([ + { name: "aux", tableId: auxTable._id!, fk: "fk_aux_table1" }, + ]) + const table2 = await createMainTable([ + { name: "aux", tableId: auxTable._id!, fk: "fk_aux_table2" }, + ]) + + // Refetch auxTable + auxTable = await config.api.table.get(auxTable._id!) 
+ + const viewSchema = { + aux: { + visible: true, + columns: { + name: { visible: true, readonly: true }, + age: { visible: true, readonly: true }, + }, + }, + } + const view1 = await createView(table1._id!, viewSchema) + const view2 = await createView(table1._id!, viewSchema) + const view3 = await createView(table2._id!, viewSchema) + + await renameColumn(auxTable, { old: "age", updated: "dob" }) + + for (const view of [view1, view2, view3]) { + const updatedView = await config.api.viewV2.get(view.id) + expect(updatedView).toEqual( + expect.objectContaining({ + schema: expect.objectContaining({ + aux: expect.objectContaining({ + columns: { + id: expect.objectContaining({ + visible: false, + readonly: false, + }), + name: expect.objectContaining({ + visible: true, + readonly: true, + }), + dob: expect.objectContaining({ + visible: true, + readonly: true, + }), + }, + }), + }), + }) + ) + } + }) + }) + }) + + describe("calculation views", () => { + it("should not remove calculation columns when modifying table schema", async () => { + let table = await config.api.table.save( + saveTableRequest({ + schema: { + name: { + name: "name", + type: FieldType.STRING, + }, + age: { + name: "age", + type: FieldType.NUMBER, + }, + }, + }) + ) + + let view = await config.api.viewV2.create({ + tableId: table._id!, + name: generator.guid(), + type: ViewV2Type.CALCULATION, + schema: { + sum: { + visible: true, + calculationType: CalculationType.SUM, + field: "age", + }, + }, + }) + + table = await config.api.table.get(table._id!) + await config.api.table.save({ + ...table, + schema: { + ...table.schema, + name: { + name: "name", + type: FieldType.STRING, + constraints: { presence: true }, + }, + }, + }) + + view = await config.api.viewV2.get(view.id) + expect(Object.keys(view.schema!).sort()).toEqual([ + "age", + "id", + "name", + "sum", + ]) + }) + + describe("bigints", () => { + let table: Table + let view: ViewV2 + + beforeEach(async () => { + table = await config.api.table.save( + saveTableRequest({ + schema: { + bigint: { + name: "bigint", + type: FieldType.BIGINT, + }, + }, + }) + ) + + view = await config.api.viewV2.create({ + tableId: table._id!, + name: generator.guid(), + type: ViewV2Type.CALCULATION, + schema: { + sum: { + visible: true, + calculationType: CalculationType.SUM, + field: "bigint", + }, + }, + }) + }) + + it("should not lose precision handling ints larger than JSs int53", async () => { + // The sum of the following 3 numbers cannot be represented by + // JavaScripts default int53 datatype for numbers, so this is a test + // that makes sure we aren't losing precision between the DB and the + // user. 
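+ // For reference: 1000000000000000000 + 123 + 321 = 1000000000000000444,
+ // which is well above Number.MAX_SAFE_INTEGER (2**53 - 1 = 9007199254740991),
+ // so the assertion below checks the sum as a string rather than a number.
+ // As plain JS numbers, 1000000000000000000 + 444 rounds to
+ // 1000000000000000384, whereas 1000000000000000000n + 444n stays exact
+ // as a BigInt.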
+ await config.api.row.bulkImport(table._id!, { + rows: [ + { bigint: "1000000000000000000" }, + { bigint: "123" }, + { bigint: "321" }, + ], + }) + + const { rows } = await config.api.row.search(view.id) + expect(rows).toHaveLength(1) + expect(rows[0].sum).toEqual("1000000000000000444") + }) + + it("should be able to handle up to 2**63 - 1 bigints", async () => { + await config.api.row.bulkImport(table._id!, { + rows: [{ bigint: "9223372036854775806" }, { bigint: "1" }], + }) + + const { rows } = await config.api.row.search(view.id) + expect(rows).toHaveLength(1) + expect(rows[0].sum).toEqual("9223372036854775807") + }) }) }) }) - describe("calculation views", () => { - it("should not remove calculation columns when modifying table schema", async () => { - let table = await config.api.table.save( + describe("row operations", () => { + let table: Table, view: ViewV2 + beforeEach(async () => { + table = await config.api.table.save( saveTableRequest({ schema: { - name: { - name: "name", + one: { type: FieldType.STRING, name: "one" }, + two: { type: FieldType.STRING, name: "two" }, + default: { type: FieldType.STRING, - }, - age: { - name: "age", - type: FieldType.NUMBER, + name: "default", + default: "default", }, }, }) ) - - let view = await config.api.viewV2.create({ + view = await config.api.viewV2.create({ tableId: table._id!, name: generator.guid(), - type: ViewV2Type.CALCULATION, schema: { - sum: { - visible: true, - calculationType: CalculationType.SUM, - field: "age", - }, + id: { visible: true }, + two: { visible: true }, }, }) - - table = await config.api.table.get(table._id!) - await config.api.table.save({ - ...table, - schema: { - ...table.schema, - name: { - name: "name", - type: FieldType.STRING, - constraints: { presence: true }, - }, - }, - }) - - view = await config.api.viewV2.get(view.id) - expect(Object.keys(view.schema!).sort()).toEqual([ - "age", - "id", - "name", - "sum", - ]) }) - describe("bigints", () => { - let table: Table - let view: ViewV2 + describe("create", () => { + it("should persist a new row with only the provided view fields", async () => { + const newRow = await config.api.row.save(view.id, { + tableId: table!._id, + _viewId: view.id, + one: "foo", + two: "bar", + default: "ohnoes", + }) - beforeEach(async () => { + const row = await config.api.row.get(table._id!, newRow._id!) 
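+ // "one" is not in the view schema, so it is stripped from the saved row;
+ // "default" is stripped too and falls back to the column default value.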
+ expect(row.one).toBeUndefined() + expect(row.two).toEqual("bar") + expect(row.default).toEqual("default") + }) + + it("can't persist readonly columns", async () => { + const view = await config.api.viewV2.create({ + tableId: table._id!, + name: generator.guid(), + schema: { + id: { visible: true }, + one: { visible: true, readonly: true }, + two: { visible: true }, + }, + }) + const row = await config.api.row.save(view.id, { + tableId: table!._id, + _viewId: view.id, + one: "foo", + two: "bar", + }) + + expect(row.one).toBeUndefined() + expect(row.two).toEqual("bar") + }) + + it("should not return non-view view fields for a row", async () => { + const newRow = await config.api.row.save(view.id, { + one: "foo", + two: "bar", + }) + + expect(newRow.one).toBeUndefined() + expect(newRow.two).toEqual("bar") + }) + + it("should not be possible to create a row in a calculation view", async () => { + const view = await config.api.viewV2.create({ + tableId: table._id!, + name: generator.guid(), + type: ViewV2Type.CALCULATION, + schema: { + id: { visible: true }, + one: { visible: true }, + }, + }) + + await config.api.row.save( + view.id, + { one: "foo" }, + { + status: 400, + body: { + message: "Cannot insert rows through a calculation view", + status: 400, + }, + } + ) + }) + }) + + describe("patch", () => { + it("should not return non-view view fields for a row", async () => { + const newRow = await config.api.row.save(table._id!, { + one: "foo", + two: "bar", + }) + const row = await config.api.row.patch(view.id, { + tableId: table._id!, + _id: newRow._id!, + _rev: newRow._rev!, + one: "newFoo", + two: "newBar", + }) + + expect(row.one).toBeUndefined() + expect(row.two).toEqual("newBar") + }) + + it("should update only the view fields for a row", async () => { + const newRow = await config.api.row.save(table._id!, { + one: "foo", + two: "bar", + }) + await config.api.row.patch(view.id, { + tableId: table._id!, + _id: newRow._id!, + _rev: newRow._rev!, + one: "newFoo", + two: "newBar", + }) + + const row = await config.api.row.get(table._id!, newRow._id!) + expect(row.one).toEqual("foo") + expect(row.two).toEqual("newBar") + }) + + it("can't update readonly columns", async () => { + const view = await config.api.viewV2.create({ + tableId: table._id!, + name: generator.guid(), + schema: { + id: { visible: true }, + one: { visible: true, readonly: true }, + two: { visible: true }, + }, + }) + const newRow = await config.api.row.save(table._id!, { + one: "foo", + two: "bar", + }) + await config.api.row.patch(view.id, { + tableId: table._id!, + _id: newRow._id!, + _rev: newRow._rev!, + one: "newFoo", + two: "newBar", + }) + + const row = await config.api.row.get(table._id!, newRow._id!) 
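+ // "one" is readonly in this view, so the patched value is ignored and
+ // the original "foo" is kept; "two" is writable and does update.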
+ expect(row.one).toEqual("foo") + expect(row.two).toEqual("newBar") + }) + + it("should not be possible to modify a row in a calculation view", async () => { + const view = await config.api.viewV2.create({ + tableId: table._id!, + name: generator.guid(), + type: ViewV2Type.CALCULATION, + schema: { + id: { visible: true }, + one: { visible: true }, + }, + }) + + const newRow = await config.api.row.save(table._id!, { + one: "foo", + two: "bar", + }) + + await config.api.row.patch( + view.id, + { + tableId: table._id!, + _id: newRow._id!, + _rev: newRow._rev!, + one: "newFoo", + two: "newBar", + }, + { + status: 400, + body: { + message: "Cannot update rows through a calculation view", + }, + } + ) + }) + }) + + describe("destroy", () => { + const getRowUsage = async () => { + const { total } = await config.doInContext(undefined, () => + quotas.getCurrentUsageValues( + QuotaUsageType.STATIC, + StaticQuotaName.ROWS + ) + ) + return total + } + + const assertRowUsage = async (expected: number) => { + const usage = await getRowUsage() + expect(usage).toBe(expected) + } + + it("should be able to delete a row", async () => { + const createdRow = await config.api.row.save(table._id!, {}) + const rowUsage = await getRowUsage() + await config.api.row.bulkDelete(view.id, { rows: [createdRow] }) + await assertRowUsage(isInternal ? rowUsage - 1 : rowUsage) + await config.api.row.get(table._id!, createdRow._id!, { + status: 404, + }) + }) + + it("should be able to delete multiple rows", async () => { + const rows = await Promise.all([ + config.api.row.save(table._id!, {}), + config.api.row.save(table._id!, {}), + config.api.row.save(table._id!, {}), + ]) + const rowUsage = await getRowUsage() + + await config.api.row.bulkDelete(view.id, { + rows: [rows[0], rows[2]], + }) + + await assertRowUsage(isInternal ? rowUsage - 2 : rowUsage) + + await config.api.row.get(table._id!, rows[0]._id!, { + status: 404, + }) + await config.api.row.get(table._id!, rows[2]._id!, { + status: 404, + }) + await config.api.row.get(table._id!, rows[1]._id!, { status: 200 }) + }) + + it("should not be possible to delete a row in a calculation view", async () => { + const row = await config.api.row.save(table._id!, {}) + + const view = await config.api.viewV2.create({ + tableId: table._id!, + name: generator.guid(), + type: ViewV2Type.CALCULATION, + schema: { + id: { visible: true }, + one: { visible: true }, + }, + }) + + await config.api.row.delete( + view.id, + { _id: row._id! }, + { + status: 400, + body: { + message: "Cannot delete rows through a calculation view", + status: 400, + }, + } + ) + }) + }) + + describe("read", () => { + let view: ViewV2 + let table: Table + + beforeAll(async () => { table = await config.api.table.save( saveTableRequest({ schema: { - bigint: { - name: "bigint", - type: FieldType.BIGINT, + Country: { + type: FieldType.STRING, + name: "Country", + }, + Story: { + type: FieldType.STRING, + name: "Story", }, }, }) @@ -2345,852 +2691,552 @@ datasourceDescribe( view = await config.api.viewV2.create({ tableId: table._id!, name: generator.guid(), - type: ViewV2Type.CALCULATION, - schema: { - sum: { - visible: true, - calculationType: CalculationType.SUM, - field: "bigint", - }, - }, - }) - }) - - it("should not lose precision handling ints larger than JSs int53", async () => { - // The sum of the following 3 numbers cannot be represented by - // JavaScripts default int53 datatype for numbers, so this is a test - // that makes sure we aren't losing precision between the DB and the - // user. 
- await config.api.row.bulkImport(table._id!, { - rows: [ - { bigint: "1000000000000000000" }, - { bigint: "123" }, - { bigint: "321" }, - ], - }) - - const { rows } = await config.api.row.search(view.id) - expect(rows).toHaveLength(1) - expect(rows[0].sum).toEqual("1000000000000000444") - }) - - it("should be able to handle up to 2**63 - 1 bigints", async () => { - await config.api.row.bulkImport(table._id!, { - rows: [{ bigint: "9223372036854775806" }, { bigint: "1" }], - }) - - const { rows } = await config.api.row.search(view.id) - expect(rows).toHaveLength(1) - expect(rows[0].sum).toEqual("9223372036854775807") - }) - }) - }) - }) - - describe("row operations", () => { - let table: Table, view: ViewV2 - beforeEach(async () => { - table = await config.api.table.save( - saveTableRequest({ - schema: { - one: { type: FieldType.STRING, name: "one" }, - two: { type: FieldType.STRING, name: "two" }, - default: { - type: FieldType.STRING, - name: "default", - default: "default", - }, - }, - }) - ) - view = await config.api.viewV2.create({ - tableId: table._id!, - name: generator.guid(), - schema: { - id: { visible: true }, - two: { visible: true }, - }, - }) - }) - - describe("create", () => { - it("should persist a new row with only the provided view fields", async () => { - const newRow = await config.api.row.save(view.id, { - tableId: table!._id, - _viewId: view.id, - one: "foo", - two: "bar", - default: "ohnoes", - }) - - const row = await config.api.row.get(table._id!, newRow._id!) - expect(row.one).toBeUndefined() - expect(row.two).toEqual("bar") - expect(row.default).toEqual("default") - }) - - it("can't persist readonly columns", async () => { - const view = await config.api.viewV2.create({ - tableId: table._id!, - name: generator.guid(), - schema: { - id: { visible: true }, - one: { visible: true, readonly: true }, - two: { visible: true }, - }, - }) - const row = await config.api.row.save(view.id, { - tableId: table!._id, - _viewId: view.id, - one: "foo", - two: "bar", - }) - - expect(row.one).toBeUndefined() - expect(row.two).toEqual("bar") - }) - - it("should not return non-view view fields for a row", async () => { - const newRow = await config.api.row.save(view.id, { - one: "foo", - two: "bar", - }) - - expect(newRow.one).toBeUndefined() - expect(newRow.two).toEqual("bar") - }) - - it("should not be possible to create a row in a calculation view", async () => { - const view = await config.api.viewV2.create({ - tableId: table._id!, - name: generator.guid(), - type: ViewV2Type.CALCULATION, - schema: { - id: { visible: true }, - one: { visible: true }, - }, - }) - - await config.api.row.save( - view.id, - { one: "foo" }, - { - status: 400, - body: { - message: "Cannot insert rows through a calculation view", - status: 400, - }, - } - ) - }) - }) - - describe("patch", () => { - it("should not return non-view view fields for a row", async () => { - const newRow = await config.api.row.save(table._id!, { - one: "foo", - two: "bar", - }) - const row = await config.api.row.patch(view.id, { - tableId: table._id!, - _id: newRow._id!, - _rev: newRow._rev!, - one: "newFoo", - two: "newBar", - }) - - expect(row.one).toBeUndefined() - expect(row.two).toEqual("newBar") - }) - - it("should update only the view fields for a row", async () => { - const newRow = await config.api.row.save(table._id!, { - one: "foo", - two: "bar", - }) - await config.api.row.patch(view.id, { - tableId: table._id!, - _id: newRow._id!, - _rev: newRow._rev!, - one: "newFoo", - two: "newBar", - }) - - const row = await 
config.api.row.get(table._id!, newRow._id!) - expect(row.one).toEqual("foo") - expect(row.two).toEqual("newBar") - }) - - it("can't update readonly columns", async () => { - const view = await config.api.viewV2.create({ - tableId: table._id!, - name: generator.guid(), - schema: { - id: { visible: true }, - one: { visible: true, readonly: true }, - two: { visible: true }, - }, - }) - const newRow = await config.api.row.save(table._id!, { - one: "foo", - two: "bar", - }) - await config.api.row.patch(view.id, { - tableId: table._id!, - _id: newRow._id!, - _rev: newRow._rev!, - one: "newFoo", - two: "newBar", - }) - - const row = await config.api.row.get(table._id!, newRow._id!) - expect(row.one).toEqual("foo") - expect(row.two).toEqual("newBar") - }) - - it("should not be possible to modify a row in a calculation view", async () => { - const view = await config.api.viewV2.create({ - tableId: table._id!, - name: generator.guid(), - type: ViewV2Type.CALCULATION, - schema: { - id: { visible: true }, - one: { visible: true }, - }, - }) - - const newRow = await config.api.row.save(table._id!, { - one: "foo", - two: "bar", - }) - - await config.api.row.patch( - view.id, - { - tableId: table._id!, - _id: newRow._id!, - _rev: newRow._rev!, - one: "newFoo", - two: "newBar", - }, - { - status: 400, - body: { - message: "Cannot update rows through a calculation view", - }, - } - ) - }) - }) - - describe("destroy", () => { - const getRowUsage = async () => { - const { total } = await config.doInContext(undefined, () => - quotas.getCurrentUsageValues( - QuotaUsageType.STATIC, - StaticQuotaName.ROWS - ) - ) - return total - } - - const assertRowUsage = async (expected: number) => { - const usage = await getRowUsage() - expect(usage).toBe(expected) - } - - it("should be able to delete a row", async () => { - const createdRow = await config.api.row.save(table._id!, {}) - const rowUsage = await getRowUsage() - await config.api.row.bulkDelete(view.id, { rows: [createdRow] }) - await assertRowUsage(isInternal ? rowUsage - 1 : rowUsage) - await config.api.row.get(table._id!, createdRow._id!, { - status: 404, - }) - }) - - it("should be able to delete multiple rows", async () => { - const rows = await Promise.all([ - config.api.row.save(table._id!, {}), - config.api.row.save(table._id!, {}), - config.api.row.save(table._id!, {}), - ]) - const rowUsage = await getRowUsage() - - await config.api.row.bulkDelete(view.id, { rows: [rows[0], rows[2]] }) - - await assertRowUsage(isInternal ? rowUsage - 2 : rowUsage) - - await config.api.row.get(table._id!, rows[0]._id!, { - status: 404, - }) - await config.api.row.get(table._id!, rows[2]._id!, { - status: 404, - }) - await config.api.row.get(table._id!, rows[1]._id!, { status: 200 }) - }) - - it("should not be possible to delete a row in a calculation view", async () => { - const row = await config.api.row.save(table._id!, {}) - - const view = await config.api.viewV2.create({ - tableId: table._id!, - name: generator.guid(), - type: ViewV2Type.CALCULATION, - schema: { - id: { visible: true }, - one: { visible: true }, - }, - }) - - await config.api.row.delete( - view.id, - { _id: row._id! 
}, - { - status: 400, - body: { - message: "Cannot delete rows through a calculation view", - status: 400, - }, - } - ) - }) - }) - - describe("read", () => { - let view: ViewV2 - let table: Table - - beforeAll(async () => { - table = await config.api.table.save( - saveTableRequest({ schema: { + id: { visible: true }, Country: { - type: FieldType.STRING, - name: "Country", - }, - Story: { - type: FieldType.STRING, - name: "Story", + visible: true, }, }, }) - ) + }) - view = await config.api.viewV2.create({ - tableId: table._id!, - name: generator.guid(), - schema: { - id: { visible: true }, - Country: { - visible: true, - }, - }, + it("views have extra data trimmed", async () => { + let row = await config.api.row.save(view.id, { + Country: "Aussy", + Story: "aaaaa", + }) + + row = await config.api.row.get(table._id!, row._id!) + expect(row.Story).toBeUndefined() + expect(row.Country).toEqual("Aussy") }) }) - it("views have extra data trimmed", async () => { - let row = await config.api.row.save(view.id, { - Country: "Aussy", - Story: "aaaaa", - }) - - row = await config.api.row.get(table._id!, row._id!) - expect(row.Story).toBeUndefined() - expect(row.Country).toEqual("Aussy") - }) - }) - - describe("search", () => { - it("returns empty rows from view when no schema is passed", async () => { - const rows = await Promise.all( - Array.from({ length: 10 }, () => - config.api.row.save(table._id!, {}) + describe("search", () => { + it("returns empty rows from view when no schema is passed", async () => { + const rows = await Promise.all( + Array.from({ length: 10 }, () => + config.api.row.save(table._id!, {}) + ) ) - ) - const response = await config.api.viewV2.search(view.id) - expect(response.rows).toHaveLength(10) - expect(response).toEqual({ - rows: expect.arrayContaining( - rows.map(r => ({ - _viewId: view.id, - tableId: table._id, - id: r.id, - _id: r._id, - _rev: r._rev, - ...(isInternal - ? { - type: "row", - updatedAt: expect.any(String), - createdAt: expect.any(String), - } - : {}), - })) - ), - ...(isInternal - ? {} - : { - hasNextPage: false, - }), - }) - }) - - it("searching respects the view filters", async () => { - await config.api.row.save(table._id!, { - one: "foo", - two: "bar", - }) - const two = await config.api.row.save(table._id!, { - one: "foo2", - two: "bar2", - }) - - const view = await config.api.viewV2.create({ - tableId: table._id!, - name: generator.guid(), - queryUI: { - groups: [ - { - filters: [ - { - operator: BasicOperator.EQUAL, - field: "two", - value: "bar2", - }, - ], - }, - ], - }, - schema: { - id: { visible: true }, - one: { visible: false }, - two: { visible: true }, - }, - }) - - const response = await config.api.viewV2.search(view.id) - expect(response.rows).toHaveLength(1) - expect(response).toEqual({ - rows: expect.arrayContaining([ - { - _viewId: view.id, - tableId: table._id, - id: two.id, - two: two.two, - _id: two._id, - _rev: two._rev, - ...(isInternal - ? { - type: "row", - createdAt: expect.any(String), - updatedAt: expect.any(String), - } - : {}), - }, - ]), - ...(isInternal - ? 
{} - : { - hasNextPage: false, - }), - }) - }) - - it("views filters are respected even if the column is hidden", async () => { - await config.api.row.save(table._id!, { - one: "foo", - two: "bar", - }) - const two = await config.api.row.save(table._id!, { - one: "foo2", - two: "bar2", - }) - - const view = await config.api.viewV2.create({ - tableId: table._id!, - name: generator.guid(), - queryUI: { - groups: [ - { - filters: [ - { - operator: BasicOperator.EQUAL, - field: "two", - value: "bar2", - }, - ], - }, - ], - }, - schema: { - id: { visible: true }, - one: { visible: false }, - two: { visible: false }, - }, - }) - - const response = await config.api.viewV2.search(view.id) - expect(response.rows).toHaveLength(1) - expect(response.rows).toEqual([ - expect.objectContaining({ _id: two._id }), - ]) - }) - - it("views without data can be returned", async () => { - const response = await config.api.viewV2.search(view.id) - expect(response.rows).toHaveLength(0) - }) - - it("respects the limit parameter", async () => { - await Promise.all( - Array.from({ length: 10 }, () => - config.api.row.save(table._id!, {}) - ) - ) - const limit = generator.integer({ min: 1, max: 8 }) - const response = await config.api.viewV2.search(view.id, { - limit, - query: {}, - }) - expect(response.rows).toHaveLength(limit) - }) - - it("can handle pagination", async () => { - await Promise.all( - Array.from({ length: 10 }, () => - config.api.row.save(table._id!, {}) - ) - ) - const rows = (await config.api.viewV2.search(view.id)).rows - - const page1 = await config.api.viewV2.search(view.id, { - paginate: true, - limit: 4, - query: {}, - countRows: true, - }) - expect(page1).toEqual({ - rows: expect.arrayContaining(rows.slice(0, 4)), - hasNextPage: true, - bookmark: expect.anything(), - totalRows: 10, - }) - - const page2 = await config.api.viewV2.search(view.id, { - paginate: true, - limit: 4, - bookmark: page1.bookmark, - query: {}, - countRows: true, - }) - expect(page2).toEqual({ - rows: expect.arrayContaining(rows.slice(4, 8)), - hasNextPage: true, - bookmark: expect.anything(), - totalRows: 10, - }) - - const page3 = await config.api.viewV2.search(view.id, { - paginate: true, - limit: 4, - bookmark: page2.bookmark, - query: {}, - countRows: true, - }) - const expectation: SearchResponse = { - rows: expect.arrayContaining(rows.slice(8)), - hasNextPage: false, - totalRows: 10, - } - expect(page3).toEqual(expectation) - }) - - const sortTestOptions: [ - { - field: string - order?: SortOrder - type?: SortType - }, - string[] - ][] = [ - [ - { - field: "name", - order: SortOrder.ASCENDING, - type: SortType.STRING, - }, - ["Alice", "Bob", "Charly", "Danny"], - ], - [ - { - field: "name", - }, - ["Alice", "Bob", "Charly", "Danny"], - ], - [ - { - field: "name", - order: SortOrder.DESCENDING, - }, - ["Danny", "Charly", "Bob", "Alice"], - ], - [ - { - field: "name", - order: SortOrder.DESCENDING, - type: SortType.STRING, - }, - ["Danny", "Charly", "Bob", "Alice"], - ], - [ - { - field: "age", - order: SortOrder.ASCENDING, - type: SortType.NUMBER, - }, - ["Danny", "Alice", "Charly", "Bob"], - ], - [ - { - field: "age", - order: SortOrder.ASCENDING, - }, - ["Danny", "Alice", "Charly", "Bob"], - ], - [ - { - field: "age", - order: SortOrder.DESCENDING, - }, - ["Bob", "Charly", "Alice", "Danny"], - ], - [ - { - field: "age", - order: SortOrder.DESCENDING, - type: SortType.NUMBER, - }, - ["Bob", "Charly", "Alice", "Danny"], - ], - ] - - describe("sorting", () => { - let table: Table - const viewSchema = { - id: { 
visible: true }, - age: { visible: true }, - name: { visible: true }, - } - - beforeAll(async () => { - table = await config.api.table.save( - saveTableRequest({ - type: "table", - schema: { - name: { - type: FieldType.STRING, - name: "name", - }, - surname: { - type: FieldType.STRING, - name: "surname", - }, - age: { - type: FieldType.NUMBER, - name: "age", - }, - address: { - type: FieldType.STRING, - name: "address", - }, - jobTitle: { - type: FieldType.STRING, - name: "jobTitle", - }, - }, - }) - ) - - const users = [ - { name: "Alice", age: 25 }, - { name: "Bob", age: 30 }, - { name: "Charly", age: 27 }, - { name: "Danny", age: 15 }, - ] - await Promise.all( - users.map(u => - config.api.row.save(table._id!, { + const response = await config.api.viewV2.search(view.id) + expect(response.rows).toHaveLength(10) + expect(response).toEqual({ + rows: expect.arrayContaining( + rows.map(r => ({ + _viewId: view.id, tableId: table._id, - ...u, + id: r.id, + _id: r._id, + _rev: r._rev, + ...(isInternal + ? { + type: "row", + updatedAt: expect.any(String), + createdAt: expect.any(String), + } + : {}), + })) + ), + ...(isInternal + ? {} + : { + hasNextPage: false, + }), + }) + }) + + it("searching respects the view filters", async () => { + await config.api.row.save(table._id!, { + one: "foo", + two: "bar", + }) + const two = await config.api.row.save(table._id!, { + one: "foo2", + two: "bar2", + }) + + const view = await config.api.viewV2.create({ + tableId: table._id!, + name: generator.guid(), + queryUI: { + groups: [ + { + filters: [ + { + operator: BasicOperator.EQUAL, + field: "two", + value: "bar2", + }, + ], + }, + ], + }, + schema: { + id: { visible: true }, + one: { visible: false }, + two: { visible: true }, + }, + }) + + const response = await config.api.viewV2.search(view.id) + expect(response.rows).toHaveLength(1) + expect(response).toEqual({ + rows: expect.arrayContaining([ + { + _viewId: view.id, + tableId: table._id, + id: two.id, + two: two.two, + _id: two._id, + _rev: two._rev, + ...(isInternal + ? { + type: "row", + createdAt: expect.any(String), + updatedAt: expect.any(String), + } + : {}), + }, + ]), + ...(isInternal + ? 
{} + : { + hasNextPage: false, + }), + }) + }) + + it("views filters are respected even if the column is hidden", async () => { + await config.api.row.save(table._id!, { + one: "foo", + two: "bar", + }) + const two = await config.api.row.save(table._id!, { + one: "foo2", + two: "bar2", + }) + + const view = await config.api.viewV2.create({ + tableId: table._id!, + name: generator.guid(), + queryUI: { + groups: [ + { + filters: [ + { + operator: BasicOperator.EQUAL, + field: "two", + value: "bar2", + }, + ], + }, + ], + }, + schema: { + id: { visible: true }, + one: { visible: false }, + two: { visible: false }, + }, + }) + + const response = await config.api.viewV2.search(view.id) + expect(response.rows).toHaveLength(1) + expect(response.rows).toEqual([ + expect.objectContaining({ _id: two._id }), + ]) + }) + + it("views without data can be returned", async () => { + const response = await config.api.viewV2.search(view.id) + expect(response.rows).toHaveLength(0) + }) + + it("respects the limit parameter", async () => { + await Promise.all( + Array.from({ length: 10 }, () => + config.api.row.save(table._id!, {}) + ) + ) + const limit = generator.integer({ min: 1, max: 8 }) + const response = await config.api.viewV2.search(view.id, { + limit, + query: {}, + }) + expect(response.rows).toHaveLength(limit) + }) + + it("can handle pagination", async () => { + await Promise.all( + Array.from({ length: 10 }, () => + config.api.row.save(table._id!, {}) + ) + ) + const rows = (await config.api.viewV2.search(view.id)).rows + + const page1 = await config.api.viewV2.search(view.id, { + paginate: true, + limit: 4, + query: {}, + countRows: true, + }) + expect(page1).toEqual({ + rows: expect.arrayContaining(rows.slice(0, 4)), + hasNextPage: true, + bookmark: expect.anything(), + totalRows: 10, + }) + + const page2 = await config.api.viewV2.search(view.id, { + paginate: true, + limit: 4, + bookmark: page1.bookmark, + query: {}, + countRows: true, + }) + expect(page2).toEqual({ + rows: expect.arrayContaining(rows.slice(4, 8)), + hasNextPage: true, + bookmark: expect.anything(), + totalRows: 10, + }) + + const page3 = await config.api.viewV2.search(view.id, { + paginate: true, + limit: 4, + bookmark: page2.bookmark, + query: {}, + countRows: true, + }) + const expectation: SearchResponse = { + rows: expect.arrayContaining(rows.slice(8)), + hasNextPage: false, + totalRows: 10, + } + expect(page3).toEqual(expectation) + }) + + const sortTestOptions: [ + { + field: string + order?: SortOrder + type?: SortType + }, + string[] + ][] = [ + [ + { + field: "name", + order: SortOrder.ASCENDING, + type: SortType.STRING, + }, + ["Alice", "Bob", "Charly", "Danny"], + ], + [ + { + field: "name", + }, + ["Alice", "Bob", "Charly", "Danny"], + ], + [ + { + field: "name", + order: SortOrder.DESCENDING, + }, + ["Danny", "Charly", "Bob", "Alice"], + ], + [ + { + field: "name", + order: SortOrder.DESCENDING, + type: SortType.STRING, + }, + ["Danny", "Charly", "Bob", "Alice"], + ], + [ + { + field: "age", + order: SortOrder.ASCENDING, + type: SortType.NUMBER, + }, + ["Danny", "Alice", "Charly", "Bob"], + ], + [ + { + field: "age", + order: SortOrder.ASCENDING, + }, + ["Danny", "Alice", "Charly", "Bob"], + ], + [ + { + field: "age", + order: SortOrder.DESCENDING, + }, + ["Bob", "Charly", "Alice", "Danny"], + ], + [ + { + field: "age", + order: SortOrder.DESCENDING, + type: SortType.NUMBER, + }, + ["Bob", "Charly", "Alice", "Danny"], + ], + ] + + describe("sorting", () => { + let table: Table + const viewSchema = { + id: { 
visible: true }, + age: { visible: true }, + name: { visible: true }, + } + + beforeAll(async () => { + table = await config.api.table.save( + saveTableRequest({ + type: "table", + schema: { + name: { + type: FieldType.STRING, + name: "name", + }, + surname: { + type: FieldType.STRING, + name: "surname", + }, + age: { + type: FieldType.NUMBER, + name: "age", + }, + address: { + type: FieldType.STRING, + name: "address", + }, + jobTitle: { + type: FieldType.STRING, + name: "jobTitle", + }, + }, }) ) + + const users = [ + { name: "Alice", age: 25 }, + { name: "Bob", age: 30 }, + { name: "Charly", age: 27 }, + { name: "Danny", age: 15 }, + ] + await Promise.all( + users.map(u => + config.api.row.save(table._id!, { + tableId: table._id, + ...u, + }) + ) + ) + }) + + it.each(sortTestOptions)( + "allow sorting (%s)", + async (sortParams, expected) => { + const view = await config.api.viewV2.create({ + tableId: table._id!, + name: generator.guid(), + sort: sortParams, + schema: viewSchema, + }) + + const response = await config.api.viewV2.search(view.id) + + expect(response.rows).toHaveLength(4) + expect(response.rows).toEqual( + expected.map(name => expect.objectContaining({ name })) + ) + } + ) + + it.each(sortTestOptions)( + "allow override the default view sorting (%s)", + async (sortParams, expected) => { + const view = await config.api.viewV2.create({ + tableId: table._id!, + name: generator.guid(), + sort: { + field: "name", + order: SortOrder.ASCENDING, + type: SortType.STRING, + }, + schema: viewSchema, + }) + + const response = await config.api.viewV2.search(view.id, { + sort: sortParams.field, + sortOrder: sortParams.order, + sortType: sortParams.type, + query: {}, + }) + + expect(response.rows).toHaveLength(4) + expect(response.rows).toEqual( + expected.map(name => expect.objectContaining({ name })) + ) + } ) }) - it.each(sortTestOptions)( - "allow sorting (%s)", - async (sortParams, expected) => { - const view = await config.api.viewV2.create({ - tableId: table._id!, - name: generator.guid(), - sort: sortParams, - schema: viewSchema, + it("can query on top of the view filters", async () => { + await config.api.row.save(table._id!, { + one: "foo", + two: "bar", + }) + await config.api.row.save(table._id!, { + one: "foo2", + two: "bar2", + }) + const three = await config.api.row.save(table._id!, { + one: "foo3", + two: "bar3", + }) + + const view = await config.api.viewV2.create({ + tableId: table._id!, + name: generator.guid(), + queryUI: { + groups: [ + { + filters: [ + { + operator: BasicOperator.NOT_EQUAL, + field: "one", + value: "foo2", + }, + ], + }, + ], + }, + schema: { + id: { visible: true }, + one: { visible: true }, + two: { visible: true }, + }, + }) + + const response = await config.api.viewV2.search(view.id, { + query: { + [BasicOperator.EQUAL]: { + two: "bar3", + }, + [BasicOperator.NOT_EMPTY]: { + two: null, + }, + }, + }) + expect(response.rows).toHaveLength(1) + expect(response.rows).toEqual( + expect.arrayContaining([ + expect.objectContaining({ _id: three._id }), + ]) + ) + }) + + it("can query on top of the view filters (using or filters)", async () => { + const one = await config.api.row.save(table._id!, { + one: "foo", + two: "bar", + }) + await config.api.row.save(table._id!, { + one: "foo2", + two: "bar2", + }) + const three = await config.api.row.save(table._id!, { + one: "foo3", + two: "bar3", + }) + + const view = await config.api.viewV2.create({ + tableId: table._id!, + name: generator.guid(), + queryUI: { + groups: [ + { + filters: [ + { + operator: 
BasicOperator.NOT_EQUAL, + field: "one", + value: "foo2", + }, + ], + }, + ], + }, + schema: { + id: { visible: true }, + one: { visible: false }, + two: { visible: true }, + }, + }) + + const response = await config.api.viewV2.search(view.id, { + query: { + allOr: true, + [BasicOperator.NOT_EQUAL]: { + two: "bar", + }, + [BasicOperator.NOT_EMPTY]: { + two: null, + }, + }, + }) + expect(response.rows).toHaveLength(2) + expect(response.rows).toEqual( + expect.arrayContaining([ + expect.objectContaining({ _id: one._id }), + expect.objectContaining({ _id: three._id }), + ]) + ) + }) + + it.each([true, false])( + "can filter a view without a view filter", + async allOr => { + const one = await config.api.row.save(table._id!, { + one: "foo", + two: "bar", + }) + await config.api.row.save(table._id!, { + one: "foo2", + two: "bar2", }) - const response = await config.api.viewV2.search(view.id) - - expect(response.rows).toHaveLength(4) - expect(response.rows).toEqual( - expected.map(name => expect.objectContaining({ name })) - ) - } - ) - - it.each(sortTestOptions)( - "allow override the default view sorting (%s)", - async (sortParams, expected) => { const view = await config.api.viewV2.create({ tableId: table._id!, name: generator.guid(), - sort: { - field: "name", - order: SortOrder.ASCENDING, - type: SortType.STRING, + schema: { + id: { visible: true }, + one: { visible: false }, + two: { visible: true }, }, - schema: viewSchema, }) const response = await config.api.viewV2.search(view.id, { - sort: sortParams.field, - sortOrder: sortParams.order, - sortType: sortParams.type, - query: {}, + query: { + allOr, + equal: { + two: "bar", + }, + }, }) - - expect(response.rows).toHaveLength(4) - expect(response.rows).toEqual( - expected.map(name => expect.objectContaining({ name })) - ) + expect(response.rows).toHaveLength(1) + expect(response.rows).toEqual([ + expect.objectContaining({ _id: one._id }), + ]) } ) - }) - it("can query on top of the view filters", async () => { - await config.api.row.save(table._id!, { - one: "foo", - two: "bar", - }) - await config.api.row.save(table._id!, { - one: "foo2", - two: "bar2", - }) - const three = await config.api.row.save(table._id!, { - one: "foo3", - two: "bar3", - }) - - const view = await config.api.viewV2.create({ - tableId: table._id!, - name: generator.guid(), - queryUI: { - groups: [ - { - filters: [ - { - operator: BasicOperator.NOT_EQUAL, - field: "one", - value: "foo2", - }, - ], - }, - ], - }, - schema: { - id: { visible: true }, - one: { visible: true }, - two: { visible: true }, - }, - }) - - const response = await config.api.viewV2.search(view.id, { - query: { - [BasicOperator.EQUAL]: { - two: "bar3", - }, - [BasicOperator.NOT_EMPTY]: { - two: null, - }, - }, - }) - expect(response.rows).toHaveLength(1) - expect(response.rows).toEqual( - expect.arrayContaining([ - expect.objectContaining({ _id: three._id }), - ]) - ) - }) - - it("can query on top of the view filters (using or filters)", async () => { - const one = await config.api.row.save(table._id!, { - one: "foo", - two: "bar", - }) - await config.api.row.save(table._id!, { - one: "foo2", - two: "bar2", - }) - const three = await config.api.row.save(table._id!, { - one: "foo3", - two: "bar3", - }) - - const view = await config.api.viewV2.create({ - tableId: table._id!, - name: generator.guid(), - queryUI: { - groups: [ - { - filters: [ - { - operator: BasicOperator.NOT_EQUAL, - field: "one", - value: "foo2", - }, - ], - }, - ], - }, - schema: { - id: { visible: true }, - one: { visible: 
false }, - two: { visible: true }, - }, - }) - - const response = await config.api.viewV2.search(view.id, { - query: { - allOr: true, - [BasicOperator.NOT_EQUAL]: { - two: "bar", - }, - [BasicOperator.NOT_EMPTY]: { - two: null, - }, - }, - }) - expect(response.rows).toHaveLength(2) - expect(response.rows).toEqual( - expect.arrayContaining([ - expect.objectContaining({ _id: one._id }), - expect.objectContaining({ _id: three._id }), - ]) - ) - }) - - it.each([true, false])( - "can filter a view without a view filter", - async allOr => { - const one = await config.api.row.save(table._id!, { + it.each([true, false])("cannot bypass a view filter", async allOr => { + await config.api.row.save(table._id!, { one: "foo", two: "bar", }) @@ -3202,6 +3248,19 @@ datasourceDescribe( const view = await config.api.viewV2.create({ tableId: table._id!, name: generator.guid(), + queryUI: { + groups: [ + { + filters: [ + { + operator: BasicOperator.EQUAL, + field: "two", + value: "bar2", + }, + ], + }, + ], + }, schema: { id: { visible: true }, one: { visible: false }, @@ -3217,408 +3276,330 @@ datasourceDescribe( }, }, }) - expect(response.rows).toHaveLength(1) - expect(response.rows).toEqual([ - expect.objectContaining({ _id: one._id }), - ]) - } - ) - - it.each([true, false])("cannot bypass a view filter", async allOr => { - await config.api.row.save(table._id!, { - one: "foo", - two: "bar", - }) - await config.api.row.save(table._id!, { - one: "foo2", - two: "bar2", + expect(response.rows).toHaveLength(0) }) - const view = await config.api.viewV2.create({ - tableId: table._id!, - name: generator.guid(), - queryUI: { - groups: [ - { - filters: [ - { - operator: BasicOperator.EQUAL, - field: "two", - value: "bar2", - }, - ], - }, - ], - }, - schema: { - id: { visible: true }, - one: { visible: false }, - two: { visible: true }, - }, - }) - - const response = await config.api.viewV2.search(view.id, { - query: { - allOr, - equal: { - two: "bar", - }, - }, - }) - expect(response.rows).toHaveLength(0) - }) - - describe("foreign relationship columns", () => { - const createMainTable = async ( - links: { - name: string - tableId: string - fk: string - }[] - ) => { - const table = await config.api.table.save( - saveTableRequest({ - schema: { title: { name: "title", type: FieldType.STRING } }, - }) - ) - await config.api.table.save({ - ...table, - schema: { - ...table.schema, - ...links.reduce((acc, c) => { - acc[c.name] = { - name: c.name, - relationshipType: RelationshipType.ONE_TO_MANY, - type: FieldType.LINK, - tableId: c.tableId, - fieldName: c.fk, - constraints: { type: "array" }, - } - return acc - }, {}), - }, - }) - return table - } - const createAuxTable = (schema: TableSchema) => - config.api.table.save( - saveTableRequest({ - primaryDisplay: "name", - schema: { - ...schema, - name: { name: "name", type: FieldType.STRING }, - }, - }) - ) - - it("returns squashed fields respecting the view config", async () => { - const auxTable = await createAuxTable({ - age: { name: "age", type: FieldType.NUMBER }, - }) - const auxRow = await config.api.row.save(auxTable._id!, { - name: generator.name(), - age: generator.age(), - }) - - const table = await createMainTable([ - { name: "aux", tableId: auxTable._id!, fk: "fk_aux" }, - ]) - await config.api.row.save(table._id!, { - title: generator.word(), - aux: [auxRow], - }) - - const view = await config.api.viewV2.create({ - tableId: table._id!, - name: generator.guid(), - schema: { - title: { visible: true }, - aux: { - visible: true, - columns: { - name: { visible: 
false, readonly: false }, - age: { visible: true, readonly: true }, - }, - }, - }, - }) - - const response = await config.api.viewV2.search(view.id) - expect(response.rows).toEqual([ - expect.objectContaining({ - aux: [ - { - _id: auxRow._id, - primaryDisplay: auxRow.name, - age: auxRow.age, - }, - ], - }), - ]) - }) - - it("enriches squashed fields", async () => { - const auxTable = await createAuxTable({ - user: { - name: "user", - type: FieldType.BB_REFERENCE_SINGLE, - subtype: BBReferenceFieldSubType.USER, - constraints: { presence: true }, - }, - }) - const table = await createMainTable([ - { name: "aux", tableId: auxTable._id!, fk: "fk_aux" }, - ]) - - const user = config.getUser() - const auxRow = await config.api.row.save(auxTable._id!, { - name: generator.name(), - user: user._id, - }) - await config.api.row.save(table._id!, { - title: generator.word(), - aux: [auxRow], - }) - - const view = await config.api.viewV2.create({ - tableId: table._id!, - name: generator.guid(), - schema: { - title: { visible: true }, - aux: { - visible: true, - columns: { - name: { visible: true, readonly: true }, - user: { visible: true, readonly: true }, - }, - }, - }, - }) - - const response = await config.api.viewV2.search(view.id) - - expect(response.rows).toEqual([ - expect.objectContaining({ - aux: [ - { - _id: auxRow._id, - primaryDisplay: auxRow.name, - name: auxRow.name, - user: { - _id: user._id, - email: user.email, - firstName: user.firstName, - lastName: user.lastName, - primaryDisplay: user.email, - }, - }, - ], - }), - ]) - }) - }) - - describe("calculations", () => { - let table: Table - let rows: Row[] - - beforeAll(async () => { - table = await config.api.table.save( - saveTableRequest({ - schema: { - quantity: { - type: FieldType.NUMBER, - name: "quantity", - }, - price: { - type: FieldType.NUMBER, - name: "price", - }, - }, - }) - ) - - rows = await Promise.all( - Array.from({ length: 10 }, () => - config.api.row.save(table._id!, { - quantity: generator.natural({ min: 1, max: 10 }), - price: generator.natural({ min: 1, max: 10 }), + describe("foreign relationship columns", () => { + const createMainTable = async ( + links: { + name: string + tableId: string + fk: string + }[] + ) => { + const table = await config.api.table.save( + saveTableRequest({ + schema: { title: { name: "title", type: FieldType.STRING } }, }) ) - ) - }) - - it("should be able to search by calculations", async () => { - const view = await config.api.viewV2.create({ - tableId: table._id!, - type: ViewV2Type.CALCULATION, - name: generator.guid(), - schema: { - "Quantity Sum": { - visible: true, - calculationType: CalculationType.SUM, - field: "quantity", - }, - }, - }) - - const response = await config.api.viewV2.search(view.id, { - query: {}, - }) - - expect(response.rows).toHaveLength(1) - expect(response.rows).toEqual( - expect.arrayContaining([ - expect.objectContaining({ - "Quantity Sum": rows.reduce((acc, r) => acc + r.quantity, 0), - }), - ]) - ) - - // Calculation views do not return rows that can be linked back to - // the source table, and so should not have an _id field. 
-          for (const row of response.rows) {
-            expect("_id" in row).toBe(false)
-          }
-        })
-
-        it("should be able to group by a basic field", async () => {
-          const view = await config.api.viewV2.create({
-            tableId: table._id!,
-            name: generator.guid(),
-            type: ViewV2Type.CALCULATION,
-            schema: {
-              quantity: {
-                visible: true,
-                field: "quantity",
-              },
-              "Total Price": {
-                visible: true,
-                calculationType: CalculationType.SUM,
-                field: "price",
-              },
-            },
-          })
-
-          const response = await config.api.viewV2.search(view.id, {
-            query: {},
-          })
-
-          const priceByQuantity: Record<number, number> = {}
-          for (const row of rows) {
-            priceByQuantity[row.quantity] ??= 0
-            priceByQuantity[row.quantity] += row.price
-          }
-
-          for (const row of response.rows) {
-            expect(row["Total Price"]).toEqual(priceByQuantity[row.quantity])
-          }
-        })
-
-        it.each([
-          CalculationType.COUNT,
-          CalculationType.SUM,
-          CalculationType.AVG,
-          CalculationType.MIN,
-          CalculationType.MAX,
-        ])("should be able to calculate $type", async type => {
-          const view = await config.api.viewV2.create({
-            tableId: table._id!,
-            name: generator.guid(),
-            type: ViewV2Type.CALCULATION,
-            schema: {
-              aggregate: {
-                visible: true,
-                calculationType: type,
-                field: "price",
-              },
-            },
-          })
-
-          const response = await config.api.viewV2.search(view.id, {
-            query: {},
-          })
-
-          function calculate(
-            type: CalculationType,
-            numbers: number[]
-          ): number {
-            switch (type) {
-              case CalculationType.COUNT:
-                return numbers.length
-              case CalculationType.SUM:
-                return numbers.reduce((a, b) => a + b, 0)
-              case CalculationType.AVG:
-                return numbers.reduce((a, b) => a + b, 0) / numbers.length
-              case CalculationType.MIN:
-                return Math.min(...numbers)
-              case CalculationType.MAX:
-                return Math.max(...numbers)
-            }
-          }
-
-          const prices = rows.map(row => row.price)
-          const expected = calculate(type, prices)
-          const actual = response.rows[0].aggregate
-
-          if (type === CalculationType.AVG) {
-            // The average calculation can introduce floating point rounding
-            // errors, so we need to compare to within a small margin of
-            // error.
- expect(actual).toBeCloseTo(expected) - } else { - expect(actual).toEqual(expected) - } - }) - - it("should be able to do a COUNT(DISTINCT)", async () => { - const table = await config.api.table.save( - saveTableRequest({ + await config.api.table.save({ + ...table, schema: { - name: { - name: "name", - type: FieldType.STRING, + ...table.schema, + ...links.reduce((acc, c) => { + acc[c.name] = { + name: c.name, + relationshipType: RelationshipType.ONE_TO_MANY, + type: FieldType.LINK, + tableId: c.tableId, + fieldName: c.fk, + constraints: { type: "array" }, + } + return acc + }, {}), + }, + }) + return table + } + const createAuxTable = (schema: TableSchema) => + config.api.table.save( + saveTableRequest({ + primaryDisplay: "name", + schema: { + ...schema, + name: { name: "name", type: FieldType.STRING }, + }, + }) + ) + + it("returns squashed fields respecting the view config", async () => { + const auxTable = await createAuxTable({ + age: { name: "age", type: FieldType.NUMBER }, + }) + const auxRow = await config.api.row.save(auxTable._id!, { + name: generator.name(), + age: generator.age(), + }) + + const table = await createMainTable([ + { name: "aux", tableId: auxTable._id!, fk: "fk_aux" }, + ]) + await config.api.row.save(table._id!, { + title: generator.word(), + aux: [auxRow], + }) + + const view = await config.api.viewV2.create({ + tableId: table._id!, + name: generator.guid(), + schema: { + title: { visible: true }, + aux: { + visible: true, + columns: { + name: { visible: false, readonly: false }, + age: { visible: true, readonly: true }, + }, }, }, }) - ) - const view = await config.api.viewV2.create({ - tableId: table._id!, - name: generator.guid(), - type: ViewV2Type.CALCULATION, - schema: { - count: { - visible: true, - calculationType: CalculationType.COUNT, - distinct: true, - field: "name", - }, - }, + const response = await config.api.viewV2.search(view.id) + expect(response.rows).toEqual([ + expect.objectContaining({ + aux: [ + { + _id: auxRow._id, + primaryDisplay: auxRow.name, + age: auxRow.age, + }, + ], + }), + ]) }) - await config.api.row.bulkImport(table._id!, { - rows: [ - { - name: "John", + it("enriches squashed fields", async () => { + const auxTable = await createAuxTable({ + user: { + name: "user", + type: FieldType.BB_REFERENCE_SINGLE, + subtype: BBReferenceFieldSubType.USER, + constraints: { presence: true }, }, - { - name: "John", - }, - { - name: "Sue", - }, - ], - }) + }) + const table = await createMainTable([ + { name: "aux", tableId: auxTable._id!, fk: "fk_aux" }, + ]) - const { rows } = await config.api.row.search(view.id) - expect(rows).toHaveLength(1) - expect(rows[0].count).toEqual(2) + const user = config.getUser() + const auxRow = await config.api.row.save(auxTable._id!, { + name: generator.name(), + user: user._id, + }) + await config.api.row.save(table._id!, { + title: generator.word(), + aux: [auxRow], + }) + + const view = await config.api.viewV2.create({ + tableId: table._id!, + name: generator.guid(), + schema: { + title: { visible: true }, + aux: { + visible: true, + columns: { + name: { visible: true, readonly: true }, + user: { visible: true, readonly: true }, + }, + }, + }, + }) + + const response = await config.api.viewV2.search(view.id) + + expect(response.rows).toEqual([ + expect.objectContaining({ + aux: [ + { + _id: auxRow._id, + primaryDisplay: auxRow.name, + name: auxRow.name, + user: { + _id: user._id, + email: user.email, + firstName: user.firstName, + lastName: user.lastName, + primaryDisplay: user.email, + }, + }, + ], + }), 
+              ])
+            })
+          })
-        it("should not be able to COUNT(DISTINCT ...) against a non-existent field", async () => {
-          await config.api.viewV2.create(
-            {
+          describe("calculations", () => {
+            let table: Table
+            let rows: Row[]
+
+            beforeAll(async () => {
+              table = await config.api.table.save(
+                saveTableRequest({
+                  schema: {
+                    quantity: {
+                      type: FieldType.NUMBER,
+                      name: "quantity",
+                    },
+                    price: {
+                      type: FieldType.NUMBER,
+                      name: "price",
+                    },
+                  },
+                })
+              )
+
+              rows = await Promise.all(
+                Array.from({ length: 10 }, () =>
+                  config.api.row.save(table._id!, {
+                    quantity: generator.natural({ min: 1, max: 10 }),
+                    price: generator.natural({ min: 1, max: 10 }),
+                  })
+                )
+              )
+            })
+
+            it("should be able to search by calculations", async () => {
+              const view = await config.api.viewV2.create({
+                tableId: table._id!,
+                type: ViewV2Type.CALCULATION,
+                name: generator.guid(),
+                schema: {
+                  "Quantity Sum": {
+                    visible: true,
+                    calculationType: CalculationType.SUM,
+                    field: "quantity",
+                  },
+                },
+              })
+
+              const response = await config.api.viewV2.search(view.id, {
+                query: {},
+              })
+
+              expect(response.rows).toHaveLength(1)
+              expect(response.rows).toEqual(
+                expect.arrayContaining([
+                  expect.objectContaining({
+                    "Quantity Sum": rows.reduce(
+                      (acc, r) => acc + r.quantity,
+                      0
+                    ),
+                  }),
+                ])
+              )
+
+              // Calculation views do not return rows that can be linked back to
+              // the source table, and so should not have an _id field.
+              for (const row of response.rows) {
+                expect("_id" in row).toBe(false)
+              }
+            })
+
+            it("should be able to group by a basic field", async () => {
+              const view = await config.api.viewV2.create({
+                tableId: table._id!,
+                name: generator.guid(),
+                type: ViewV2Type.CALCULATION,
+                schema: {
+                  quantity: {
+                    visible: true,
+                    field: "quantity",
+                  },
+                  "Total Price": {
+                    visible: true,
+                    calculationType: CalculationType.SUM,
+                    field: "price",
+                  },
+                },
+              })
+
+              const response = await config.api.viewV2.search(view.id, {
+                query: {},
+              })
+
+              const priceByQuantity: Record<number, number> = {}
+              for (const row of rows) {
+                priceByQuantity[row.quantity] ??= 0
+                priceByQuantity[row.quantity] += row.price
+              }
+
+              for (const row of response.rows) {
+                expect(row["Total Price"]).toEqual(
+                  priceByQuantity[row.quantity]
+                )
+              }
+            })
+
+            it.each([
+              CalculationType.COUNT,
+              CalculationType.SUM,
+              CalculationType.AVG,
+              CalculationType.MIN,
+              CalculationType.MAX,
+            ])("should be able to calculate $type", async type => {
+              const view = await config.api.viewV2.create({
+                tableId: table._id!,
+                name: generator.guid(),
+                type: ViewV2Type.CALCULATION,
+                schema: {
+                  aggregate: {
+                    visible: true,
+                    calculationType: type,
+                    field: "price",
+                  },
+                },
+              })
+
+              const response = await config.api.viewV2.search(view.id, {
+                query: {},
+              })
+
+              function calculate(
+                type: CalculationType,
+                numbers: number[]
+              ): number {
+                switch (type) {
+                  case CalculationType.COUNT:
+                    return numbers.length
+                  case CalculationType.SUM:
+                    return numbers.reduce((a, b) => a + b, 0)
+                  case CalculationType.AVG:
+                    return numbers.reduce((a, b) => a + b, 0) / numbers.length
+                  case CalculationType.MIN:
+                    return Math.min(...numbers)
+                  case CalculationType.MAX:
+                    return Math.max(...numbers)
+                }
+              }
+
+              const prices = rows.map(row => row.price)
+              const expected = calculate(type, prices)
+              const actual = response.rows[0].aggregate
+
+              if (type === CalculationType.AVG) {
+                // The average calculation can introduce floating point rounding
+                // errors, so we need to compare to within a small margin of
+                // error.
+ expect(actual).toBeCloseTo(expected) + } else { + expect(actual).toEqual(expected) + } + }) + + it("should be able to do a COUNT(DISTINCT)", async () => { + const table = await config.api.table.save( + saveTableRequest({ + schema: { + name: { + name: "name", + type: FieldType.STRING, + }, + }, + }) + ) + + const view = await config.api.viewV2.create({ tableId: table._id!, name: generator.guid(), type: ViewV2Type.CALCULATION, @@ -3627,71 +3608,474 @@ datasourceDescribe( visible: true, calculationType: CalculationType.COUNT, distinct: true, - field: "does not exist oh no", - }, - }, - }, - { - status: 400, - body: { - message: - 'Calculation field "count" references field "does not exist oh no" which does not exist in the table schema', - }, - } - ) - }) - - it("should be able to filter on relationships", async () => { - const companies = await config.api.table.save( - saveTableRequest({ - schema: { - name: { - name: "name", - type: FieldType.STRING, + field: "name", }, }, }) - ) - const employees = await config.api.table.save( - saveTableRequest({ - schema: { - age: { - type: FieldType.NUMBER, - name: "age", - }, - name: { - type: FieldType.STRING, - name: "name", - }, - company: { - type: FieldType.LINK, - name: "company", - tableId: companies._id!, - relationshipType: RelationshipType.ONE_TO_MANY, - fieldName: "company", - }, - }, - }) - ) - - const view = await config.api.viewV2.create({ - tableId: employees._id!, - name: generator.guid(), - type: ViewV2Type.CALCULATION, - queryUI: { - groups: [ + await config.api.row.bulkImport(table._id!, { + rows: [ { - filters: [ - { - operator: BasicOperator.EQUAL, - field: "company.name", - value: "Aperture Science Laboratories", - }, - ], + name: "John", + }, + { + name: "John", + }, + { + name: "Sue", }, ], - }, + }) + + const { rows } = await config.api.row.search(view.id) + expect(rows).toHaveLength(1) + expect(rows[0].count).toEqual(2) + }) + + it("should not be able to COUNT(DISTINCT ...) 
against a non-existent field", async () => { + await config.api.viewV2.create( + { + tableId: table._id!, + name: generator.guid(), + type: ViewV2Type.CALCULATION, + schema: { + count: { + visible: true, + calculationType: CalculationType.COUNT, + distinct: true, + field: "does not exist oh no", + }, + }, + }, + { + status: 400, + body: { + message: + 'Calculation field "count" references field "does not exist oh no" which does not exist in the table schema', + }, + } + ) + }) + + it("should be able to filter on relationships", async () => { + const companies = await config.api.table.save( + saveTableRequest({ + schema: { + name: { + name: "name", + type: FieldType.STRING, + }, + }, + }) + ) + + const employees = await config.api.table.save( + saveTableRequest({ + schema: { + age: { + type: FieldType.NUMBER, + name: "age", + }, + name: { + type: FieldType.STRING, + name: "name", + }, + company: { + type: FieldType.LINK, + name: "company", + tableId: companies._id!, + relationshipType: RelationshipType.ONE_TO_MANY, + fieldName: "company", + }, + }, + }) + ) + + const view = await config.api.viewV2.create({ + tableId: employees._id!, + name: generator.guid(), + type: ViewV2Type.CALCULATION, + queryUI: { + groups: [ + { + filters: [ + { + operator: BasicOperator.EQUAL, + field: "company.name", + value: "Aperture Science Laboratories", + }, + ], + }, + ], + }, + schema: { + sum: { + visible: true, + calculationType: CalculationType.SUM, + field: "age", + }, + }, + }) + + const apertureScience = await config.api.row.save( + companies._id!, + { + name: "Aperture Science Laboratories", + } + ) + + const blackMesa = await config.api.row.save(companies._id!, { + name: "Black Mesa", + }) + + await Promise.all([ + config.api.row.save(employees._id!, { + name: "Alice", + age: 25, + company: apertureScience._id, + }), + config.api.row.save(employees._id!, { + name: "Bob", + age: 30, + company: apertureScience._id, + }), + config.api.row.save(employees._id!, { + name: "Charly", + age: 27, + company: blackMesa._id, + }), + config.api.row.save(employees._id!, { + name: "Danny", + age: 15, + company: blackMesa._id, + }), + ]) + + const { rows } = await config.api.viewV2.search(view.id, { + query: {}, + }) + + expect(rows).toHaveLength(1) + expect(rows[0].sum).toEqual(55) + }) + + it("should be able to count non-numeric fields", async () => { + const table = await config.api.table.save( + saveTableRequest({ + schema: { + firstName: { + type: FieldType.STRING, + name: "firstName", + }, + lastName: { + type: FieldType.STRING, + name: "lastName", + }, + }, + }) + ) + + const view = await config.api.viewV2.create({ + tableId: table._id!, + name: generator.guid(), + type: ViewV2Type.CALCULATION, + schema: { + count: { + visible: true, + calculationType: CalculationType.COUNT, + field: "firstName", + }, + }, + }) + + await config.api.row.bulkImport(table._id!, { + rows: [ + { firstName: "Jane", lastName: "Smith" }, + { firstName: "Jane", lastName: "Doe" }, + { firstName: "Alice", lastName: "Smith" }, + ], + }) + + const { rows } = await config.api.viewV2.search(view.id, { + query: {}, + }) + + expect(rows).toHaveLength(1) + expect(rows[0].count).toEqual(3) + }) + + it("should be able to filter rows on the view itself", async () => { + const table = await config.api.table.save( + saveTableRequest({ + schema: { + quantity: { + type: FieldType.NUMBER, + name: "quantity", + }, + price: { + type: FieldType.NUMBER, + name: "price", + }, + }, + }) + ) + + const view = await config.api.viewV2.create({ + tableId: 
table._id!, + name: generator.guid(), + type: ViewV2Type.CALCULATION, + queryUI: { + groups: [ + { + filters: [ + { + operator: BasicOperator.EQUAL, + field: "quantity", + value: 1, + }, + ], + }, + ], + }, + schema: { + sum: { + visible: true, + calculationType: CalculationType.SUM, + field: "price", + }, + }, + }) + + await config.api.row.bulkImport(table._id!, { + rows: [ + { + quantity: 1, + price: 1, + }, + { + quantity: 1, + price: 2, + }, + { + quantity: 2, + price: 10, + }, + ], + }) + + const { rows } = await config.api.viewV2.search(view.id, { + query: {}, + }) + expect(rows).toHaveLength(1) + expect(rows[0].sum).toEqual(3) + }) + + it("should be able to filter on group by fields", async () => { + const table = await config.api.table.save( + saveTableRequest({ + schema: { + quantity: { + type: FieldType.NUMBER, + name: "quantity", + }, + price: { + type: FieldType.NUMBER, + name: "price", + }, + }, + }) + ) + + const view = await config.api.viewV2.create({ + tableId: table._id!, + name: generator.guid(), + type: ViewV2Type.CALCULATION, + schema: { + quantity: { visible: true }, + sum: { + visible: true, + calculationType: CalculationType.SUM, + field: "price", + }, + }, + }) + + await config.api.row.bulkImport(table._id!, { + rows: [ + { + quantity: 1, + price: 1, + }, + { + quantity: 1, + price: 2, + }, + { + quantity: 2, + price: 10, + }, + ], + }) + + const { rows } = await config.api.viewV2.search(view.id, { + query: { + equal: { + quantity: 1, + }, + }, + }) + + expect(rows).toHaveLength(1) + expect(rows[0].sum).toEqual(3) + }) + + it("should be able to sort by group by field", async () => { + const table = await config.api.table.save( + saveTableRequest({ + schema: { + quantity: { + type: FieldType.NUMBER, + name: "quantity", + }, + price: { + type: FieldType.NUMBER, + name: "price", + }, + }, + }) + ) + + const view = await config.api.viewV2.create({ + tableId: table._id!, + name: generator.guid(), + type: ViewV2Type.CALCULATION, + schema: { + quantity: { visible: true }, + sum: { + visible: true, + calculationType: CalculationType.SUM, + field: "price", + }, + }, + }) + + await config.api.row.bulkImport(table._id!, { + rows: [ + { + quantity: 1, + price: 1, + }, + { + quantity: 1, + price: 2, + }, + { + quantity: 2, + price: 10, + }, + ], + }) + + const { rows } = await config.api.viewV2.search(view.id, { + query: {}, + sort: "quantity", + sortOrder: SortOrder.DESCENDING, + }) + + expect(rows).toEqual([ + expect.objectContaining({ quantity: 2, sum: 10 }), + expect.objectContaining({ quantity: 1, sum: 3 }), + ]) + }) + + it("should be able to sort by a calculation", async () => { + const table = await config.api.table.save( + saveTableRequest({ + schema: { + quantity: { + type: FieldType.NUMBER, + name: "quantity", + }, + price: { + type: FieldType.NUMBER, + name: "price", + }, + }, + }) + ) + + await config.api.row.bulkImport(table._id!, { + rows: [ + { + quantity: 1, + price: 1, + }, + { + quantity: 1, + price: 2, + }, + { + quantity: 2, + price: 10, + }, + ], + }) + + const view = await config.api.viewV2.create({ + tableId: table._id!, + name: generator.guid(), + type: ViewV2Type.CALCULATION, + schema: { + quantity: { visible: true }, + sum: { + visible: true, + calculationType: CalculationType.SUM, + field: "price", + }, + }, + }) + + const { rows } = await config.api.viewV2.search(view.id, { + query: {}, + sort: "sum", + sortOrder: SortOrder.DESCENDING, + }) + + expect(rows).toEqual([ + expect.objectContaining({ quantity: 2, sum: 10 }), + expect.objectContaining({ 
quantity: 1, sum: 3 }), + ]) + }) + }) + + it("should not need required fields to be present", async () => { + const table = await config.api.table.save( + saveTableRequest({ + schema: { + name: { + name: "name", + type: FieldType.STRING, + constraints: { + presence: true, + }, + }, + age: { + name: "age", + type: FieldType.NUMBER, + }, + }, + }) + ) + + await Promise.all([ + config.api.row.save(table._id!, { name: "Steve", age: 30 }), + config.api.row.save(table._id!, { name: "Jane", age: 31 }), + ]) + + const view = await config.api.viewV2.create({ + tableId: table._id!, + name: generator.guid(), + type: ViewV2Type.CALCULATION, schema: { sum: { visible: true, @@ -3701,934 +4085,572 @@ datasourceDescribe( }, }) - const apertureScience = await config.api.row.save(companies._id!, { - name: "Aperture Science Laboratories", - }) - - const blackMesa = await config.api.row.save(companies._id!, { - name: "Black Mesa", - }) - - await Promise.all([ - config.api.row.save(employees._id!, { - name: "Alice", - age: 25, - company: apertureScience._id, - }), - config.api.row.save(employees._id!, { - name: "Bob", - age: 30, - company: apertureScience._id, - }), - config.api.row.save(employees._id!, { - name: "Charly", - age: 27, - company: blackMesa._id, - }), - config.api.row.save(employees._id!, { - name: "Danny", - age: 15, - company: blackMesa._id, - }), - ]) - - const { rows } = await config.api.viewV2.search(view.id, { + const response = await config.api.viewV2.search(view.id, { query: {}, }) - expect(rows).toHaveLength(1) - expect(rows[0].sum).toEqual(55) + expect(response.rows).toHaveLength(1) + expect(response.rows[0].sum).toEqual(61) }) - it("should be able to count non-numeric fields", async () => { + it("should be able to filter on a single user field in both the view query and search query", async () => { const table = await config.api.table.save( saveTableRequest({ schema: { - firstName: { - type: FieldType.STRING, - name: "firstName", - }, - lastName: { - type: FieldType.STRING, - name: "lastName", + user: { + name: "user", + type: FieldType.BB_REFERENCE_SINGLE, + subtype: BBReferenceFieldSubType.USER, }, }, }) ) - const view = await config.api.viewV2.create({ - tableId: table._id!, - name: generator.guid(), - type: ViewV2Type.CALCULATION, - schema: { - count: { - visible: true, - calculationType: CalculationType.COUNT, - field: "firstName", - }, - }, + await config.api.row.save(table._id!, { + user: config.getUser()._id, }) - await config.api.row.bulkImport(table._id!, { - rows: [ - { firstName: "Jane", lastName: "Smith" }, - { firstName: "Jane", lastName: "Doe" }, - { firstName: "Alice", lastName: "Smith" }, - ], - }) - - const { rows } = await config.api.viewV2.search(view.id, { - query: {}, - }) - - expect(rows).toHaveLength(1) - expect(rows[0].count).toEqual(3) - }) - - it("should be able to filter rows on the view itself", async () => { - const table = await config.api.table.save( - saveTableRequest({ - schema: { - quantity: { - type: FieldType.NUMBER, - name: "quantity", - }, - price: { - type: FieldType.NUMBER, - name: "price", - }, - }, - }) - ) - const view = await config.api.viewV2.create({ tableId: table._id!, name: generator.guid(), - type: ViewV2Type.CALCULATION, queryUI: { groups: [ { filters: [ { operator: BasicOperator.EQUAL, - field: "quantity", - value: 1, + field: "user", + value: "{{ [user].[_id] }}", }, ], }, ], }, schema: { - sum: { + user: { visible: true, - calculationType: CalculationType.SUM, - field: "price", }, }, }) - await 
config.api.row.bulkImport(table._id!, { - rows: [ - { - quantity: 1, - price: 1, - }, - { - quantity: 1, - price: 2, - }, - { - quantity: 2, - price: 10, - }, - ], - }) - - const { rows } = await config.api.viewV2.search(view.id, { - query: {}, - }) - expect(rows).toHaveLength(1) - expect(rows[0].sum).toEqual(3) - }) - - it("should be able to filter on group by fields", async () => { - const table = await config.api.table.save( - saveTableRequest({ - schema: { - quantity: { - type: FieldType.NUMBER, - name: "quantity", - }, - price: { - type: FieldType.NUMBER, - name: "price", - }, - }, - }) - ) - - const view = await config.api.viewV2.create({ - tableId: table._id!, - name: generator.guid(), - type: ViewV2Type.CALCULATION, - schema: { - quantity: { visible: true }, - sum: { - visible: true, - calculationType: CalculationType.SUM, - field: "price", - }, - }, - }) - - await config.api.row.bulkImport(table._id!, { - rows: [ - { - quantity: 1, - price: 1, - }, - { - quantity: 1, - price: 2, - }, - { - quantity: 2, - price: 10, - }, - ], - }) - const { rows } = await config.api.viewV2.search(view.id, { query: { equal: { - quantity: 1, + user: "{{ [user].[_id] }}", }, }, }) expect(rows).toHaveLength(1) - expect(rows[0].sum).toEqual(3) + expect(rows[0].user._id).toEqual(config.getUser()._id) }) - it("should be able to sort by group by field", async () => { - const table = await config.api.table.save( - saveTableRequest({ - schema: { - quantity: { - type: FieldType.NUMBER, - name: "quantity", - }, - price: { - type: FieldType.NUMBER, - name: "price", - }, - }, - }) - ) - - const view = await config.api.viewV2.create({ - tableId: table._id!, - name: generator.guid(), - type: ViewV2Type.CALCULATION, - schema: { - quantity: { visible: true }, - sum: { - visible: true, - calculationType: CalculationType.SUM, - field: "price", - }, - }, - }) - - await config.api.row.bulkImport(table._id!, { - rows: [ - { - quantity: 1, - price: 1, - }, - { - quantity: 1, - price: 2, - }, - { - quantity: 2, - price: 10, - }, - ], - }) - - const { rows } = await config.api.viewV2.search(view.id, { - query: {}, - sort: "quantity", - sortOrder: SortOrder.DESCENDING, - }) - - expect(rows).toEqual([ - expect.objectContaining({ quantity: 2, sum: 10 }), - expect.objectContaining({ quantity: 1, sum: 3 }), - ]) - }) - - it("should be able to sort by a calculation", async () => { - const table = await config.api.table.save( - saveTableRequest({ - schema: { - quantity: { - type: FieldType.NUMBER, - name: "quantity", - }, - price: { - type: FieldType.NUMBER, - name: "price", - }, - }, - }) - ) - - await config.api.row.bulkImport(table._id!, { - rows: [ - { - quantity: 1, - price: 1, - }, - { - quantity: 1, - price: 2, - }, - { - quantity: 2, - price: 10, - }, - ], - }) - - const view = await config.api.viewV2.create({ - tableId: table._id!, - name: generator.guid(), - type: ViewV2Type.CALCULATION, - schema: { - quantity: { visible: true }, - sum: { - visible: true, - calculationType: CalculationType.SUM, - field: "price", - }, - }, - }) - - const { rows } = await config.api.viewV2.search(view.id, { - query: {}, - sort: "sum", - sortOrder: SortOrder.DESCENDING, - }) - - expect(rows).toEqual([ - expect.objectContaining({ quantity: 2, sum: 10 }), - expect.objectContaining({ quantity: 1, sum: 3 }), - ]) - }) - }) - - it("should not need required fields to be present", async () => { - const table = await config.api.table.save( - saveTableRequest({ - schema: { - name: { - name: "name", - type: FieldType.STRING, - constraints: { - 
presence: true, - }, - }, - age: { - name: "age", - type: FieldType.NUMBER, - }, - }, - }) - ) - - await Promise.all([ - config.api.row.save(table._id!, { name: "Steve", age: 30 }), - config.api.row.save(table._id!, { name: "Jane", age: 31 }), - ]) - - const view = await config.api.viewV2.create({ - tableId: table._id!, - name: generator.guid(), - type: ViewV2Type.CALCULATION, - schema: { - sum: { - visible: true, - calculationType: CalculationType.SUM, - field: "age", - }, - }, - }) - - const response = await config.api.viewV2.search(view.id, { - query: {}, - }) - - expect(response.rows).toHaveLength(1) - expect(response.rows[0].sum).toEqual(61) - }) - - it("should be able to filter on a single user field in both the view query and search query", async () => { - const table = await config.api.table.save( - saveTableRequest({ - schema: { - user: { - name: "user", - type: FieldType.BB_REFERENCE_SINGLE, - subtype: BBReferenceFieldSubType.USER, - }, - }, - }) - ) - - await config.api.row.save(table._id!, { - user: config.getUser()._id, - }) - - const view = await config.api.viewV2.create({ - tableId: table._id!, - name: generator.guid(), - queryUI: { - groups: [ - { - filters: [ - { - operator: BasicOperator.EQUAL, - field: "user", - value: "{{ [user].[_id] }}", + describe("search operators", () => { + let table: Table + beforeEach(async () => { + table = await config.api.table.save( + saveTableRequest({ + schema: { + string: { name: "string", type: FieldType.STRING }, + longform: { name: "longform", type: FieldType.LONGFORM }, + options: { + name: "options", + type: FieldType.OPTIONS, + constraints: { inclusion: ["a", "b", "c"] }, }, - ], - }, - ], - }, - schema: { - user: { - visible: true, - }, - }, - }) - - const { rows } = await config.api.viewV2.search(view.id, { - query: { - equal: { - user: "{{ [user].[_id] }}", - }, - }, - }) - - expect(rows).toHaveLength(1) - expect(rows[0].user._id).toEqual(config.getUser()._id) - }) - - describe("search operators", () => { - let table: Table - beforeEach(async () => { - table = await config.api.table.save( - saveTableRequest({ - schema: { - string: { name: "string", type: FieldType.STRING }, - longform: { name: "longform", type: FieldType.LONGFORM }, - options: { - name: "options", - type: FieldType.OPTIONS, - constraints: { inclusion: ["a", "b", "c"] }, - }, - array: { - name: "array", - type: FieldType.ARRAY, - constraints: { - type: JsonFieldSubType.ARRAY, - inclusion: ["a", "b", "c"], - }, - }, - number: { name: "number", type: FieldType.NUMBER }, - bigint: { name: "bigint", type: FieldType.BIGINT }, - datetime: { name: "datetime", type: FieldType.DATETIME }, - boolean: { name: "boolean", type: FieldType.BOOLEAN }, - user: { - name: "user", - type: FieldType.BB_REFERENCE_SINGLE, - subtype: BBReferenceFieldSubType.USER, - }, - users: { - name: "users", - type: FieldType.BB_REFERENCE, - subtype: BBReferenceFieldSubType.USER, - constraints: { - type: JsonFieldSubType.ARRAY, + array: { + name: "array", + type: FieldType.ARRAY, + constraints: { + type: JsonFieldSubType.ARRAY, + inclusion: ["a", "b", "c"], + }, + }, + number: { name: "number", type: FieldType.NUMBER }, + bigint: { name: "bigint", type: FieldType.BIGINT }, + datetime: { name: "datetime", type: FieldType.DATETIME }, + boolean: { name: "boolean", type: FieldType.BOOLEAN }, + user: { + name: "user", + type: FieldType.BB_REFERENCE_SINGLE, + subtype: BBReferenceFieldSubType.USER, + }, + users: { + name: "users", + type: FieldType.BB_REFERENCE, + subtype: BBReferenceFieldSubType.USER, + 
constraints: { + type: JsonFieldSubType.ARRAY, + }, }, }, + }) + ) + }) + + interface TestCase { + name: string + query: UISearchFilter | (() => UISearchFilter) + insert: Row[] | (() => Row[]) + expected: Row[] | (() => Row[]) + searchOpts?: Partial + } + + function simpleQuery(...filters: LegacyFilter[]): UISearchFilter { + return { groups: [{ filters }] } + } + + const testCases: TestCase[] = [ + { + name: "empty query return all", + insert: [{ string: "foo" }], + query: { + onEmptyFilter: EmptyFilterOption.RETURN_ALL, }, - }) - ) - }) - - interface TestCase { - name: string - query: UISearchFilter | (() => UISearchFilter) - insert: Row[] | (() => Row[]) - expected: Row[] | (() => Row[]) - searchOpts?: Partial - } - - function simpleQuery(...filters: LegacyFilter[]): UISearchFilter { - return { groups: [{ filters }] } - } - - const testCases: TestCase[] = [ - { - name: "empty query return all", - insert: [{ string: "foo" }], - query: { - onEmptyFilter: EmptyFilterOption.RETURN_ALL, + expected: [{ string: "foo" }], }, - expected: [{ string: "foo" }], - }, - { - name: "empty query return none", - insert: [{ string: "foo" }], - query: { - onEmptyFilter: EmptyFilterOption.RETURN_NONE, + { + name: "empty query return none", + insert: [{ string: "foo" }], + query: { + onEmptyFilter: EmptyFilterOption.RETURN_NONE, + }, + expected: [], }, - expected: [], - }, - { - name: "simple string search", - insert: [{ string: "foo" }], - query: simpleQuery({ - operator: BasicOperator.EQUAL, - field: "string", - value: "foo", - }), - expected: [{ string: "foo" }], - }, - { - name: "non matching string search", - insert: [{ string: "foo" }], - query: simpleQuery({ - operator: BasicOperator.EQUAL, - field: "string", - value: "bar", - }), - expected: [], - }, - { - name: "allOr", - insert: [{ string: "bar" }, { string: "foo" }], - query: simpleQuery( - { + { + name: "simple string search", + insert: [{ string: "foo" }], + query: simpleQuery({ operator: BasicOperator.EQUAL, field: "string", value: "foo", - }, - { + }), + expected: [{ string: "foo" }], + }, + { + name: "non matching string search", + insert: [{ string: "foo" }], + query: simpleQuery({ operator: BasicOperator.EQUAL, field: "string", value: "bar", + }), + expected: [], + }, + { + name: "allOr", + insert: [{ string: "bar" }, { string: "foo" }], + query: simpleQuery( + { + operator: BasicOperator.EQUAL, + field: "string", + value: "foo", + }, + { + operator: BasicOperator.EQUAL, + field: "string", + value: "bar", + }, + { + operator: "allOr", + } + ), + searchOpts: { + sort: "string", + sortOrder: SortOrder.ASCENDING, }, - { - operator: "allOr", - } - ), - searchOpts: { - sort: "string", - sortOrder: SortOrder.ASCENDING, + expected: [{ string: "bar" }, { string: "foo" }], }, - expected: [{ string: "bar" }, { string: "foo" }], - }, - { - name: "can find rows with fuzzy search", - insert: [{ string: "foo" }, { string: "bar" }], - query: simpleQuery({ - operator: BasicOperator.FUZZY, - field: "string", - value: "fo", - }), - expected: [{ string: "foo" }], - }, - { - name: "can find nothing with fuzzy search", - insert: [{ string: "foo" }, { string: "bar" }], - query: simpleQuery({ - operator: BasicOperator.FUZZY, - field: "string", - value: "baz", - }), - expected: [], - }, - { - name: "can find numeric rows", - insert: [{ number: 1 }, { number: 2 }], - query: simpleQuery({ - operator: BasicOperator.EQUAL, - field: "number", - value: 1, - }), - expected: [{ number: 1 }], - }, - { - name: "can find numeric values with rangeHigh", - insert: [{ number: 1 
}, { number: 2 }, { number: 3 }], - query: simpleQuery({ - operator: "rangeHigh", - field: "number", - value: 2, - }), - searchOpts: { - sort: "number", - sortOrder: SortOrder.ASCENDING, + { + name: "can find rows with fuzzy search", + insert: [{ string: "foo" }, { string: "bar" }], + query: simpleQuery({ + operator: BasicOperator.FUZZY, + field: "string", + value: "fo", + }), + expected: [{ string: "foo" }], }, - expected: [{ number: 1 }, { number: 2 }], - }, - { - name: "can find numeric values with rangeLow", - insert: [{ number: 1 }, { number: 2 }, { number: 3 }], - query: simpleQuery({ - operator: "rangeLow", - field: "number", - value: 2, - }), - searchOpts: { - sort: "number", - sortOrder: SortOrder.ASCENDING, + { + name: "can find nothing with fuzzy search", + insert: [{ string: "foo" }, { string: "bar" }], + query: simpleQuery({ + operator: BasicOperator.FUZZY, + field: "string", + value: "baz", + }), + expected: [], }, - expected: [{ number: 2 }, { number: 3 }], - }, - { - name: "can find numeric values with full range", - insert: [{ number: 1 }, { number: 2 }, { number: 3 }], - query: simpleQuery( - { + { + name: "can find numeric rows", + insert: [{ number: 1 }, { number: 2 }], + query: simpleQuery({ + operator: BasicOperator.EQUAL, + field: "number", + value: 1, + }), + expected: [{ number: 1 }], + }, + { + name: "can find numeric values with rangeHigh", + insert: [{ number: 1 }, { number: 2 }, { number: 3 }], + query: simpleQuery({ operator: "rangeHigh", field: "number", value: 2, + }), + searchOpts: { + sort: "number", + sortOrder: SortOrder.ASCENDING, }, - { + expected: [{ number: 1 }, { number: 2 }], + }, + { + name: "can find numeric values with rangeLow", + insert: [{ number: 1 }, { number: 2 }, { number: 3 }], + query: simpleQuery({ operator: "rangeLow", field: "number", value: 2, - } - ), - expected: [{ number: 2 }], - }, - { - name: "can find longform values", - insert: [{ longform: "foo" }, { longform: "bar" }], - query: simpleQuery({ - operator: BasicOperator.EQUAL, - field: "longform", - value: "foo", - }), - expected: [{ longform: "foo" }], - }, - { - name: "can find options values", - insert: [{ options: "a" }, { options: "b" }], - query: simpleQuery({ - operator: BasicOperator.EQUAL, - field: "options", - value: "a", - }), - expected: [{ options: "a" }], - }, - { - name: "can find array values", - insert: [ - // Number field here is just to guarantee order. 
- { number: 1, array: ["a"] }, - { number: 2, array: ["b"] }, - { number: 3, array: ["a", "c"] }, - ], - query: simpleQuery({ - operator: ArrayOperator.CONTAINS, - field: "array", - value: "a", - }), - searchOpts: { - sort: "number", - sortOrder: SortOrder.ASCENDING, - }, - expected: [{ array: ["a"] }, { array: ["a", "c"] }], - }, - { - name: "can find bigint values", - insert: [{ bigint: "1" }, { bigint: "2" }], - query: simpleQuery({ - operator: BasicOperator.EQUAL, - field: "bigint", - type: FieldType.BIGINT, - value: "1", - }), - expected: [{ bigint: "1" }], - }, - { - name: "can find datetime values", - insert: [ - { datetime: "2021-01-01T00:00:00.000Z" }, - { datetime: "2021-01-02T00:00:00.000Z" }, - ], - query: simpleQuery({ - operator: BasicOperator.EQUAL, - field: "datetime", - type: FieldType.DATETIME, - value: "2021-01-01", - }), - expected: [{ datetime: "2021-01-01T00:00:00.000Z" }], - }, - { - name: "can find boolean values", - insert: [{ boolean: true }, { boolean: false }], - query: simpleQuery({ - operator: BasicOperator.EQUAL, - field: "boolean", - value: true, - }), - expected: [{ boolean: true }], - }, - { - name: "can find user values", - insert: () => [{ user: config.getUser() }], - query: () => - simpleQuery({ - operator: BasicOperator.EQUAL, - field: "user", - value: config.getUser()._id, }), - expected: () => [ - { - user: expect.objectContaining({ _id: config.getUser()._id }), + searchOpts: { + sort: "number", + sortOrder: SortOrder.ASCENDING, }, - ], - }, - { - name: "can find users values", - insert: () => [{ users: [config.getUser()] }], - query: () => - simpleQuery({ - operator: ArrayOperator.CONTAINS, - field: "users", - value: [config.getUser()._id], + expected: [{ number: 2 }, { number: 3 }], + }, + { + name: "can find numeric values with full range", + insert: [{ number: 1 }, { number: 2 }, { number: 3 }], + query: simpleQuery( + { + operator: "rangeHigh", + field: "number", + value: 2, + }, + { + operator: "rangeLow", + field: "number", + value: 2, + } + ), + expected: [{ number: 2 }], + }, + { + name: "can find longform values", + insert: [{ longform: "foo" }, { longform: "bar" }], + query: simpleQuery({ + operator: BasicOperator.EQUAL, + field: "longform", + value: "foo", }), - expected: () => [ - { - users: [ - expect.objectContaining({ _id: config.getUser()._id }), + expected: [{ longform: "foo" }], + }, + { + name: "can find options values", + insert: [{ options: "a" }, { options: "b" }], + query: simpleQuery({ + operator: BasicOperator.EQUAL, + field: "options", + value: "a", + }), + expected: [{ options: "a" }], + }, + { + name: "can find array values", + insert: [ + // Number field here is just to guarantee order. 
+ { number: 1, array: ["a"] }, + { number: 2, array: ["b"] }, + { number: 3, array: ["a", "c"] }, + ], + query: simpleQuery({ + operator: ArrayOperator.CONTAINS, + field: "array", + value: "a", + }), + searchOpts: { + sort: "number", + sortOrder: SortOrder.ASCENDING, + }, + expected: [{ array: ["a"] }, { array: ["a", "c"] }], + }, + { + name: "can find bigint values", + insert: [{ bigint: "1" }, { bigint: "2" }], + query: simpleQuery({ + operator: BasicOperator.EQUAL, + field: "bigint", + type: FieldType.BIGINT, + value: "1", + }), + expected: [{ bigint: "1" }], + }, + { + name: "can find datetime values", + insert: [ + { datetime: "2021-01-01T00:00:00.000Z" }, + { datetime: "2021-01-02T00:00:00.000Z" }, + ], + query: simpleQuery({ + operator: BasicOperator.EQUAL, + field: "datetime", + type: FieldType.DATETIME, + value: "2021-01-01", + }), + expected: [{ datetime: "2021-01-01T00:00:00.000Z" }], + }, + { + name: "can find boolean values", + insert: [{ boolean: true }, { boolean: false }], + query: simpleQuery({ + operator: BasicOperator.EQUAL, + field: "boolean", + value: true, + }), + expected: [{ boolean: true }], + }, + { + name: "can find user values", + insert: () => [{ user: config.getUser() }], + query: () => + simpleQuery({ + operator: BasicOperator.EQUAL, + field: "user", + value: config.getUser()._id, + }), + expected: () => [ + { + user: expect.objectContaining({ + _id: config.getUser()._id, + }), + }, + ], + }, + { + name: "can find users values", + insert: () => [{ users: [config.getUser()] }], + query: () => + simpleQuery({ + operator: ArrayOperator.CONTAINS, + field: "users", + value: [config.getUser()._id], + }), + expected: () => [ + { + users: [ + expect.objectContaining({ _id: config.getUser()._id }), + ], + }, + ], + }, + { + name: "can handle logical operator any", + insert: [{ string: "bar" }, { string: "foo" }], + query: { + groups: [ + { + logicalOperator: UILogicalOperator.ANY, + filters: [ + { + operator: BasicOperator.EQUAL, + field: "string", + value: "foo", + }, + { + operator: BasicOperator.EQUAL, + field: "string", + value: "bar", + }, + ], + }, ], }, - ], - }, - { - name: "can handle logical operator any", - insert: [{ string: "bar" }, { string: "foo" }], - query: { - groups: [ - { - logicalOperator: UILogicalOperator.ANY, - filters: [ - { - operator: BasicOperator.EQUAL, - field: "string", - value: "foo", - }, - { - operator: BasicOperator.EQUAL, - field: "string", - value: "bar", - }, - ], - }, - ], - }, - searchOpts: { - sort: "string", - sortOrder: SortOrder.ASCENDING, - }, - expected: [{ string: "bar" }, { string: "foo" }], - }, - { - name: "can handle logical operator all", - insert: [ - { string: "bar", number: 1 }, - { string: "foo", number: 2 }, - ], - query: { - groups: [ - { - logicalOperator: UILogicalOperator.ALL, - filters: [ - { - operator: BasicOperator.EQUAL, - field: "string", - value: "foo", - }, - { - operator: BasicOperator.EQUAL, - field: "number", - value: 2, - }, - ], - }, - ], - }, - searchOpts: { - sort: "string", - sortOrder: SortOrder.ASCENDING, - }, - expected: [{ string: "foo", number: 2 }], - }, - { - name: "overrides allOr with logical operators", - insert: [ - { string: "bar", number: 1 }, - { string: "foo", number: 1 }, - ], - query: { - groups: [ - { - logicalOperator: UILogicalOperator.ALL, - filters: [ - { operator: "allOr" }, - { - operator: BasicOperator.EQUAL, - field: "string", - value: "foo", - }, - { - operator: BasicOperator.EQUAL, - field: "number", - value: 1, - }, - ], - }, - ], - }, - searchOpts: { - sort: 
"string", - sortOrder: SortOrder.ASCENDING, - }, - expected: [{ string: "foo", number: 1 }], - }, - ] - - it.each(testCases)( - "$name", - async ({ query, insert, expected, searchOpts }) => { - // Some values can't be specified outside of a test (e.g. getting - // config.getUser(), it won't be initialised), so we use functions - // in those cases. - if (typeof insert === "function") { - insert = insert() - } - if (typeof expected === "function") { - expected = expected() - } - if (typeof query === "function") { - query = query() - } - - await config.api.row.bulkImport(table._id!, { rows: insert }) - - const view = await config.api.viewV2.create({ - tableId: table._id!, - name: generator.guid(), - queryUI: query, - schema: { - string: { visible: true }, - longform: { visible: true }, - options: { visible: true }, - array: { visible: true }, - number: { visible: true }, - bigint: { visible: true }, - datetime: { visible: true }, - boolean: { visible: true }, - user: { visible: true }, - users: { visible: true }, + searchOpts: { + sort: "string", + sortOrder: SortOrder.ASCENDING, }, - }) + expected: [{ string: "bar" }, { string: "foo" }], + }, + { + name: "can handle logical operator all", + insert: [ + { string: "bar", number: 1 }, + { string: "foo", number: 2 }, + ], + query: { + groups: [ + { + logicalOperator: UILogicalOperator.ALL, + filters: [ + { + operator: BasicOperator.EQUAL, + field: "string", + value: "foo", + }, + { + operator: BasicOperator.EQUAL, + field: "number", + value: 2, + }, + ], + }, + ], + }, + searchOpts: { + sort: "string", + sortOrder: SortOrder.ASCENDING, + }, + expected: [{ string: "foo", number: 2 }], + }, + { + name: "overrides allOr with logical operators", + insert: [ + { string: "bar", number: 1 }, + { string: "foo", number: 1 }, + ], + query: { + groups: [ + { + logicalOperator: UILogicalOperator.ALL, + filters: [ + { operator: "allOr" }, + { + operator: BasicOperator.EQUAL, + field: "string", + value: "foo", + }, + { + operator: BasicOperator.EQUAL, + field: "number", + value: 1, + }, + ], + }, + ], + }, + searchOpts: { + sort: "string", + sortOrder: SortOrder.ASCENDING, + }, + expected: [{ string: "foo", number: 1 }], + }, + ] - const { rows } = await config.api.viewV2.search(view.id, { - query: {}, - ...searchOpts, - }) - expect(rows).toEqual( - expected.map(r => expect.objectContaining(r)) - ) - } - ) - }) - }) + it.each(testCases)( + "$name", + async ({ query, insert, expected, searchOpts }) => { + // Some values can't be specified outside of a test (e.g. getting + // config.getUser(), it won't be initialised), so we use functions + // in those cases. 
+ if (typeof insert === "function") { + insert = insert() + } + if (typeof expected === "function") { + expected = expected() + } + if (typeof query === "function") { + query = query() + } - describe("permissions", () => { - beforeEach(async () => { - await Promise.all( - Array.from({ length: 10 }, () => - config.api.row.save(table._id!, {}) + await config.api.row.bulkImport(table._id!, { rows: insert }) + + const view = await config.api.viewV2.create({ + tableId: table._id!, + name: generator.guid(), + queryUI: query, + schema: { + string: { visible: true }, + longform: { visible: true }, + options: { visible: true }, + array: { visible: true }, + number: { visible: true }, + bigint: { visible: true }, + datetime: { visible: true }, + boolean: { visible: true }, + user: { visible: true }, + users: { visible: true }, + }, + }) + + const { rows } = await config.api.viewV2.search(view.id, { + query: {}, + ...searchOpts, + }) + expect(rows).toEqual( + expected.map(r => expect.objectContaining(r)) + ) + } ) - ) - }) - - it("does not allow public users to fetch by default", async () => { - await config.publish() - await config.api.viewV2.publicSearch(view.id, undefined, { - status: 401, }) }) - it("allow public users to fetch when permissions are explicit", async () => { - await config.api.permission.add({ - roleId: roles.BUILTIN_ROLE_IDS.PUBLIC, - level: PermissionLevel.READ, - resourceId: view.id, + describe("permissions", () => { + beforeEach(async () => { + await Promise.all( + Array.from({ length: 10 }, () => + config.api.row.save(table._id!, {}) + ) + ) }) - await config.publish() - const response = await config.api.viewV2.publicSearch(view.id) - - expect(response.rows).toHaveLength(10) - }) - - it("allow public users to fetch when permissions are inherited", async () => { - await config.api.permission.add({ - roleId: roles.BUILTIN_ROLE_IDS.PUBLIC, - level: PermissionLevel.READ, - resourceId: table._id!, + it("does not allow public users to fetch by default", async () => { + await config.publish() + await config.api.viewV2.publicSearch(view.id, undefined, { + status: 401, + }) }) - await config.api.permission.revoke({ - roleId: roles.BUILTIN_ROLE_IDS.PUBLIC, // Don't think this matters since we are revoking the permission - level: PermissionLevel.READ, - resourceId: view.id, + + it("allow public users to fetch when permissions are explicit", async () => { + await config.api.permission.add({ + roleId: roles.BUILTIN_ROLE_IDS.PUBLIC, + level: PermissionLevel.READ, + resourceId: view.id, + }) + await config.publish() + + const response = await config.api.viewV2.publicSearch(view.id) + + expect(response.rows).toHaveLength(10) }) - await config.publish() - const response = await config.api.viewV2.publicSearch(view.id) + it("allow public users to fetch when permissions are inherited", async () => { + await config.api.permission.add({ + roleId: roles.BUILTIN_ROLE_IDS.PUBLIC, + level: PermissionLevel.READ, + resourceId: table._id!, + }) + await config.api.permission.revoke({ + roleId: roles.BUILTIN_ROLE_IDS.PUBLIC, // Don't think this matters since we are revoking the permission + level: PermissionLevel.READ, + resourceId: view.id, + }) + await config.publish() - expect(response.rows).toHaveLength(10) - }) + const response = await config.api.viewV2.publicSearch(view.id) - it("respects inherited permissions, not allowing not public views from public tables", async () => { - await config.api.permission.add({ - roleId: roles.BUILTIN_ROLE_IDS.PUBLIC, - level: PermissionLevel.READ, - resourceId: 
table._id!, + expect(response.rows).toHaveLength(10) }) - await config.api.permission.add({ - roleId: roles.BUILTIN_ROLE_IDS.POWER, - level: PermissionLevel.READ, - resourceId: view.id, - }) - await config.publish() - await config.api.viewV2.publicSearch(view.id, undefined, { - status: 401, + it("respects inherited permissions, not allowing not public views from public tables", async () => { + await config.api.permission.add({ + roleId: roles.BUILTIN_ROLE_IDS.PUBLIC, + level: PermissionLevel.READ, + resourceId: table._id!, + }) + await config.api.permission.add({ + roleId: roles.BUILTIN_ROLE_IDS.POWER, + level: PermissionLevel.READ, + resourceId: view.id, + }) + await config.publish() + + await config.api.viewV2.publicSearch(view.id, undefined, { + status: 401, + }) }) }) }) - }) - } -) + } + ) +} diff --git a/packages/server/src/automations/tests/executeQuery.spec.ts b/packages/server/src/automations/tests/executeQuery.spec.ts index 12bbe892a0..2d65be6e58 100644 --- a/packages/server/src/automations/tests/executeQuery.spec.ts +++ b/packages/server/src/automations/tests/executeQuery.spec.ts @@ -7,71 +7,74 @@ import { import { Knex } from "knex" import { generator } from "@budibase/backend-core/tests" -datasourceDescribe( - { - name: "execute query action", - exclude: [DatabaseName.MONGODB, DatabaseName.SQS], - }, - ({ config, dsProvider }) => { - let tableName: string - let client: Knex - let datasource: Datasource - let query: Query +const descriptions = datasourceDescribe({ + exclude: [DatabaseName.MONGODB, DatabaseName.SQS], +}) - beforeAll(async () => { - const ds = await dsProvider() - datasource = ds.datasource! - client = ds.client! - }) +if (descriptions.length) { + describe.each(descriptions)( + "execute query action ($dbName)", + ({ config, dsProvider }) => { + let tableName: string + let client: Knex + let datasource: Datasource + let query: Query - beforeEach(async () => { - tableName = generator.guid() - await client.schema.createTable(tableName, table => { - table.string("a") - table.integer("b") + beforeAll(async () => { + const ds = await dsProvider() + datasource = ds.datasource! + client = ds.client! 
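The dsProvider() call these rewritten beforeAll hooks rely on hands back everything the specs then unwrap with non-null assertions. Inferred purely from how the hunks above and below consume it (ds.datasource!, ds.rawDatasource!, ds.client!), and not copied from the helper itself, the provided bundle looks roughly like this:

```ts
import { Knex } from "knex"
import { Datasource } from "@budibase/types"

// Rough shape of what dsProvider() resolves to, inferred from usage in these
// specs; this is a sketch, not a definition lifted from the helper.
interface ProvidedDatasource {
  datasource?: Datasource // datasource already registered against the app
  rawDatasource?: Datasource // unsaved config, adjusted before datasource.create
  client?: Knex // direct knex connection for raw table setup and teardown
}

declare function dsProvider(): Promise<ProvidedDatasource>
```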
}) - await client(tableName).insert({ a: "string", b: 1 }) - query = await setup.saveTestQuery(config, client, tableName, datasource) - }) - afterEach(async () => { - await client.schema.dropTable(tableName) - }) + beforeEach(async () => { + tableName = generator.guid() + await client.schema.createTable(tableName, table => { + table.string("a") + table.integer("b") + }) + await client(tableName).insert({ a: "string", b: 1 }) + query = await setup.saveTestQuery(config, client, tableName, datasource) + }) - it("should be able to execute a query", async () => { - let res = await setup.runStep( - config, - setup.actions.EXECUTE_QUERY.stepId, - { - query: { queryId: query._id }, - } - ) - expect(res.response).toEqual([{ a: "string", b: 1 }]) - expect(res.success).toEqual(true) - }) + afterEach(async () => { + await client.schema.dropTable(tableName) + }) - it("should handle a null query value", async () => { - let res = await setup.runStep( - config, - setup.actions.EXECUTE_QUERY.stepId, - { - query: null, - } - ) - expect(res.response.message).toEqual("Invalid inputs") - expect(res.success).toEqual(false) - }) + it("should be able to execute a query", async () => { + let res = await setup.runStep( + config, + setup.actions.EXECUTE_QUERY.stepId, + { + query: { queryId: query._id }, + } + ) + expect(res.response).toEqual([{ a: "string", b: 1 }]) + expect(res.success).toEqual(true) + }) - it("should handle an error executing a query", async () => { - let res = await setup.runStep( - config, - setup.actions.EXECUTE_QUERY.stepId, - { - query: { queryId: "wrong_id" }, - } - ) - expect(res.response).toBeDefined() - expect(res.success).toEqual(false) - }) - } -) + it("should handle a null query value", async () => { + let res = await setup.runStep( + config, + setup.actions.EXECUTE_QUERY.stepId, + { + query: null, + } + ) + expect(res.response.message).toEqual("Invalid inputs") + expect(res.success).toEqual(false) + }) + + it("should handle an error executing a query", async () => { + let res = await setup.runStep( + config, + setup.actions.EXECUTE_QUERY.stepId, + { + query: { queryId: "wrong_id" }, + } + ) + expect(res.response).toBeDefined() + expect(res.success).toEqual(false) + }) + } + ) +} diff --git a/packages/server/src/automations/tests/scenarios/scenarios.spec.ts b/packages/server/src/automations/tests/scenarios/scenarios.spec.ts index dcfd4a4341..45b251f4c1 100644 --- a/packages/server/src/automations/tests/scenarios/scenarios.spec.ts +++ b/packages/server/src/automations/tests/scenarios/scenarios.spec.ts @@ -433,9 +433,10 @@ describe("Automation Scenarios", () => { }) }) -datasourceDescribe( - { name: "", only: [DatabaseName.MYSQL] }, - ({ config, dsProvider }) => { +const descriptions = datasourceDescribe({ only: [DatabaseName.MYSQL] }) + +if (descriptions.length) { + describe.each(descriptions)("/rows ($dbName)", ({ config, dsProvider }) => { let datasource: Datasource let client: Knex @@ -531,5 +532,5 @@ datasourceDescribe( ) }) }) - } -) + }) +} diff --git a/packages/server/src/integration-test/mysql.spec.ts b/packages/server/src/integration-test/mysql.spec.ts index 8edf6a0190..9cf7242e24 100644 --- a/packages/server/src/integration-test/mysql.spec.ts +++ b/packages/server/src/integration-test/mysql.spec.ts @@ -10,119 +10,123 @@ function uniqueTableName(length?: number): string { .substring(0, length || 10) } -datasourceDescribe( - { - name: "Integration compatibility with mysql search_path", - only: [DatabaseName.MYSQL], - }, - ({ config, dsProvider }) => { - let rawDatasource: 
Datasource - let datasource: Datasource - let client: Knex +const mainDescriptions = datasourceDescribe({ only: [DatabaseName.MYSQL] }) - const database = generator.guid() - const database2 = generator.guid() +if (mainDescriptions.length) { + describe.each(mainDescriptions)( + "/Integration compatibility with mysql search_path ($dbName)", + ({ config, dsProvider }) => { + let rawDatasource: Datasource + let datasource: Datasource + let client: Knex - beforeAll(async () => { - const ds = await dsProvider() - rawDatasource = ds.rawDatasource! - datasource = ds.datasource! - client = ds.client! + const database = generator.guid() + const database2 = generator.guid() - await client.raw(`CREATE DATABASE \`${database}\`;`) - await client.raw(`CREATE DATABASE \`${database2}\`;`) + beforeAll(async () => { + const ds = await dsProvider() + rawDatasource = ds.rawDatasource! + datasource = ds.datasource! + client = ds.client! - rawDatasource.config!.database = database - datasource = await config.api.datasource.create(rawDatasource) - }) + await client.raw(`CREATE DATABASE \`${database}\`;`) + await client.raw(`CREATE DATABASE \`${database2}\`;`) - afterAll(async () => { - await client.raw(`DROP DATABASE \`${database}\`;`) - await client.raw(`DROP DATABASE \`${database2}\`;`) - }) - - it("discovers tables from any schema in search path", async () => { - await client.schema.createTable(`${database}.table1`, table => { - table.increments("id1").primary() + rawDatasource.config!.database = database + datasource = await config.api.datasource.create(rawDatasource) }) - const res = await config.api.datasource.info(datasource) - expect(res.tableNames).toBeDefined() - expect(res.tableNames).toEqual(expect.arrayContaining(["table1"])) - }) - it("does not mix columns from different tables", async () => { - const repeated_table_name = "table_same_name" - await client.schema.createTable( - `${database}.${repeated_table_name}`, - table => { - table.increments("id").primary() - table.string("val1") - } - ) - await client.schema.createTable( - `${database2}.${repeated_table_name}`, - table => { - table.increments("id2").primary() - table.string("val2") - } - ) - - const res = await config.api.datasource.fetchSchema({ - datasourceId: datasource._id!, - tablesFilter: [repeated_table_name], + afterAll(async () => { + await client.raw(`DROP DATABASE \`${database}\`;`) + await client.raw(`DROP DATABASE \`${database2}\`;`) }) - expect(res.datasource.entities![repeated_table_name].schema).toBeDefined() - const schema = res.datasource.entities![repeated_table_name].schema - expect(Object.keys(schema).sort()).toEqual(["id", "val1"]) - }) - } -) -datasourceDescribe( - { - name: "POST /api/datasources/:datasourceId/schema", - only: [DatabaseName.MYSQL], - }, - ({ config, dsProvider }) => { - let datasource: Datasource - let client: Knex + it("discovers tables from any schema in search path", async () => { + await client.schema.createTable(`${database}.table1`, table => { + table.increments("id1").primary() + }) + const res = await config.api.datasource.info(datasource) + expect(res.tableNames).toBeDefined() + expect(res.tableNames).toEqual(expect.arrayContaining(["table1"])) + }) - beforeAll(async () => { - const ds = await dsProvider() - datasource = ds.datasource! - client = ds.client! 
- }) - - let tableName: string - beforeEach(async () => { - tableName = uniqueTableName() - }) - - afterEach(async () => { - await client.schema.dropTableIfExists(tableName) - }) - - it("recognises enum columns as options", async () => { - const enumColumnName = "status" - - await client.schema.createTable(tableName, table => { - table.increments("order_id").primary() - table.string("customer_name", 100).notNullable() - table.enum( - enumColumnName, - ["pending", "processing", "shipped", "delivered", "cancelled"], - { useNative: true, enumName: `${tableName}_${enumColumnName}` } + it("does not mix columns from different tables", async () => { + const repeated_table_name = "table_same_name" + await client.schema.createTable( + `${database}.${repeated_table_name}`, + table => { + table.increments("id").primary() + table.string("val1") + } ) + await client.schema.createTable( + `${database2}.${repeated_table_name}`, + table => { + table.increments("id2").primary() + table.string("val2") + } + ) + + const res = await config.api.datasource.fetchSchema({ + datasourceId: datasource._id!, + tablesFilter: [repeated_table_name], + }) + expect( + res.datasource.entities![repeated_table_name].schema + ).toBeDefined() + const schema = res.datasource.entities![repeated_table_name].schema + expect(Object.keys(schema).sort()).toEqual(["id", "val1"]) }) + } + ) - const res = await config.api.datasource.fetchSchema({ - datasourceId: datasource._id!, - }) + const descriptions = datasourceDescribe({ only: [DatabaseName.MYSQL] }) - const table = res.datasource.entities![tableName] + if (descriptions.length) { + describe.each(descriptions)( + "POST /api/datasources/:datasourceId/schema ($dbName)", + ({ config, dsProvider }) => { + let datasource: Datasource + let client: Knex - expect(table).toBeDefined() - expect(table.schema[enumColumnName].type).toEqual(FieldType.OPTIONS) - }) + beforeAll(async () => { + const ds = await dsProvider() + datasource = ds.datasource! + client = ds.client! 
+ }) + + let tableName: string + beforeEach(async () => { + tableName = uniqueTableName() + }) + + afterEach(async () => { + await client.schema.dropTableIfExists(tableName) + }) + + it("recognises enum columns as options", async () => { + const enumColumnName = "status" + + await client.schema.createTable(tableName, table => { + table.increments("order_id").primary() + table.string("customer_name", 100).notNullable() + table.enum( + enumColumnName, + ["pending", "processing", "shipped", "delivered", "cancelled"], + { useNative: true, enumName: `${tableName}_${enumColumnName}` } + ) + }) + + const res = await config.api.datasource.fetchSchema({ + datasourceId: datasource._id!, + }) + + const table = res.datasource.entities![tableName] + + expect(table).toBeDefined() + expect(table.schema[enumColumnName].type).toEqual(FieldType.OPTIONS) + }) + } + ) } -) +} diff --git a/packages/server/src/integration-test/postgres.spec.ts b/packages/server/src/integration-test/postgres.spec.ts index 7ef6b9a968..4f63579ba1 100644 --- a/packages/server/src/integration-test/postgres.spec.ts +++ b/packages/server/src/integration-test/postgres.spec.ts @@ -8,283 +8,292 @@ import { } from "../integrations/tests/utils" import { Knex } from "knex" -datasourceDescribe( - { name: "postgres integrations", only: [DatabaseName.POSTGRES] }, - ({ config, dsProvider }) => { - let datasource: Datasource - let client: Knex +const mainDescriptions = datasourceDescribe({ only: [DatabaseName.POSTGRES] }) - beforeAll(async () => { - const ds = await dsProvider() - datasource = ds.datasource! - client = ds.client! - }) +if (mainDescriptions.length) { + describe.each(mainDescriptions)( + "/postgres integrations", + ({ config, dsProvider }) => { + let datasource: Datasource + let client: Knex - afterAll(config.end) - - describe("POST /api/datasources/:datasourceId/schema", () => { - let tableName: string - - beforeEach(async () => { - tableName = generator.guid().replaceAll("-", "").substring(0, 10) + beforeAll(async () => { + const ds = await dsProvider() + datasource = ds.datasource! + client = ds.client! 
}) - afterEach(async () => { - await client.schema.dropTableIfExists(tableName) - }) + afterAll(config.end) - it("recognises when a table has no primary key", async () => { - await client.schema.createTable(tableName, table => { - table.increments("id", { primaryKey: false }) + describe("POST /api/datasources/:datasourceId/schema", () => { + let tableName: string + + beforeEach(async () => { + tableName = generator.guid().replaceAll("-", "").substring(0, 10) }) - const response = await config.api.datasource.fetchSchema({ - datasourceId: datasource._id!, + afterEach(async () => { + await client.schema.dropTableIfExists(tableName) }) - expect(response.errors).toEqual({ - [tableName]: "Table must have a primary key.", - }) - }) + it("recognises when a table has no primary key", async () => { + await client.schema.createTable(tableName, table => { + table.increments("id", { primaryKey: false }) + }) - it("recognises when a table is using a reserved column name", async () => { - await client.schema.createTable(tableName, table => { - table.increments("_id").primary() - }) + const response = await config.api.datasource.fetchSchema({ + datasourceId: datasource._id!, + }) - const response = await config.api.datasource.fetchSchema({ - datasourceId: datasource._id!, - }) - - expect(response.errors).toEqual({ - [tableName]: "Table contains invalid columns.", - }) - }) - - it("recognises enum columns as options", async () => { - const tableName = `orders_${generator - .guid() - .replaceAll("-", "") - .substring(0, 6)}` - - await client.schema.createTable(tableName, table => { - table.increments("order_id").primary() - table.string("customer_name").notNullable() - table.enum("status", ["pending", "processing", "shipped"], { - useNative: true, - enumName: `${tableName}_status`, + expect(response.errors).toEqual({ + [tableName]: "Table must have a primary key.", }) }) - const response = await config.api.datasource.fetchSchema({ - datasourceId: datasource._id!, + it("recognises when a table is using a reserved column name", async () => { + await client.schema.createTable(tableName, table => { + table.increments("_id").primary() + }) + + const response = await config.api.datasource.fetchSchema({ + datasourceId: datasource._id!, + }) + + expect(response.errors).toEqual({ + [tableName]: "Table contains invalid columns.", + }) }) - const table = response.datasource.entities?.[tableName] + it("recognises enum columns as options", async () => { + const tableName = `orders_${generator + .guid() + .replaceAll("-", "") + .substring(0, 6)}` - expect(table).toBeDefined() - expect(table?.schema["status"].type).toEqual(FieldType.OPTIONS) - }) - }) + await client.schema.createTable(tableName, table => { + table.increments("order_id").primary() + table.string("customer_name").notNullable() + table.enum("status", ["pending", "processing", "shipped"], { + useNative: true, + enumName: `${tableName}_status`, + }) + }) - describe("check custom column types", () => { - beforeAll(async () => { - await client.schema.createTable("binaryTable", table => { - table.binary("id").primary() - table.string("column1") - table.integer("column2") + const response = await config.api.datasource.fetchSchema({ + datasourceId: datasource._id!, + }) + + const table = response.datasource.entities?.[tableName] + + expect(table).toBeDefined() + expect(table?.schema["status"].type).toEqual(FieldType.OPTIONS) }) }) - it("should handle binary columns", async () => { - const response = await config.api.datasource.fetchSchema({ - datasourceId: 
datasource._id!, + describe("check custom column types", () => { + beforeAll(async () => { + await client.schema.createTable("binaryTable", table => { + table.binary("id").primary() + table.string("column1") + table.integer("column2") + }) }) - expect(response.datasource.entities).toBeDefined() - const table = response.datasource.entities?.["binaryTable"] - expect(table).toBeDefined() - expect(table?.schema.id.externalType).toBe("bytea") - const row = await config.api.row.save(table?._id!, { - id: "1111", - column1: "hello", - column2: 222, - }) - expect(row._id).toBeDefined() - const decoded = decodeURIComponent(row._id!).replace(/'/g, '"') - expect(JSON.parse(decoded)[0]).toBe("1111") - }) - }) - describe("check fetching null/not null table", () => { - beforeAll(async () => { - await client.schema.createTable("nullableTable", table => { - table.increments("order_id").primary() - table.integer("order_number").notNullable() + it("should handle binary columns", async () => { + const response = await config.api.datasource.fetchSchema({ + datasourceId: datasource._id!, + }) + expect(response.datasource.entities).toBeDefined() + const table = response.datasource.entities?.["binaryTable"] + expect(table).toBeDefined() + expect(table?.schema.id.externalType).toBe("bytea") + const row = await config.api.row.save(table?._id!, { + id: "1111", + column1: "hello", + column2: 222, + }) + expect(row._id).toBeDefined() + const decoded = decodeURIComponent(row._id!).replace(/'/g, '"') + expect(JSON.parse(decoded)[0]).toBe("1111") }) }) - it("should be able to change the table to allow nullable and refetch this", async () => { - const response = await config.api.datasource.fetchSchema({ - datasourceId: datasource._id!, - }) - const entities = response.datasource.entities - expect(entities).toBeDefined() - const nullableTable = entities?.["nullableTable"] - expect(nullableTable).toBeDefined() - expect( - nullableTable?.schema["order_number"].constraints?.presence - ).toEqual(true) - - // need to perform these calls raw to the DB so that the external state of the DB differs to what Budibase - // is aware of - therefore we can try to fetch and make sure BB updates correctly - await client.schema.alterTable("nullableTable", table => { - table.setNullable("order_number") + describe("check fetching null/not null table", () => { + beforeAll(async () => { + await client.schema.createTable("nullableTable", table => { + table.increments("order_id").primary() + table.integer("order_number").notNullable() + }) }) - const responseAfter = await config.api.datasource.fetchSchema({ - datasourceId: datasource._id!, + it("should be able to change the table to allow nullable and refetch this", async () => { + const response = await config.api.datasource.fetchSchema({ + datasourceId: datasource._id!, + }) + const entities = response.datasource.entities + expect(entities).toBeDefined() + const nullableTable = entities?.["nullableTable"] + expect(nullableTable).toBeDefined() + expect( + nullableTable?.schema["order_number"].constraints?.presence + ).toEqual(true) + + // need to perform these calls raw to the DB so that the external state of the DB differs to what Budibase + // is aware of - therefore we can try to fetch and make sure BB updates correctly + await client.schema.alterTable("nullableTable", table => { + table.setNullable("order_number") + }) + + const responseAfter = await config.api.datasource.fetchSchema({ + datasourceId: datasource._id!, + }) + const entitiesAfter = responseAfter.datasource.entities + 
expect(entitiesAfter).toBeDefined() + const nullableTableAfter = entitiesAfter?.["nullableTable"] + expect(nullableTableAfter).toBeDefined() + expect( + nullableTableAfter?.schema["order_number"].constraints?.presence + ).toBeUndefined() }) - const entitiesAfter = responseAfter.datasource.entities - expect(entitiesAfter).toBeDefined() - const nullableTableAfter = entitiesAfter?.["nullableTable"] - expect(nullableTableAfter).toBeDefined() - expect( - nullableTableAfter?.schema["order_number"].constraints?.presence - ).toBeUndefined() }) - }) - describe("money field 💰", () => { - const tableName = "moneytable" - let table: Table + describe("money field 💰", () => { + const tableName = "moneytable" + let table: Table - beforeAll(async () => { - await client.raw(` + beforeAll(async () => { + await client.raw(` CREATE TABLE ${tableName} ( id serial PRIMARY KEY, price money ) `) - const response = await config.api.datasource.fetchSchema({ - datasourceId: datasource._id!, - }) - table = response.datasource.entities![tableName] - }) - - it("should be able to import a money field", async () => { - expect(table).toBeDefined() - expect(table?.schema.price.type).toBe(FieldType.NUMBER) - }) - - it("should be able to search a money field", async () => { - await config.api.row.bulkImport(table._id!, { - rows: [{ price: 200 }, { price: 300 }], + const response = await config.api.datasource.fetchSchema({ + datasourceId: datasource._id!, + }) + table = response.datasource.entities![tableName] }) - const { rows } = await config.api.row.search(table._id!, { - query: { - equal: { - price: 200, + it("should be able to import a money field", async () => { + expect(table).toBeDefined() + expect(table?.schema.price.type).toBe(FieldType.NUMBER) + }) + + it("should be able to search a money field", async () => { + await config.api.row.bulkImport(table._id!, { + rows: [{ price: 200 }, { price: 300 }], + }) + + const { rows } = await config.api.row.search(table._id!, { + query: { + equal: { + price: 200, + }, }, - }, + }) + expect(rows).toHaveLength(1) + expect(rows[0].price).toBe("200.00") + }) + + it("should be able to update a money field", async () => { + let row = await config.api.row.save(table._id!, { price: 200 }) + expect(row.price).toBe("200.00") + + row = await config.api.row.save(table._id!, { ...row, price: 300 }) + expect(row.price).toBe("300.00") + + row = await config.api.row.save(table._id!, { + ...row, + price: "400.00", + }) + expect(row.price).toBe("400.00") }) - expect(rows).toHaveLength(1) - expect(rows[0].price).toBe("200.00") }) + } + ) - it("should be able to update a money field", async () => { - let row = await config.api.row.save(table._id!, { price: 200 }) - expect(row.price).toBe("200.00") + const descriptions = datasourceDescribe({ only: [DatabaseName.POSTGRES] }) - row = await config.api.row.save(table._id!, { ...row, price: 300 }) - expect(row.price).toBe("300.00") + if (descriptions.length) { + describe.each(descriptions)( + "Integration compatibility with postgres search_path", + ({ config, dsProvider }) => { + let datasource: Datasource + let client: Knex + let schema1: string + let schema2: string - row = await config.api.row.save(table._id!, { ...row, price: "400.00" }) - expect(row.price).toBe("400.00") - }) - }) + beforeEach(async () => { + const ds = await dsProvider() + datasource = ds.datasource! + const rawDatasource = ds.rawDatasource! 
+ + schema1 = generator.guid().replaceAll("-", "") + schema2 = generator.guid().replaceAll("-", "") + + client = await knexClient(rawDatasource) + + await client.schema.createSchema(schema1) + await client.schema.createSchema(schema2) + + rawDatasource.config!.schema = `${schema1}, ${schema2}` + + client = await knexClient(rawDatasource) + datasource = await config.api.datasource.create(rawDatasource) + }) + + afterEach(async () => { + await client.schema.dropSchema(schema1, true) + await client.schema.dropSchema(schema2, true) + }) + + it("discovers tables from any schema in search path", async () => { + await client.schema.createTable(`${schema1}.table1`, table => { + table.increments("id1").primary() + }) + + await client.schema.createTable(`${schema2}.table2`, table => { + table.increments("id2").primary() + }) + + const response = await config.api.datasource.info(datasource) + expect(response.tableNames).toBeDefined() + expect(response.tableNames).toEqual( + expect.arrayContaining(["table1", "table2"]) + ) + }) + + it("does not mix columns from different tables", async () => { + const repeated_table_name = "table_same_name" + + await client.schema.createTable( + `${schema1}.${repeated_table_name}`, + table => { + table.increments("id").primary() + table.string("val1") + } + ) + + await client.schema.createTable( + `${schema2}.${repeated_table_name}`, + table => { + table.increments("id2").primary() + table.string("val2") + } + ) + + const response = await config.api.datasource.fetchSchema({ + datasourceId: datasource._id!, + tablesFilter: [repeated_table_name], + }) + expect( + response.datasource.entities?.[repeated_table_name].schema + ).toBeDefined() + const schema = + response.datasource.entities?.[repeated_table_name].schema + expect(Object.keys(schema || {}).sort()).toEqual(["id", "val1"]) + }) + } + ) } -) - -datasourceDescribe( - { - name: "Integration compatibility with postgres search_path", - only: [DatabaseName.POSTGRES], - }, - ({ config, dsProvider }) => { - let datasource: Datasource - let client: Knex - let schema1: string - let schema2: string - - beforeEach(async () => { - const ds = await dsProvider() - datasource = ds.datasource! - const rawDatasource = ds.rawDatasource! 
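Every spec touched in this patch converges on the same gating pattern now that datasourceDescribe no longer registers suites itself: capture the returned descriptors, skip cleanly when the array is empty, and hand it to describe.each with the database interpolated via $dbName. Below is a condensed sketch of that pattern, assuming only names visible in these hunks; the suite title and test body are placeholders, the import path matches the integration-test specs above, and the helper's new return value appears further down in packages/server/src/integrations/tests/utils/index.ts.

```ts
import { Knex } from "knex"
import { Datasource } from "@budibase/types"
import { DatabaseName, datasourceDescribe } from "../integrations/tests/utils"

// datasourceDescribe() now returns one descriptor per selected database rather
// than calling describe() internally, so callers guard on the array and let
// jest parameterise the suite.
const descriptions = datasourceDescribe({ exclude: [DatabaseName.MONGODB] })

if (descriptions.length) {
  describe.each(descriptions)(
    "example suite ($dbName)",
    ({ config, dsProvider }) => {
      let datasource: Datasource
      let client: Knex

      beforeAll(async () => {
        const ds = await dsProvider()
        datasource = ds.datasource!
        client = ds.client!
      })

      it("runs once per configured database", () => {
        expect(datasource).toBeDefined()
      })
    }
  )
}
```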
- - schema1 = generator.guid().replaceAll("-", "") - schema2 = generator.guid().replaceAll("-", "") - - client = await knexClient(rawDatasource) - - await client.schema.createSchema(schema1) - await client.schema.createSchema(schema2) - - rawDatasource.config!.schema = `${schema1}, ${schema2}` - - client = await knexClient(rawDatasource) - datasource = await config.api.datasource.create(rawDatasource) - }) - - afterEach(async () => { - await client.schema.dropSchema(schema1, true) - await client.schema.dropSchema(schema2, true) - }) - - it("discovers tables from any schema in search path", async () => { - await client.schema.createTable(`${schema1}.table1`, table => { - table.increments("id1").primary() - }) - - await client.schema.createTable(`${schema2}.table2`, table => { - table.increments("id2").primary() - }) - - const response = await config.api.datasource.info(datasource) - expect(response.tableNames).toBeDefined() - expect(response.tableNames).toEqual( - expect.arrayContaining(["table1", "table2"]) - ) - }) - - it("does not mix columns from different tables", async () => { - const repeated_table_name = "table_same_name" - - await client.schema.createTable( - `${schema1}.${repeated_table_name}`, - table => { - table.increments("id").primary() - table.string("val1") - } - ) - - await client.schema.createTable( - `${schema2}.${repeated_table_name}`, - table => { - table.increments("id2").primary() - table.string("val2") - } - ) - - const response = await config.api.datasource.fetchSchema({ - datasourceId: datasource._id!, - tablesFilter: [repeated_table_name], - }) - expect( - response.datasource.entities?.[repeated_table_name].schema - ).toBeDefined() - const schema = response.datasource.entities?.[repeated_table_name].schema - expect(Object.keys(schema || {}).sort()).toEqual(["id", "val1"]) - }) - } -) +} diff --git a/packages/server/src/integrations/microsoftSqlServer.ts b/packages/server/src/integrations/microsoftSqlServer.ts index 0a07371cd3..1c74e6b1ff 100644 --- a/packages/server/src/integrations/microsoftSqlServer.ts +++ b/packages/server/src/integrations/microsoftSqlServer.ts @@ -281,8 +281,14 @@ class SqlServerIntegration extends Sql implements DatasourcePlus { case MSSQLConfigAuthType.NTLM: { const { domain, trustServerCertificate } = this.config.ntlmConfig || {} + + if (!domain) { + throw Error("Domain must be provided for NTLM config") + } + clientCfg.authentication = { type: "ntlm", + // @ts-expect-error - username and password not required for NTLM options: { domain, }, diff --git a/packages/server/src/integrations/snowflake.ts b/packages/server/src/integrations/snowflake.ts index 9a1dac10e5..838cbb4106 100644 --- a/packages/server/src/integrations/snowflake.ts +++ b/packages/server/src/integrations/snowflake.ts @@ -6,7 +6,8 @@ import { QueryType, SqlQuery, } from "@budibase/types" -import { Snowflake } from "snowflake-promise" +import snowflakeSdk, { SnowflakeError } from "snowflake-sdk" +import { promisify } from "util" interface SnowflakeConfig { account: string @@ -71,11 +72,52 @@ const SCHEMA: Integration = { }, } -class SnowflakeIntegration { - private client: Snowflake +class SnowflakePromise { + config: SnowflakeConfig + client?: snowflakeSdk.Connection constructor(config: SnowflakeConfig) { - this.client = new Snowflake(config) + this.config = config + } + + async connect() { + if (this.client?.isUp()) return + + this.client = snowflakeSdk.createConnection(this.config) + const connectAsync = promisify(this.client.connect.bind(this.client)) + return 
connectAsync() + } + + async execute(sql: string) { + return new Promise((resolve, reject) => { + if (!this.client) { + throw Error( + "No snowflake client present to execute query. Run connect() first to initialise." + ) + } + + this.client.execute({ + sqlText: sql, + complete: function ( + err: SnowflakeError | undefined, + statementExecuted: any, + rows: any + ) { + if (err) { + return reject(err) + } + resolve(rows) + }, + }) + }) + } +} + +class SnowflakeIntegration { + private client: SnowflakePromise + + constructor(config: SnowflakeConfig) { + this.client = new SnowflakePromise(config) } async testConnection(): Promise { diff --git a/packages/server/src/integrations/tests/utils/index.ts b/packages/server/src/integrations/tests/utils/index.ts index 5e7316c39a..dcdaece191 100644 --- a/packages/server/src/integrations/tests/utils/index.ts +++ b/packages/server/src/integrations/tests/utils/index.ts @@ -35,7 +35,6 @@ const providers: Record = { } export interface DatasourceDescribeOpts { - name: string only?: DatabaseName[] exclude?: DatabaseName[] } @@ -102,16 +101,12 @@ function createDummyTest() { }) } -export function datasourceDescribe( - opts: DatasourceDescribeOpts, - cb: (args: DatasourceDescribeReturn) => void -) { +export function datasourceDescribe(opts: DatasourceDescribeOpts) { if (process.env.DATASOURCE === "none") { createDummyTest() - return } - const { name, only, exclude } = opts + const { only, exclude } = opts if (only && exclude) { throw new Error("you can only supply one of 'only' or 'exclude'") @@ -130,36 +125,28 @@ export function datasourceDescribe( if (databases.length === 0) { createDummyTest() - return } - describe.each(databases)(name, name => { - const config = new TestConfiguration() - - afterAll(() => { - config.end() - }) - - cb({ - name, - config, - dsProvider: () => createDatasources(config, name), - isInternal: name === DatabaseName.SQS, - isExternal: name !== DatabaseName.SQS, - isSql: [ - DatabaseName.MARIADB, - DatabaseName.MYSQL, - DatabaseName.POSTGRES, - DatabaseName.SQL_SERVER, - DatabaseName.ORACLE, - ].includes(name), - isMySQL: name === DatabaseName.MYSQL, - isPostgres: name === DatabaseName.POSTGRES, - isMongodb: name === DatabaseName.MONGODB, - isMSSQL: name === DatabaseName.SQL_SERVER, - isOracle: name === DatabaseName.ORACLE, - }) - }) + const config = new TestConfiguration() + return databases.map(dbName => ({ + dbName, + config, + dsProvider: () => createDatasources(config, dbName), + isInternal: dbName === DatabaseName.SQS, + isExternal: dbName !== DatabaseName.SQS, + isSql: [ + DatabaseName.MARIADB, + DatabaseName.MYSQL, + DatabaseName.POSTGRES, + DatabaseName.SQL_SERVER, + DatabaseName.ORACLE, + ].includes(dbName), + isMySQL: dbName === DatabaseName.MYSQL, + isPostgres: dbName === DatabaseName.POSTGRES, + isMongodb: dbName === DatabaseName.MONGODB, + isMSSQL: dbName === DatabaseName.SQL_SERVER, + isOracle: dbName === DatabaseName.ORACLE, + })) } function getDatasource( diff --git a/packages/server/src/sdk/app/rows/search/tests/search.spec.ts b/packages/server/src/sdk/app/rows/search/tests/search.spec.ts index 78e5ffa30b..b424c3707d 100644 --- a/packages/server/src/sdk/app/rows/search/tests/search.spec.ts +++ b/packages/server/src/sdk/app/rows/search/tests/search.spec.ts @@ -19,202 +19,206 @@ import { tableForDatasource } from "../../../../../tests/utilities/structures" // These test cases are only for things that cannot be tested through the API // (e.g. limiting searches to returning specific fields). 
If it's possible to // test through the API, it should be done there instead. -datasourceDescribe( - { name: "search sdk (%s)", exclude: [DatabaseName.MONGODB] }, - ({ config, dsProvider, isInternal }) => { - let datasource: Datasource | undefined - let table: Table +const descriptions = datasourceDescribe({ exclude: [DatabaseName.MONGODB] }) - beforeAll(async () => { - const ds = await dsProvider() - datasource = ds.datasource - }) +if (descriptions.length) { + describe.each(descriptions)( + "search sdk ($dbName)", + ({ config, dsProvider, isInternal }) => { + let datasource: Datasource | undefined + let table: Table - beforeEach(async () => { - const idFieldSchema: NumberFieldMetadata | AutoColumnFieldMetadata = - isInternal - ? { - name: "id", - type: FieldType.AUTO, - subtype: AutoFieldSubType.AUTO_ID, - autocolumn: true, - } - : { - name: "id", - type: FieldType.NUMBER, - autocolumn: true, - } - - table = await config.api.table.save( - tableForDatasource(datasource, { - primary: ["id"], - schema: { - id: idFieldSchema, - name: { - name: "name", - type: FieldType.STRING, - }, - surname: { - name: "surname", - type: FieldType.STRING, - }, - age: { - name: "age", - type: FieldType.NUMBER, - }, - address: { - name: "address", - type: FieldType.STRING, - }, - }, - }) - ) - - for (let i = 0; i < 10; i++) { - await config.api.row.save(table._id!, { - name: generator.first(), - surname: generator.last(), - age: generator.age(), - address: generator.address(), - }) - } - }) - - afterAll(async () => { - config.end() - }) - - it("querying by fields will always return data attribute columns", async () => { - await config.doInContext(config.appId, async () => { - const { rows } = await search({ - tableId: table._id!, - query: {}, - fields: ["name", "age"], - }) - - expect(rows).toHaveLength(10) - for (const row of rows) { - const keys = Object.keys(row) - expect(keys).toContain("name") - expect(keys).toContain("age") - expect(keys).not.toContain("surname") - expect(keys).not.toContain("address") - } + beforeAll(async () => { + const ds = await dsProvider() + datasource = ds.datasource }) - }) - !isInternal && - it("will decode _id in oneOf query", async () => { - await config.doInContext(config.appId, async () => { - const result = await search({ - tableId: table._id!, - query: { - oneOf: { - _id: ["%5B1%5D", "%5B4%5D", "%5B8%5D"], + beforeEach(async () => { + const idFieldSchema: NumberFieldMetadata | AutoColumnFieldMetadata = + isInternal + ? 
{ + name: "id", + type: FieldType.AUTO, + subtype: AutoFieldSubType.AUTO_ID, + autocolumn: true, + } + : { + name: "id", + type: FieldType.NUMBER, + autocolumn: true, + } + + table = await config.api.table.save( + tableForDatasource(datasource, { + primary: ["id"], + schema: { + id: idFieldSchema, + name: { + name: "name", + type: FieldType.STRING, + }, + surname: { + name: "surname", + type: FieldType.STRING, + }, + age: { + name: "age", + type: FieldType.NUMBER, + }, + address: { + name: "address", + type: FieldType.STRING, }, }, }) + ) - expect(result.rows).toHaveLength(3) - expect(result.rows.map(row => row.id)).toEqual( - expect.arrayContaining([1, 4, 8]) - ) - }) - }) - - it("does not allow accessing hidden fields", async () => { - await config.doInContext(config.appId, async () => { - await config.api.table.save({ - ...table, - schema: { - ...table.schema, - name: { - ...table.schema.name, - visible: true, - }, - age: { - ...table.schema.age, - visible: false, - }, - }, - }) - const result = await search({ - tableId: table._id!, - query: {}, - }) - expect(result.rows).toHaveLength(10) - for (const row of result.rows) { - const keys = Object.keys(row) - expect(keys).toContain("name") - expect(keys).toContain("surname") - expect(keys).toContain("address") - expect(keys).not.toContain("age") + for (let i = 0; i < 10; i++) { + await config.api.row.save(table._id!, { + name: generator.first(), + surname: generator.last(), + age: generator.age(), + address: generator.address(), + }) } }) - }) - it("does not allow accessing hidden fields even if requested", async () => { - await config.doInContext(config.appId, async () => { - await config.api.table.save({ - ...table, - schema: { - ...table.schema, - name: { - ...table.schema.name, - visible: true, - }, - age: { - ...table.schema.age, - visible: false, - }, - }, - }) - const result = await search({ - tableId: table._id!, - query: {}, - fields: ["name", "age"], - }) - expect(result.rows).toHaveLength(10) - for (const row of result.rows) { - const keys = Object.keys(row) - expect(keys).toContain("name") - expect(keys).not.toContain("age") - expect(keys).not.toContain("surname") - expect(keys).not.toContain("address") - } + afterAll(async () => { + config.end() }) - }) - it.each([ - [["id", "name", "age"], 3], - [["name", "age"], 10], - ])( - "cannot query by non search fields (fields: %s)", - async (queryFields, expectedRows) => { + it("querying by fields will always return data attribute columns", async () => { await config.doInContext(config.appId, async () => { const { rows } = await search({ tableId: table._id!, - query: { - $or: { - conditions: [ - { - $and: { - conditions: [ - { range: { id: { low: 2, high: 4 } } }, - { range: { id: { low: 3, high: 5 } } }, - ], - }, - }, - { equal: { id: 7 } }, - ], - }, - }, - fields: queryFields, + query: {}, + fields: ["name", "age"], }) - expect(rows).toHaveLength(expectedRows) + expect(rows).toHaveLength(10) + for (const row of rows) { + const keys = Object.keys(row) + expect(keys).toContain("name") + expect(keys).toContain("age") + expect(keys).not.toContain("surname") + expect(keys).not.toContain("address") + } }) - } - ) - } -) + }) + + !isInternal && + it("will decode _id in oneOf query", async () => { + await config.doInContext(config.appId, async () => { + const result = await search({ + tableId: table._id!, + query: { + oneOf: { + _id: ["%5B1%5D", "%5B4%5D", "%5B8%5D"], + }, + }, + }) + + expect(result.rows).toHaveLength(3) + expect(result.rows.map(row => row.id)).toEqual( + 
expect.arrayContaining([1, 4, 8]) + ) + }) + }) + + it("does not allow accessing hidden fields", async () => { + await config.doInContext(config.appId, async () => { + await config.api.table.save({ + ...table, + schema: { + ...table.schema, + name: { + ...table.schema.name, + visible: true, + }, + age: { + ...table.schema.age, + visible: false, + }, + }, + }) + const result = await search({ + tableId: table._id!, + query: {}, + }) + expect(result.rows).toHaveLength(10) + for (const row of result.rows) { + const keys = Object.keys(row) + expect(keys).toContain("name") + expect(keys).toContain("surname") + expect(keys).toContain("address") + expect(keys).not.toContain("age") + } + }) + }) + + it("does not allow accessing hidden fields even if requested", async () => { + await config.doInContext(config.appId, async () => { + await config.api.table.save({ + ...table, + schema: { + ...table.schema, + name: { + ...table.schema.name, + visible: true, + }, + age: { + ...table.schema.age, + visible: false, + }, + }, + }) + const result = await search({ + tableId: table._id!, + query: {}, + fields: ["name", "age"], + }) + expect(result.rows).toHaveLength(10) + for (const row of result.rows) { + const keys = Object.keys(row) + expect(keys).toContain("name") + expect(keys).not.toContain("age") + expect(keys).not.toContain("surname") + expect(keys).not.toContain("address") + } + }) + }) + + it.each([ + [["id", "name", "age"], 3], + [["name", "age"], 10], + ])( + "cannot query by non search fields (fields: %s)", + async (queryFields, expectedRows) => { + await config.doInContext(config.appId, async () => { + const { rows } = await search({ + tableId: table._id!, + query: { + $or: { + conditions: [ + { + $and: { + conditions: [ + { range: { id: { low: 2, high: 4 } } }, + { range: { id: { low: 3, high: 5 } } }, + ], + }, + }, + { equal: { id: 7 } }, + ], + }, + }, + fields: queryFields, + }) + + expect(rows).toHaveLength(expectedRows) + }) + } + ) + } + ) +} diff --git a/packages/server/src/utilities/fileSystem/processor.ts b/packages/server/src/utilities/fileSystem/processor.ts index a32a7568f4..03fbf4ad0a 100644 --- a/packages/server/src/utilities/fileSystem/processor.ts +++ b/packages/server/src/utilities/fileSystem/processor.ts @@ -1,4 +1,4 @@ -import jimp from "jimp" +import { Jimp } from "jimp" const FORMATS = { IMAGES: ["png", "jpg", "jpeg", "gif", "bmp", "tiff"], @@ -6,8 +6,8 @@ const FORMATS = { function processImage(file: { path: string }) { // this will overwrite the temp file - return jimp.read(file.path).then(img => { - return img.resize(300, jimp.AUTO).write(file.path) + return Jimp.read(file.path).then(img => { + return img.resize({ w: 256 }).write(file.path as `${string}.${string}`) }) } diff --git a/packages/worker/package.json b/packages/worker/package.json index 2406a5b50e..85eae6c88a 100644 --- a/packages/worker/package.json +++ b/packages/worker/package.json @@ -40,17 +40,17 @@ "dependencies": { "@budibase/backend-core": "0.0.0", "@budibase/pro": "0.0.0", + "@budibase/shared-core": "0.0.0", "@budibase/string-templates": "0.0.0", "@budibase/types": "0.0.0", - "@budibase/shared-core": "0.0.0", - "@koa/router": "8.0.8", + "@koa/router": "13.1.0", "@techpass/passport-openidconnect": "0.3.3", "@types/global-agent": "2.1.1", - "aws-sdk": "2.1030.0", + "aws-sdk": "2.1692.0", "bcrypt": "5.1.0", "bcryptjs": "2.4.3", "bull": "4.10.1", - "dd-trace": "5.2.0", + "dd-trace": "5.23.0", "dotenv": "8.6.0", "global-agent": "3.0.0", "ical-generator": "4.1.0", @@ -82,7 +82,7 @@ "@types/jest": "29.5.5", 
"@types/jsonwebtoken": "9.0.3", "@types/koa": "2.13.4", - "@types/koa__router": "8.0.8", + "@types/koa__router": "12.0.4", "@types/lodash": "4.14.200", "@types/node": "^22.9.0", "@types/node-fetch": "2.6.4", diff --git a/packages/worker/src/api/controllers/global/users.ts b/packages/worker/src/api/controllers/global/users.ts index fa19948bf5..f415698fc0 100644 --- a/packages/worker/src/api/controllers/global/users.ts +++ b/packages/worker/src/api/controllers/global/users.ts @@ -40,6 +40,7 @@ import { import { checkAnyUserExists } from "../../../utilities/users" import { isEmailConfigured } from "../../../utilities/email" import { BpmStatusKey, BpmStatusValue, utils } from "@budibase/shared-core" +import crypto from "crypto" const MAX_USERS_UPLOAD_LIMIT = 1000 diff --git a/yarn.lock b/yarn.lock index d3f1dbd5f9..aaadbbe7bd 100644 --- a/yarn.lock +++ b/yarn.lock @@ -82,561 +82,615 @@ call-me-maybe "^1.0.1" z-schema "^5.0.1" -"@aws-crypto/crc32@3.0.0": - version "3.0.0" - resolved "https://registry.yarnpkg.com/@aws-crypto/crc32/-/crc32-3.0.0.tgz#07300eca214409c33e3ff769cd5697b57fdd38fa" - integrity "sha1-BzAOyiFECcM+P/dpzVaXtX/dOPo= sha512-IzSgsrxUcsrejQbPVilIKy16kAT52EwB6zSaI+M3xxIhKh5+aldEyvI+z6erM7TCLB2BJsFrtHjp6/4/sr+3dA==" +"@aws-crypto/crc32@5.2.0": + version "5.2.0" + resolved "https://registry.yarnpkg.com/@aws-crypto/crc32/-/crc32-5.2.0.tgz#cfcc22570949c98c6689cfcbd2d693d36cdae2e1" + integrity sha512-nLbCWqQNgUiwwtFsen1AdzAtvuLRsQS8rYgMuxCrdKf9kOssamGLuPwyTY9wyYblNr9+1XM8v6zoDTPPSIeANg== dependencies: - "@aws-crypto/util" "^3.0.0" + "@aws-crypto/util" "^5.2.0" "@aws-sdk/types" "^3.222.0" - tslib "^1.11.1" + tslib "^2.6.2" -"@aws-crypto/crc32c@3.0.0": - version "3.0.0" - resolved "https://registry.yarnpkg.com/@aws-crypto/crc32c/-/crc32c-3.0.0.tgz#016c92da559ef638a84a245eecb75c3e97cb664f" - integrity "sha1-AWyS2lWe9jioSiRe7LdcPpfLZk8= sha512-ENNPPManmnVJ4BTXlOjAgD7URidbAznURqD0KvfREyc4o20DPYdEldU1f5cQ7Jbj0CJJSPaMIk/9ZshdB3210w==" +"@aws-crypto/crc32c@5.2.0": + version "5.2.0" + resolved "https://registry.yarnpkg.com/@aws-crypto/crc32c/-/crc32c-5.2.0.tgz#4e34aab7f419307821509a98b9b08e84e0c1917e" + integrity sha512-+iWb8qaHLYKrNvGRbiYRHSdKRWhto5XlZUEBwDjYNf+ly5SVYG6zEoYIdxvf5R3zyeP16w4PLBn3rH1xc74Rag== dependencies: - "@aws-crypto/util" "^3.0.0" + "@aws-crypto/util" "^5.2.0" "@aws-sdk/types" "^3.222.0" - tslib "^1.11.1" + tslib "^2.6.2" -"@aws-crypto/ie11-detection@^3.0.0": - version "3.0.0" - resolved "https://registry.yarnpkg.com/@aws-crypto/ie11-detection/-/ie11-detection-3.0.0.tgz#640ae66b4ec3395cee6a8e94ebcd9f80c24cd688" - integrity "sha1-ZArma07DOVzuao6U682fgMJM1og= sha512-341lBBkiY1DfDNKai/wXM3aujNBkXR7tq1URPQDL9wi3AUbI80NR74uF1TXHMm7po1AcnFk8iu2S2IeU/+/A+Q==" +"@aws-crypto/sha1-browser@5.2.0": + version "5.2.0" + resolved "https://registry.yarnpkg.com/@aws-crypto/sha1-browser/-/sha1-browser-5.2.0.tgz#b0ee2d2821d3861f017e965ef3b4cb38e3b6a0f4" + integrity sha512-OH6lveCFfcDjX4dbAvCFSYUjJZjDr/3XJ3xHtjn3Oj5b9RjojQo8npoLeA/bNwkOkrSQ0wgrHzXk4tDRxGKJeg== dependencies: - tslib "^1.11.1" - -"@aws-crypto/sha1-browser@3.0.0": - version "3.0.0" - resolved "https://registry.yarnpkg.com/@aws-crypto/sha1-browser/-/sha1-browser-3.0.0.tgz#f9083c00782b24714f528b1a1fef2174002266a3" - integrity "sha1-+Qg8AHgrJHFPUosaH+8hdAAiZqM= sha512-NJth5c997GLHs6nOYTzFKTbYdMNA6/1XlKVgnZoaZcQ7z7UJlOgj2JdbHE8tiYLS3fzXNCguct77SPGat2raSw==" - dependencies: - "@aws-crypto/ie11-detection" "^3.0.0" - "@aws-crypto/supports-web-crypto" "^3.0.0" - "@aws-crypto/util" "^3.0.0" + "@aws-crypto/supports-web-crypto" "^5.2.0" + 
"@aws-crypto/util" "^5.2.0" "@aws-sdk/types" "^3.222.0" "@aws-sdk/util-locate-window" "^3.0.0" - "@aws-sdk/util-utf8-browser" "^3.0.0" - tslib "^1.11.1" + "@smithy/util-utf8" "^2.0.0" + tslib "^2.6.2" -"@aws-crypto/sha256-browser@3.0.0": - version "3.0.0" - resolved "https://registry.yarnpkg.com/@aws-crypto/sha256-browser/-/sha256-browser-3.0.0.tgz#05f160138ab893f1c6ba5be57cfd108f05827766" - integrity "sha1-BfFgE4q4k/HGulvlfP0QjwWCd2Y= sha512-8VLmW2B+gjFbU5uMeqtQM6Nj0/F1bro80xQXCW6CQBWgosFWXTx77aeOF5CAIAmbOK64SdMBJdNr6J41yP5mvQ==" +"@aws-crypto/sha256-browser@5.2.0": + version "5.2.0" + resolved "https://registry.yarnpkg.com/@aws-crypto/sha256-browser/-/sha256-browser-5.2.0.tgz#153895ef1dba6f9fce38af550e0ef58988eb649e" + integrity sha512-AXfN/lGotSQwu6HNcEsIASo7kWXZ5HYWvfOmSNKDsEqC4OashTp8alTmaz+F7TC2L083SFv5RdB+qU3Vs1kZqw== dependencies: - "@aws-crypto/ie11-detection" "^3.0.0" - "@aws-crypto/sha256-js" "^3.0.0" - "@aws-crypto/supports-web-crypto" "^3.0.0" - "@aws-crypto/util" "^3.0.0" + "@aws-crypto/sha256-js" "^5.2.0" + "@aws-crypto/supports-web-crypto" "^5.2.0" + "@aws-crypto/util" "^5.2.0" "@aws-sdk/types" "^3.222.0" "@aws-sdk/util-locate-window" "^3.0.0" - "@aws-sdk/util-utf8-browser" "^3.0.0" - tslib "^1.11.1" + "@smithy/util-utf8" "^2.0.0" + tslib "^2.6.2" -"@aws-crypto/sha256-js@3.0.0", "@aws-crypto/sha256-js@^3.0.0": - version "3.0.0" - resolved "https://registry.yarnpkg.com/@aws-crypto/sha256-js/-/sha256-js-3.0.0.tgz#f06b84d550d25521e60d2a0e2a90139341e007c2" - integrity "sha1-8GuE1VDSVSHmDSoOKpATk0HgB8I= sha512-PnNN7os0+yd1XvXAy23CFOmTbMaDxgxXtTKHybrJ39Y8kGzBATgBFibWJKH6BhytLI/Zyszs87xCOBNyBig6vQ==" +"@aws-crypto/sha256-js@5.2.0", "@aws-crypto/sha256-js@^5.2.0": + version "5.2.0" + resolved "https://registry.yarnpkg.com/@aws-crypto/sha256-js/-/sha256-js-5.2.0.tgz#c4fdb773fdbed9a664fc1a95724e206cf3860042" + integrity sha512-FFQQyu7edu4ufvIZ+OadFpHHOt+eSTBaYaki44c+akjg7qZg9oOQeLlk77F6tSYqjDAFClrHJk9tMf0HdVyOvA== dependencies: - "@aws-crypto/util" "^3.0.0" + "@aws-crypto/util" "^5.2.0" "@aws-sdk/types" "^3.222.0" - tslib "^1.11.1" + tslib "^2.6.2" -"@aws-crypto/supports-web-crypto@^3.0.0": - version "3.0.0" - resolved "https://registry.yarnpkg.com/@aws-crypto/supports-web-crypto/-/supports-web-crypto-3.0.0.tgz#5d1bf825afa8072af2717c3e455f35cda0103ec2" - integrity "sha1-XRv4Ja+oByrycXw+RV81zaAQPsI= sha512-06hBdMwUAb2WFTuGG73LSC0wfPu93xWwo5vL2et9eymgmu3Id5vFAHBbajVWiGhPO37qcsdCap/FqXvJGJWPIg==" +"@aws-crypto/supports-web-crypto@^5.2.0": + version "5.2.0" + resolved "https://registry.yarnpkg.com/@aws-crypto/supports-web-crypto/-/supports-web-crypto-5.2.0.tgz#a1e399af29269be08e695109aa15da0a07b5b5fb" + integrity sha512-iAvUotm021kM33eCdNfwIN//F77/IADDSs58i+MDaOqFrVjZo9bAal0NK7HurRuWLLpF1iLX7gbWrjHjeo+YFg== dependencies: - tslib "^1.11.1" + tslib "^2.6.2" -"@aws-crypto/util@^3.0.0": - version "3.0.0" - resolved "https://registry.yarnpkg.com/@aws-crypto/util/-/util-3.0.0.tgz#1c7ca90c29293f0883468ad48117937f0fe5bfb0" - integrity "sha1-HHypDCkpPwiDRorUgReTfw/lv7A= sha512-2OJlpeJpCR48CC8r+uKVChzs9Iungj9wkZrl8Z041DWEWvyIHILYKCPNzJghKsivj+S3mLo6BVc7mBNzdxA46w==" +"@aws-crypto/util@5.2.0", "@aws-crypto/util@^5.2.0": + version "5.2.0" + resolved "https://registry.yarnpkg.com/@aws-crypto/util/-/util-5.2.0.tgz#71284c9cffe7927ddadac793c14f14886d3876da" + integrity sha512-4RkU9EsI6ZpBve5fseQlGNUWKMa1RLPQ1dnjnQoe07ldfIzcsGb5hC5W0Dm7u423KWzawlrpbjXBrXCEv9zazQ== dependencies: "@aws-sdk/types" "^3.222.0" - "@aws-sdk/util-utf8-browser" "^3.0.0" - tslib "^1.11.1" + "@smithy/util-utf8" "^2.0.0" + tslib 
"^2.6.2" "@aws-sdk/client-s3@^3.388.0": - version "3.423.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/client-s3/-/client-s3-3.423.0.tgz#b15fc64db09f1698bf4ad19f6f8e3b57c15e5305" - integrity "sha1-sV/GTbCfFpi/StGfb447V8FeUwU= sha512-Sn/6fotTDGp+uUfPU0JrKszHT/cYwZonly6Ahi4R/uxASLQnOEAF7MwVSjms+/LGu72Qs0Tt7B7RKW76GI4OIA==" + version "3.693.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/client-s3/-/client-s3-3.693.0.tgz#188b621498ffaeb7b1ea5794f61e3e8d9a4bcac2" + integrity sha512-vgGI2e0Q6pzyhqfrSysi+sk/i+Nl+lMon67oqj/57RcCw9daL1/inpS+ADuwHpiPWkrg+U0bOXnmHjkLeTslJg== dependencies: - "@aws-crypto/sha1-browser" "3.0.0" - "@aws-crypto/sha256-browser" "3.0.0" - "@aws-crypto/sha256-js" "3.0.0" - "@aws-sdk/client-sts" "3.423.0" - "@aws-sdk/credential-provider-node" "3.423.0" - "@aws-sdk/middleware-bucket-endpoint" "3.418.0" - "@aws-sdk/middleware-expect-continue" "3.418.0" - "@aws-sdk/middleware-flexible-checksums" "3.418.0" - "@aws-sdk/middleware-host-header" "3.418.0" - "@aws-sdk/middleware-location-constraint" "3.418.0" - "@aws-sdk/middleware-logger" "3.418.0" - "@aws-sdk/middleware-recursion-detection" "3.418.0" - "@aws-sdk/middleware-sdk-s3" "3.418.0" - "@aws-sdk/middleware-signing" "3.418.0" - "@aws-sdk/middleware-ssec" "3.418.0" - "@aws-sdk/middleware-user-agent" "3.418.0" - "@aws-sdk/region-config-resolver" "3.418.0" - "@aws-sdk/signature-v4-multi-region" "3.418.0" - "@aws-sdk/types" "3.418.0" - "@aws-sdk/util-endpoints" "3.418.0" - "@aws-sdk/util-user-agent-browser" "3.418.0" - "@aws-sdk/util-user-agent-node" "3.418.0" - "@aws-sdk/xml-builder" "3.310.0" - "@smithy/config-resolver" "^2.0.10" - "@smithy/eventstream-serde-browser" "^2.0.9" - "@smithy/eventstream-serde-config-resolver" "^2.0.9" - "@smithy/eventstream-serde-node" "^2.0.9" - "@smithy/fetch-http-handler" "^2.1.5" - "@smithy/hash-blob-browser" "^2.0.9" - "@smithy/hash-node" "^2.0.9" - "@smithy/hash-stream-node" "^2.0.9" - "@smithy/invalid-dependency" "^2.0.9" - "@smithy/md5-js" "^2.0.9" - "@smithy/middleware-content-length" "^2.0.11" - "@smithy/middleware-endpoint" "^2.0.9" - "@smithy/middleware-retry" "^2.0.12" - "@smithy/middleware-serde" "^2.0.9" - "@smithy/middleware-stack" "^2.0.2" - "@smithy/node-config-provider" "^2.0.12" - "@smithy/node-http-handler" "^2.1.5" - "@smithy/protocol-http" "^3.0.5" - "@smithy/smithy-client" "^2.1.6" - "@smithy/types" "^2.3.3" - "@smithy/url-parser" "^2.0.9" - "@smithy/util-base64" "^2.0.0" - "@smithy/util-body-length-browser" "^2.0.0" - "@smithy/util-body-length-node" "^2.1.0" - "@smithy/util-defaults-mode-browser" "^2.0.10" - "@smithy/util-defaults-mode-node" "^2.0.12" - "@smithy/util-retry" "^2.0.2" - "@smithy/util-stream" "^2.0.12" - "@smithy/util-utf8" "^2.0.0" - "@smithy/util-waiter" "^2.0.9" - fast-xml-parser "4.2.5" + "@aws-crypto/sha1-browser" "5.2.0" + "@aws-crypto/sha256-browser" "5.2.0" + "@aws-crypto/sha256-js" "5.2.0" + "@aws-sdk/client-sso-oidc" "3.693.0" + "@aws-sdk/client-sts" "3.693.0" + "@aws-sdk/core" "3.693.0" + "@aws-sdk/credential-provider-node" "3.693.0" + "@aws-sdk/middleware-bucket-endpoint" "3.693.0" + "@aws-sdk/middleware-expect-continue" "3.693.0" + "@aws-sdk/middleware-flexible-checksums" "3.693.0" + "@aws-sdk/middleware-host-header" "3.693.0" + "@aws-sdk/middleware-location-constraint" "3.693.0" + "@aws-sdk/middleware-logger" "3.693.0" + "@aws-sdk/middleware-recursion-detection" "3.693.0" + "@aws-sdk/middleware-sdk-s3" "3.693.0" + "@aws-sdk/middleware-ssec" "3.693.0" + "@aws-sdk/middleware-user-agent" "3.693.0" + "@aws-sdk/region-config-resolver" 
"3.693.0" + "@aws-sdk/signature-v4-multi-region" "3.693.0" + "@aws-sdk/types" "3.692.0" + "@aws-sdk/util-endpoints" "3.693.0" + "@aws-sdk/util-user-agent-browser" "3.693.0" + "@aws-sdk/util-user-agent-node" "3.693.0" + "@aws-sdk/xml-builder" "3.693.0" + "@smithy/config-resolver" "^3.0.11" + "@smithy/core" "^2.5.2" + "@smithy/eventstream-serde-browser" "^3.0.12" + "@smithy/eventstream-serde-config-resolver" "^3.0.9" + "@smithy/eventstream-serde-node" "^3.0.11" + "@smithy/fetch-http-handler" "^4.1.0" + "@smithy/hash-blob-browser" "^3.1.8" + "@smithy/hash-node" "^3.0.9" + "@smithy/hash-stream-node" "^3.1.8" + "@smithy/invalid-dependency" "^3.0.9" + "@smithy/md5-js" "^3.0.9" + "@smithy/middleware-content-length" "^3.0.11" + "@smithy/middleware-endpoint" "^3.2.2" + "@smithy/middleware-retry" "^3.0.26" + "@smithy/middleware-serde" "^3.0.9" + "@smithy/middleware-stack" "^3.0.9" + "@smithy/node-config-provider" "^3.1.10" + "@smithy/node-http-handler" "^3.3.0" + "@smithy/protocol-http" "^4.1.6" + "@smithy/smithy-client" "^3.4.3" + "@smithy/types" "^3.7.0" + "@smithy/url-parser" "^3.0.9" + "@smithy/util-base64" "^3.0.0" + "@smithy/util-body-length-browser" "^3.0.0" + "@smithy/util-body-length-node" "^3.0.0" + "@smithy/util-defaults-mode-browser" "^3.0.26" + "@smithy/util-defaults-mode-node" "^3.0.26" + "@smithy/util-endpoints" "^2.1.5" + "@smithy/util-middleware" "^3.0.9" + "@smithy/util-retry" "^3.0.9" + "@smithy/util-stream" "^3.3.0" + "@smithy/util-utf8" "^3.0.0" + "@smithy/util-waiter" "^3.1.8" + tslib "^2.6.2" + +"@aws-sdk/client-sso-oidc@3.693.0": + version "3.693.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/client-sso-oidc/-/client-sso-oidc-3.693.0.tgz#2fd7f93bd81839f5cd08c5e6e9a578b80572d3c4" + integrity sha512-UEDbYlYtK/e86OOMyFR4zEPyenIxDzO2DRdz3fwVW7RzZ94wfmSwBh/8skzPTuY1G7sI064cjHW0b0QG01Sdtg== + dependencies: + "@aws-crypto/sha256-browser" "5.2.0" + "@aws-crypto/sha256-js" "5.2.0" + "@aws-sdk/core" "3.693.0" + "@aws-sdk/credential-provider-node" "3.693.0" + "@aws-sdk/middleware-host-header" "3.693.0" + "@aws-sdk/middleware-logger" "3.693.0" + "@aws-sdk/middleware-recursion-detection" "3.693.0" + "@aws-sdk/middleware-user-agent" "3.693.0" + "@aws-sdk/region-config-resolver" "3.693.0" + "@aws-sdk/types" "3.692.0" + "@aws-sdk/util-endpoints" "3.693.0" + "@aws-sdk/util-user-agent-browser" "3.693.0" + "@aws-sdk/util-user-agent-node" "3.693.0" + "@smithy/config-resolver" "^3.0.11" + "@smithy/core" "^2.5.2" + "@smithy/fetch-http-handler" "^4.1.0" + "@smithy/hash-node" "^3.0.9" + "@smithy/invalid-dependency" "^3.0.9" + "@smithy/middleware-content-length" "^3.0.11" + "@smithy/middleware-endpoint" "^3.2.2" + "@smithy/middleware-retry" "^3.0.26" + "@smithy/middleware-serde" "^3.0.9" + "@smithy/middleware-stack" "^3.0.9" + "@smithy/node-config-provider" "^3.1.10" + "@smithy/node-http-handler" "^3.3.0" + "@smithy/protocol-http" "^4.1.6" + "@smithy/smithy-client" "^3.4.3" + "@smithy/types" "^3.7.0" + "@smithy/url-parser" "^3.0.9" + "@smithy/util-base64" "^3.0.0" + "@smithy/util-body-length-browser" "^3.0.0" + "@smithy/util-body-length-node" "^3.0.0" + "@smithy/util-defaults-mode-browser" "^3.0.26" + "@smithy/util-defaults-mode-node" "^3.0.26" + "@smithy/util-endpoints" "^2.1.5" + "@smithy/util-middleware" "^3.0.9" + "@smithy/util-retry" "^3.0.9" + "@smithy/util-utf8" "^3.0.0" + tslib "^2.6.2" + +"@aws-sdk/client-sso@3.693.0": + version "3.693.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/client-sso/-/client-sso-3.693.0.tgz#9cd5e07e57013b8c7980512810d775d7b6f67e36" + integrity 
sha512-QEynrBC26x6TG9ZMzApR/kZ3lmt4lEIs2D+cHuDxt6fDGzahBUsQFBwJqhizzsM97JJI5YvmJhmihoYjdSSaXA== + dependencies: + "@aws-crypto/sha256-browser" "5.2.0" + "@aws-crypto/sha256-js" "5.2.0" + "@aws-sdk/core" "3.693.0" + "@aws-sdk/middleware-host-header" "3.693.0" + "@aws-sdk/middleware-logger" "3.693.0" + "@aws-sdk/middleware-recursion-detection" "3.693.0" + "@aws-sdk/middleware-user-agent" "3.693.0" + "@aws-sdk/region-config-resolver" "3.693.0" + "@aws-sdk/types" "3.692.0" + "@aws-sdk/util-endpoints" "3.693.0" + "@aws-sdk/util-user-agent-browser" "3.693.0" + "@aws-sdk/util-user-agent-node" "3.693.0" + "@smithy/config-resolver" "^3.0.11" + "@smithy/core" "^2.5.2" + "@smithy/fetch-http-handler" "^4.1.0" + "@smithy/hash-node" "^3.0.9" + "@smithy/invalid-dependency" "^3.0.9" + "@smithy/middleware-content-length" "^3.0.11" + "@smithy/middleware-endpoint" "^3.2.2" + "@smithy/middleware-retry" "^3.0.26" + "@smithy/middleware-serde" "^3.0.9" + "@smithy/middleware-stack" "^3.0.9" + "@smithy/node-config-provider" "^3.1.10" + "@smithy/node-http-handler" "^3.3.0" + "@smithy/protocol-http" "^4.1.6" + "@smithy/smithy-client" "^3.4.3" + "@smithy/types" "^3.7.0" + "@smithy/url-parser" "^3.0.9" + "@smithy/util-base64" "^3.0.0" + "@smithy/util-body-length-browser" "^3.0.0" + "@smithy/util-body-length-node" "^3.0.0" + "@smithy/util-defaults-mode-browser" "^3.0.26" + "@smithy/util-defaults-mode-node" "^3.0.26" + "@smithy/util-endpoints" "^2.1.5" + "@smithy/util-middleware" "^3.0.9" + "@smithy/util-retry" "^3.0.9" + "@smithy/util-utf8" "^3.0.0" + tslib "^2.6.2" + +"@aws-sdk/client-sts@3.693.0": + version "3.693.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/client-sts/-/client-sts-3.693.0.tgz#9e2c418f4850269635632bee4d1a31057c04bcc5" + integrity sha512-4S2y7VEtvdnjJX4JPl4kDQlslxXEZFnC50/UXVUYSt/AMc5A/GgspFNA5FVz4E3Gwpfobbf23hR2NBF8AGvYoQ== + dependencies: + "@aws-crypto/sha256-browser" "5.2.0" + "@aws-crypto/sha256-js" "5.2.0" + "@aws-sdk/client-sso-oidc" "3.693.0" + "@aws-sdk/core" "3.693.0" + "@aws-sdk/credential-provider-node" "3.693.0" + "@aws-sdk/middleware-host-header" "3.693.0" + "@aws-sdk/middleware-logger" "3.693.0" + "@aws-sdk/middleware-recursion-detection" "3.693.0" + "@aws-sdk/middleware-user-agent" "3.693.0" + "@aws-sdk/region-config-resolver" "3.693.0" + "@aws-sdk/types" "3.692.0" + "@aws-sdk/util-endpoints" "3.693.0" + "@aws-sdk/util-user-agent-browser" "3.693.0" + "@aws-sdk/util-user-agent-node" "3.693.0" + "@smithy/config-resolver" "^3.0.11" + "@smithy/core" "^2.5.2" + "@smithy/fetch-http-handler" "^4.1.0" + "@smithy/hash-node" "^3.0.9" + "@smithy/invalid-dependency" "^3.0.9" + "@smithy/middleware-content-length" "^3.0.11" + "@smithy/middleware-endpoint" "^3.2.2" + "@smithy/middleware-retry" "^3.0.26" + "@smithy/middleware-serde" "^3.0.9" + "@smithy/middleware-stack" "^3.0.9" + "@smithy/node-config-provider" "^3.1.10" + "@smithy/node-http-handler" "^3.3.0" + "@smithy/protocol-http" "^4.1.6" + "@smithy/smithy-client" "^3.4.3" + "@smithy/types" "^3.7.0" + "@smithy/url-parser" "^3.0.9" + "@smithy/util-base64" "^3.0.0" + "@smithy/util-body-length-browser" "^3.0.0" + "@smithy/util-body-length-node" "^3.0.0" + "@smithy/util-defaults-mode-browser" "^3.0.26" + "@smithy/util-defaults-mode-node" "^3.0.26" + "@smithy/util-endpoints" "^2.1.5" + "@smithy/util-middleware" "^3.0.9" + "@smithy/util-retry" "^3.0.9" + "@smithy/util-utf8" "^3.0.0" + tslib "^2.6.2" + +"@aws-sdk/core@3.693.0": + version "3.693.0" + resolved 
"https://registry.yarnpkg.com/@aws-sdk/core/-/core-3.693.0.tgz#437969dd740895a59863d737bad14646bc2e1725" + integrity sha512-v6Z/kWmLFqRLDPEwl9hJGhtTgIFHjZugSfF1Yqffdxf4n1AWgtHS7qSegakuMyN5pP4K2tvUD8qHJ+gGe2Bw2A== + dependencies: + "@aws-sdk/types" "3.692.0" + "@smithy/core" "^2.5.2" + "@smithy/node-config-provider" "^3.1.10" + "@smithy/property-provider" "^3.1.9" + "@smithy/protocol-http" "^4.1.6" + "@smithy/signature-v4" "^4.2.2" + "@smithy/smithy-client" "^3.4.3" + "@smithy/types" "^3.7.0" + "@smithy/util-middleware" "^3.0.9" + fast-xml-parser "4.4.1" + tslib "^2.6.2" + +"@aws-sdk/credential-provider-env@3.693.0": + version "3.693.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-env/-/credential-provider-env-3.693.0.tgz#f97feed9809fe2800216943470015fdaaba47c4f" + integrity sha512-hMUZaRSF7+iBKZfBHNLihFs9zvpM1CB8MBOTnTp5NGCVkRYF3SB2LH+Kcippe0ats4qCyB1eEoyQX99rERp2iQ== + dependencies: + "@aws-sdk/core" "3.693.0" + "@aws-sdk/types" "3.692.0" + "@smithy/property-provider" "^3.1.9" + "@smithy/types" "^3.7.0" + tslib "^2.6.2" + +"@aws-sdk/credential-provider-http@3.693.0": + version "3.693.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-http/-/credential-provider-http-3.693.0.tgz#5caad0ac47eded1edeb63f907280580ccfaadba3" + integrity sha512-sL8MvwNJU7ZpD7/d2VVb3by1GknIJUxzTIgYtVkDVA/ojo+KRQSSHxcj0EWWXF5DTSh2Tm+LrEug3y1ZyKHsDA== + dependencies: + "@aws-sdk/core" "3.693.0" + "@aws-sdk/types" "3.692.0" + "@smithy/fetch-http-handler" "^4.1.0" + "@smithy/node-http-handler" "^3.3.0" + "@smithy/property-provider" "^3.1.9" + "@smithy/protocol-http" "^4.1.6" + "@smithy/smithy-client" "^3.4.3" + "@smithy/types" "^3.7.0" + "@smithy/util-stream" "^3.3.0" + tslib "^2.6.2" + +"@aws-sdk/credential-provider-ini@3.693.0": + version "3.693.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.693.0.tgz#b4557ac1092657660a15c9bd55e17c27f79ec621" + integrity sha512-kvaa4mXhCCOuW7UQnBhYqYfgWmwy7WSBSDClutwSLPZvgrhYj2l16SD2lN4IfYdxARYMJJ1lFYp3/jJG/9Yk4Q== + dependencies: + "@aws-sdk/core" "3.693.0" + "@aws-sdk/credential-provider-env" "3.693.0" + "@aws-sdk/credential-provider-http" "3.693.0" + "@aws-sdk/credential-provider-process" "3.693.0" + "@aws-sdk/credential-provider-sso" "3.693.0" + "@aws-sdk/credential-provider-web-identity" "3.693.0" + "@aws-sdk/types" "3.692.0" + "@smithy/credential-provider-imds" "^3.2.6" + "@smithy/property-provider" "^3.1.9" + "@smithy/shared-ini-file-loader" "^3.1.10" + "@smithy/types" "^3.7.0" + tslib "^2.6.2" + +"@aws-sdk/credential-provider-node@3.693.0": + version "3.693.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-node/-/credential-provider-node-3.693.0.tgz#c5ceac64a69304d5b4db3fd68473480cafddb4a9" + integrity sha512-42WMsBjTNnjYxYuM3qD/Nq+8b7UdMopUq5OduMDxoM3mFTV6PXMMnfI4Z1TNnR4tYRvPXAnuNltF6xmjKbSJRA== + dependencies: + "@aws-sdk/credential-provider-env" "3.693.0" + "@aws-sdk/credential-provider-http" "3.693.0" + "@aws-sdk/credential-provider-ini" "3.693.0" + "@aws-sdk/credential-provider-process" "3.693.0" + "@aws-sdk/credential-provider-sso" "3.693.0" + "@aws-sdk/credential-provider-web-identity" "3.693.0" + "@aws-sdk/types" "3.692.0" + "@smithy/credential-provider-imds" "^3.2.6" + "@smithy/property-provider" "^3.1.9" + "@smithy/shared-ini-file-loader" "^3.1.10" + "@smithy/types" "^3.7.0" + tslib "^2.6.2" + +"@aws-sdk/credential-provider-process@3.693.0": + version "3.693.0" + resolved 
"https://registry.yarnpkg.com/@aws-sdk/credential-provider-process/-/credential-provider-process-3.693.0.tgz#e84e945f1a148f06ff697608d5309e73347e5aa9" + integrity sha512-cvxQkrTWHHjeHrPlj7EWXPnFSq8x7vMx+Zn1oTsMpCY445N9KuzjfJTkmNGwU2GT6rSZI9/0MM02aQvl5bBBTQ== + dependencies: + "@aws-sdk/core" "3.693.0" + "@aws-sdk/types" "3.692.0" + "@smithy/property-provider" "^3.1.9" + "@smithy/shared-ini-file-loader" "^3.1.10" + "@smithy/types" "^3.7.0" + tslib "^2.6.2" + +"@aws-sdk/credential-provider-sso@3.693.0": + version "3.693.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.693.0.tgz#72767389f533d9d17a14af63daaafcc8368ab43a" + integrity sha512-479UlJxY+BFjj3pJFYUNC0DCMrykuG7wBAXfsvZqQxKUa83DnH5Q1ID/N2hZLkxjGd4ZW0AC3lTOMxFelGzzpQ== + dependencies: + "@aws-sdk/client-sso" "3.693.0" + "@aws-sdk/core" "3.693.0" + "@aws-sdk/token-providers" "3.693.0" + "@aws-sdk/types" "3.692.0" + "@smithy/property-provider" "^3.1.9" + "@smithy/shared-ini-file-loader" "^3.1.10" + "@smithy/types" "^3.7.0" + tslib "^2.6.2" + +"@aws-sdk/credential-provider-web-identity@3.693.0": + version "3.693.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-web-identity/-/credential-provider-web-identity-3.693.0.tgz#b6133b5ef9d3582e36e02e9c66766714ff672a11" + integrity sha512-8LB210Pr6VeCiSb2hIra+sAH4KUBLyGaN50axHtIgufVK8jbKIctTZcVY5TO9Se+1107TsruzeXS7VeqVdJfFA== + dependencies: + "@aws-sdk/core" "3.693.0" + "@aws-sdk/types" "3.692.0" + "@smithy/property-provider" "^3.1.9" + "@smithy/types" "^3.7.0" + tslib "^2.6.2" + +"@aws-sdk/middleware-bucket-endpoint@3.693.0": + version "3.693.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-bucket-endpoint/-/middleware-bucket-endpoint-3.693.0.tgz#e4823a40935d34f5e58a4fbc830d8ff92e44fc99" + integrity sha512-cPIa+lxMYiFRHtxKfNIVSFGO6LSgZCk42pu3d7KGwD6hu6vXRD5B2/DD3rPcEH1zgl2j0Kx1oGAV7SRXKHSFag== + dependencies: + "@aws-sdk/types" "3.692.0" + "@aws-sdk/util-arn-parser" "3.693.0" + "@smithy/node-config-provider" "^3.1.10" + "@smithy/protocol-http" "^4.1.6" + "@smithy/types" "^3.7.0" + "@smithy/util-config-provider" "^3.0.0" + tslib "^2.6.2" + +"@aws-sdk/middleware-expect-continue@3.693.0": + version "3.693.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-expect-continue/-/middleware-expect-continue-3.693.0.tgz#d8696cee9ebea1d973d8daf872fd913b41d62cf0" + integrity sha512-MuK/gsJWpHz6Tv0CqTCS+QNOxLa2RfPh1biVCu/uO3l7kA0TjQ/C+tfgKvLXeH103tuDrOVINK+bt2ENmI3SWg== + dependencies: + "@aws-sdk/types" "3.692.0" + "@smithy/protocol-http" "^4.1.6" + "@smithy/types" "^3.7.0" + tslib "^2.6.2" + +"@aws-sdk/middleware-flexible-checksums@3.693.0": + version "3.693.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-flexible-checksums/-/middleware-flexible-checksums-3.693.0.tgz#80f07802d98ff33a6899a09c59cf51aab426aaac" + integrity sha512-xkS6zjuE11ob93H9t65kHzphXcUMnN2SmIm2wycUPg+hi8Q6DJA6U2p//6oXkrr9oHy1QvwtllRd7SAd63sFKQ== + dependencies: + "@aws-crypto/crc32" "5.2.0" + "@aws-crypto/crc32c" "5.2.0" + "@aws-crypto/util" "5.2.0" + "@aws-sdk/core" "3.693.0" + "@aws-sdk/types" "3.692.0" + "@smithy/is-array-buffer" "^3.0.0" + "@smithy/node-config-provider" "^3.1.10" + "@smithy/protocol-http" "^4.1.6" + "@smithy/types" "^3.7.0" + "@smithy/util-middleware" "^3.0.9" + "@smithy/util-stream" "^3.3.0" + "@smithy/util-utf8" "^3.0.0" + tslib "^2.6.2" + +"@aws-sdk/middleware-host-header@3.693.0": + version "3.693.0" + resolved 
"https://registry.yarnpkg.com/@aws-sdk/middleware-host-header/-/middleware-host-header-3.693.0.tgz#69322909c0792df1e6be7c7fb5e2b6f76090a55c" + integrity sha512-BCki6sAZ5jYwIN/t3ElCiwerHad69ipHwPsDCxJQyeiOnJ8HG+lEpnVIfrnI8A0fLQNSF3Gtx6ahfBpKiv1Oug== + dependencies: + "@aws-sdk/types" "3.692.0" + "@smithy/protocol-http" "^4.1.6" + "@smithy/types" "^3.7.0" + tslib "^2.6.2" + +"@aws-sdk/middleware-location-constraint@3.693.0": + version "3.693.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-location-constraint/-/middleware-location-constraint-3.693.0.tgz#1856eaaad64d41d1f8fa53ced58a6c7cf5eccc6e" + integrity sha512-eDAExTZ9uNIP7vs2JCVCOuWJauGueisBSn+Ovt7UvvuEUp6KOIJqn8oFxWmyUQu2GvbG4OcaTLgbqD95YHTB0Q== + dependencies: + "@aws-sdk/types" "3.692.0" + "@smithy/types" "^3.7.0" + tslib "^2.6.2" + +"@aws-sdk/middleware-logger@3.693.0": + version "3.693.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-logger/-/middleware-logger-3.693.0.tgz#fc10294e6963f8e5d58ba1ededd891e999f544a9" + integrity sha512-dXnXDPr+wIiJ1TLADACI1g9pkSB21KkMIko2u4CJ2JCBoxi5IqeTnVoa6YcC8GdFNVRl+PorZ3Zqfmf1EOTC6w== + dependencies: + "@aws-sdk/types" "3.692.0" + "@smithy/types" "^3.7.0" + tslib "^2.6.2" + +"@aws-sdk/middleware-recursion-detection@3.693.0": + version "3.693.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-recursion-detection/-/middleware-recursion-detection-3.693.0.tgz#88a8157293775e7116707da26501da4b5e042f51" + integrity sha512-0LDmM+VxXp0u3rG0xQRWD/q6Ubi7G8I44tBPahevD5CaiDZTkmNTrVUf0VEJgVe0iCKBppACMBDkLB0/ETqkFw== + dependencies: + "@aws-sdk/types" "3.692.0" + "@smithy/protocol-http" "^4.1.6" + "@smithy/types" "^3.7.0" + tslib "^2.6.2" + +"@aws-sdk/middleware-sdk-s3@3.693.0": + version "3.693.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-sdk-s3/-/middleware-sdk-s3-3.693.0.tgz#e0850854d5079f372786b2ccfe85729caa7a49d8" + integrity sha512-5A++RBjJ3guyq5pbYs+Oq5hMlA8CK2OWaHx09cxVfhHWl/RoaY8DXrft4gnhoUEBrrubyMw7r9j7RIMLvS58kg== + dependencies: + "@aws-sdk/core" "3.693.0" + "@aws-sdk/types" "3.692.0" + "@aws-sdk/util-arn-parser" "3.693.0" + "@smithy/core" "^2.5.2" + "@smithy/node-config-provider" "^3.1.10" + "@smithy/protocol-http" "^4.1.6" + "@smithy/signature-v4" "^4.2.2" + "@smithy/smithy-client" "^3.4.3" + "@smithy/types" "^3.7.0" + "@smithy/util-config-provider" "^3.0.0" + "@smithy/util-middleware" "^3.0.9" + "@smithy/util-stream" "^3.3.0" + "@smithy/util-utf8" "^3.0.0" + tslib "^2.6.2" + +"@aws-sdk/middleware-ssec@3.693.0": + version "3.693.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-ssec/-/middleware-ssec-3.693.0.tgz#2ff779147d188090b3a6cda3ed12ca4085220a73" + integrity sha512-Ro5vzI7SRgEeuoMk3fKqFjGv6mG4c7VsSCDwnkiasmafQFBTPvUIpgmu2FXMHqW/OthvoiOzpSrlJ9Bwlx2f8A== + dependencies: + "@aws-sdk/types" "3.692.0" + "@smithy/types" "^3.7.0" + tslib "^2.6.2" + +"@aws-sdk/middleware-user-agent@3.693.0": + version "3.693.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-user-agent/-/middleware-user-agent-3.693.0.tgz#4b55cfab3fc7e671b08e1ea63a98e45a1e13e6a5" + integrity sha512-/KUq/KEpFFbQmNmpp7SpAtFAdViquDfD2W0QcG07zYBfz9MwE2ig48ALynXm5sMpRmnG7sJXjdvPtTsSVPfkiw== + dependencies: + "@aws-sdk/core" "3.693.0" + "@aws-sdk/types" "3.692.0" + "@aws-sdk/util-endpoints" "3.693.0" + "@smithy/core" "^2.5.2" + "@smithy/protocol-http" "^4.1.6" + "@smithy/types" "^3.7.0" + tslib "^2.6.2" + +"@aws-sdk/node-http-handler@^3.374.0": + version "3.374.0" + resolved 
"https://registry.yarnpkg.com/@aws-sdk/node-http-handler/-/node-http-handler-3.374.0.tgz#8cd58b4d9814713e26034c12eabc119c113a5bc4" + integrity sha512-v1Z6m0wwkf65/tKuhwrtPRqVoOtNkDTRn2MBMtxCwEw+8V8Q+YRFqVgGN+J1n53ktE0G5OYVBux/NHiAjJHReQ== + dependencies: + "@smithy/node-http-handler" "^1.0.2" tslib "^2.5.0" -"@aws-sdk/client-sso@3.423.0": - version "3.423.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/client-sso/-/client-sso-3.423.0.tgz#99db1f73419443cef544892337a1344aba10abc2" - integrity "sha1-mdsfc0GUQ871RIkjN6E0SroQq8I= sha512-znIufHkwhCIePgaYciIs3x/+BpzR57CZzbCKHR9+oOvGyufEPPpUT5bFLvbwTgfiVkTjuk6sG/ES3U5Bc+xtrA==" +"@aws-sdk/region-config-resolver@3.693.0": + version "3.693.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/region-config-resolver/-/region-config-resolver-3.693.0.tgz#9cde5e99f654c788540acfb2a4218d444e8621c2" + integrity sha512-YLUkMsUY0GLW/nfwlZ69cy1u07EZRmsv8Z9m0qW317/EZaVx59hcvmcvb+W4bFqj5E8YImTjoGfE4cZ0F9mkyw== dependencies: - "@aws-crypto/sha256-browser" "3.0.0" - "@aws-crypto/sha256-js" "3.0.0" - "@aws-sdk/middleware-host-header" "3.418.0" - "@aws-sdk/middleware-logger" "3.418.0" - "@aws-sdk/middleware-recursion-detection" "3.418.0" - "@aws-sdk/middleware-user-agent" "3.418.0" - "@aws-sdk/region-config-resolver" "3.418.0" - "@aws-sdk/types" "3.418.0" - "@aws-sdk/util-endpoints" "3.418.0" - "@aws-sdk/util-user-agent-browser" "3.418.0" - "@aws-sdk/util-user-agent-node" "3.418.0" - "@smithy/config-resolver" "^2.0.10" - "@smithy/fetch-http-handler" "^2.1.5" - "@smithy/hash-node" "^2.0.9" - "@smithy/invalid-dependency" "^2.0.9" - "@smithy/middleware-content-length" "^2.0.11" - "@smithy/middleware-endpoint" "^2.0.9" - "@smithy/middleware-retry" "^2.0.12" - "@smithy/middleware-serde" "^2.0.9" - "@smithy/middleware-stack" "^2.0.2" - "@smithy/node-config-provider" "^2.0.12" - "@smithy/node-http-handler" "^2.1.5" - "@smithy/protocol-http" "^3.0.5" - "@smithy/smithy-client" "^2.1.6" - "@smithy/types" "^2.3.3" - "@smithy/url-parser" "^2.0.9" - "@smithy/util-base64" "^2.0.0" - "@smithy/util-body-length-browser" "^2.0.0" - "@smithy/util-body-length-node" "^2.1.0" - "@smithy/util-defaults-mode-browser" "^2.0.10" - "@smithy/util-defaults-mode-node" "^2.0.12" - "@smithy/util-retry" "^2.0.2" - "@smithy/util-utf8" "^2.0.0" - tslib "^2.5.0" + "@aws-sdk/types" "3.692.0" + "@smithy/node-config-provider" "^3.1.10" + "@smithy/types" "^3.7.0" + "@smithy/util-config-provider" "^3.0.0" + "@smithy/util-middleware" "^3.0.9" + tslib "^2.6.2" -"@aws-sdk/client-sts@3.423.0": - version "3.423.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/client-sts/-/client-sts-3.423.0.tgz#530a9cd58baef40cc6bbc6321c6ed93175e0e5b2" - integrity "sha1-Uwqc1Yuu9AzGu8YyHG7ZMXXg5bI= sha512-EcpkKu02QZbRX6dQE0u7a8RgWrn/5riz1qAlKd7rM8FZJpr/D6GGX8ZzWxjgp7pRUgfNvinTmIudDnyQY3v9Mg==" +"@aws-sdk/signature-v4-multi-region@3.693.0": + version "3.693.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/signature-v4-multi-region/-/signature-v4-multi-region-3.693.0.tgz#85bd90bb78be1a98d5a5ca41033cb0703146c2c4" + integrity sha512-s7zbbsoVIriTR4ZGaateKuTqz6ddpazAyHvjk7I9kd+NvGNPiuAI18UdbuiiRI6K5HuYKf1ah6mKWFGPG15/kQ== dependencies: - "@aws-crypto/sha256-browser" "3.0.0" - "@aws-crypto/sha256-js" "3.0.0" - "@aws-sdk/credential-provider-node" "3.423.0" - "@aws-sdk/middleware-host-header" "3.418.0" - "@aws-sdk/middleware-logger" "3.418.0" - "@aws-sdk/middleware-recursion-detection" "3.418.0" - "@aws-sdk/middleware-sdk-sts" "3.418.0" - "@aws-sdk/middleware-signing" "3.418.0" - "@aws-sdk/middleware-user-agent" "3.418.0" - 
"@aws-sdk/region-config-resolver" "3.418.0" - "@aws-sdk/types" "3.418.0" - "@aws-sdk/util-endpoints" "3.418.0" - "@aws-sdk/util-user-agent-browser" "3.418.0" - "@aws-sdk/util-user-agent-node" "3.418.0" - "@smithy/config-resolver" "^2.0.10" - "@smithy/fetch-http-handler" "^2.1.5" - "@smithy/hash-node" "^2.0.9" - "@smithy/invalid-dependency" "^2.0.9" - "@smithy/middleware-content-length" "^2.0.11" - "@smithy/middleware-endpoint" "^2.0.9" - "@smithy/middleware-retry" "^2.0.12" - "@smithy/middleware-serde" "^2.0.9" - "@smithy/middleware-stack" "^2.0.2" - "@smithy/node-config-provider" "^2.0.12" - "@smithy/node-http-handler" "^2.1.5" - "@smithy/protocol-http" "^3.0.5" - "@smithy/smithy-client" "^2.1.6" - "@smithy/types" "^2.3.3" - "@smithy/url-parser" "^2.0.9" - "@smithy/util-base64" "^2.0.0" - "@smithy/util-body-length-browser" "^2.0.0" - "@smithy/util-body-length-node" "^2.1.0" - "@smithy/util-defaults-mode-browser" "^2.0.10" - "@smithy/util-defaults-mode-node" "^2.0.12" - "@smithy/util-retry" "^2.0.2" - "@smithy/util-utf8" "^2.0.0" - fast-xml-parser "4.2.5" - tslib "^2.5.0" + "@aws-sdk/middleware-sdk-s3" "3.693.0" + "@aws-sdk/types" "3.692.0" + "@smithy/protocol-http" "^4.1.6" + "@smithy/signature-v4" "^4.2.2" + "@smithy/types" "^3.7.0" + tslib "^2.6.2" -"@aws-sdk/credential-provider-env@3.418.0": - version "3.418.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-env/-/credential-provider-env-3.418.0.tgz#7b14169350d9c14c9f656da06edf46f40a224ed2" - integrity "sha1-exQWk1DZwUyfZW2gbt9G9AoiTtI= sha512-e74sS+x63EZUBO+HaI8zor886YdtmULzwKdctsZp5/37Xho1CVUNtEC+fYa69nigBD9afoiH33I4JggaHgrekQ==" +"@aws-sdk/token-providers@3.693.0": + version "3.693.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/token-providers/-/token-providers-3.693.0.tgz#5ce7d6aa7a3437d4abdc0dca1be47f5158d15c85" + integrity sha512-nDBTJMk1l/YmFULGfRbToOA2wjf+FkQT4dMgYCv+V9uSYsMzQj8A7Tha2dz9yv4vnQgYaEiErQ8d7HVyXcVEoA== dependencies: - "@aws-sdk/types" "3.418.0" - "@smithy/property-provider" "^2.0.0" - "@smithy/types" "^2.3.3" - tslib "^2.5.0" + "@aws-sdk/types" "3.692.0" + "@smithy/property-provider" "^3.1.9" + "@smithy/shared-ini-file-loader" "^3.1.10" + "@smithy/types" "^3.7.0" + tslib "^2.6.2" -"@aws-sdk/credential-provider-ini@3.423.0": - version "3.423.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-ini/-/credential-provider-ini-3.423.0.tgz#62690a3c49b0223c3d239c8a3d2f2708e967a767" - integrity "sha1-YmkKPEmwIjw9I5yKPS8nCOlnp2c= sha512-7CsFWz8g7dQmblp57XzzxMirO4ClowGZIOwAheBkmk6q1XHbllcHFnbh2kdPyQQ0+JmjDg6waztIc7dY7Ycfvw==" +"@aws-sdk/types@3.692.0", "@aws-sdk/types@^3.222.0": + version "3.692.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/types/-/types-3.692.0.tgz#c8f6c75b6ad659865b72759796d4d92c1b72069b" + integrity sha512-RpNvzD7zMEhiKgmlxGzyXaEcg2khvM7wd5sSHVapOcrde1awQSOMGI4zKBQ+wy5TnDfrm170ROz/ERLYtrjPZA== dependencies: - "@aws-sdk/credential-provider-env" "3.418.0" - "@aws-sdk/credential-provider-process" "3.418.0" - "@aws-sdk/credential-provider-sso" "3.423.0" - "@aws-sdk/credential-provider-web-identity" "3.418.0" - "@aws-sdk/types" "3.418.0" - "@smithy/credential-provider-imds" "^2.0.0" - "@smithy/property-provider" "^2.0.0" - "@smithy/shared-ini-file-loader" "^2.0.6" - "@smithy/types" "^2.3.3" - tslib "^2.5.0" + "@smithy/types" "^3.7.0" + tslib "^2.6.2" -"@aws-sdk/credential-provider-node@3.423.0": - version "3.423.0" - resolved 
"https://registry.yarnpkg.com/@aws-sdk/credential-provider-node/-/credential-provider-node-3.423.0.tgz#80d05ea89b1a4f245786171ae516c331aa315908" - integrity "sha1-gNBeqJsaTyRXhhca5RbDMaoxWQg= sha512-lygbGJJUnDpgo8OEqdoYd51BKkyBVQ1Catiua/m0aHvL+SCmVrHiYPQPawWYGxpH8X3DXdXa0nd0LkEaevrHRg==" +"@aws-sdk/util-arn-parser@3.693.0": + version "3.693.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/util-arn-parser/-/util-arn-parser-3.693.0.tgz#8dae27eb822ab4f88be28bb3c0fc11f1f13d3948" + integrity sha512-WC8x6ca+NRrtpAH64rWu+ryDZI3HuLwlEr8EU6/dbC/pt+r/zC0PBoC15VEygUaBA+isppCikQpGyEDu0Yj7gQ== dependencies: - "@aws-sdk/credential-provider-env" "3.418.0" - "@aws-sdk/credential-provider-ini" "3.423.0" - "@aws-sdk/credential-provider-process" "3.418.0" - "@aws-sdk/credential-provider-sso" "3.423.0" - "@aws-sdk/credential-provider-web-identity" "3.418.0" - "@aws-sdk/types" "3.418.0" - "@smithy/credential-provider-imds" "^2.0.0" - "@smithy/property-provider" "^2.0.0" - "@smithy/shared-ini-file-loader" "^2.0.6" - "@smithy/types" "^2.3.3" - tslib "^2.5.0" + tslib "^2.6.2" -"@aws-sdk/credential-provider-process@3.418.0": - version "3.418.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-process/-/credential-provider-process-3.418.0.tgz#1cb6d816bd471db3f9724715b007035ef18b5b2b" - integrity "sha1-HLbYFr1HHbP5ckcVsAcDXvGLWys= sha512-xPbdm2WKz1oH6pTkrJoUmr3OLuqvvcPYTQX0IIlc31tmDwDWPQjXGGFD/vwZGIZIkKaFpFxVMgAzfFScxox7dw==" +"@aws-sdk/util-endpoints@3.693.0": + version "3.693.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/util-endpoints/-/util-endpoints-3.693.0.tgz#99f56f83fc25bdc3321f5871d6354abd56768891" + integrity sha512-eo4F6DRQ/kxS3gxJpLRv+aDNy76DxQJL5B3DPzpr9Vkq0ygVoi4GT5oIZLVaAVIJmi6k5qq9dLsYZfWLUxJJSg== dependencies: - "@aws-sdk/types" "3.418.0" - "@smithy/property-provider" "^2.0.0" - "@smithy/shared-ini-file-loader" "^2.0.6" - "@smithy/types" "^2.3.3" - tslib "^2.5.0" - -"@aws-sdk/credential-provider-sso@3.423.0": - version "3.423.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-sso/-/credential-provider-sso-3.423.0.tgz#a04f1715e5d9c75370d17ceac645379ca57cbb0b" - integrity "sha1-oE8XFeXZx1Nw0XzqxkU3nKV8uws= sha512-zAH68IjRMmW22USbsCVQ5Q6AHqhmWABwLbZAMocSGMasddTGv/nkA/nUiVCJ/B4LI3P81FoPQVrG5JxNmkNH0w==" - dependencies: - "@aws-sdk/client-sso" "3.423.0" - "@aws-sdk/token-providers" "3.418.0" - "@aws-sdk/types" "3.418.0" - "@smithy/property-provider" "^2.0.0" - "@smithy/shared-ini-file-loader" "^2.0.6" - "@smithy/types" "^2.3.3" - tslib "^2.5.0" - -"@aws-sdk/credential-provider-web-identity@3.418.0": - version "3.418.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/credential-provider-web-identity/-/credential-provider-web-identity-3.418.0.tgz#c2aed2a79bf193c1fef2b98391aaa9de7336aaaf" - integrity "sha1-wq7Sp5vxk8H+8rmDkaqp3nM2qq8= sha512-do7ang565n9p3dS1JdsQY01rUfRx8vkxQqz5M8OlcEHBNiCdi2PvSjNwcBdrv/FKkyIxZb0TImOfBSt40hVdxQ==" - dependencies: - "@aws-sdk/types" "3.418.0" - "@smithy/property-provider" "^2.0.0" - "@smithy/types" "^2.3.3" - tslib "^2.5.0" - -"@aws-sdk/middleware-bucket-endpoint@3.418.0": - version "3.418.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-bucket-endpoint/-/middleware-bucket-endpoint-3.418.0.tgz#1c330fb4dd583454872db7eba3b6e06c0699d59d" - integrity "sha1-HDMPtN1YNFSHLbfro7bgbAaZ1Z0= sha512-gj/mj1UfbKkGbQ1N4YUvjTTp8BVs5fO1QAL2AjFJ+jfJOToLReX72aNEkm7sPGbHML0TqOY4cQbJuWYy+zdD5g==" - dependencies: - "@aws-sdk/types" "3.418.0" - "@aws-sdk/util-arn-parser" "3.310.0" - "@smithy/node-config-provider" "^2.0.12" - 
"@smithy/protocol-http" "^3.0.5" - "@smithy/types" "^2.3.3" - "@smithy/util-config-provider" "^2.0.0" - tslib "^2.5.0" - -"@aws-sdk/middleware-expect-continue@3.418.0": - version "3.418.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-expect-continue/-/middleware-expect-continue-3.418.0.tgz#b621c6a8bc281f23bfd3791eaab25f687946d4a7" - integrity "sha1-tiHGqLwoHyO/03keqrJfaHlG1Kc= sha512-6x4rcIj685EmqDLQkbWoCur3Dg5DRClHMen6nHXmD3CR5Xyt3z1Gk/+jmZICxyJo9c6M4AeZht8o95BopkmYAQ==" - dependencies: - "@aws-sdk/types" "3.418.0" - "@smithy/protocol-http" "^3.0.5" - "@smithy/types" "^2.3.3" - tslib "^2.5.0" - -"@aws-sdk/middleware-flexible-checksums@3.418.0": - version "3.418.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-flexible-checksums/-/middleware-flexible-checksums-3.418.0.tgz#a79f44739ec918d8947294d0acc52eb7eb358773" - integrity "sha1-p59Ec57JGNiUcpTQrMUut+s1h3M= sha512-3O203dqS2JU5P1TAAbo7p1qplXQh59pevw9nqzPVb3EG8B+mSucVf2kKmF7kGHqKSk+nK/mB/4XGSsZBzGt6Wg==" - dependencies: - "@aws-crypto/crc32" "3.0.0" - "@aws-crypto/crc32c" "3.0.0" - "@aws-sdk/types" "3.418.0" - "@smithy/is-array-buffer" "^2.0.0" - "@smithy/protocol-http" "^3.0.5" - "@smithy/types" "^2.3.3" - "@smithy/util-utf8" "^2.0.0" - tslib "^2.5.0" - -"@aws-sdk/middleware-host-header@3.418.0": - version "3.418.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-host-header/-/middleware-host-header-3.418.0.tgz#35d682e14f36c9d9d7464c7c1dd582bf6611436d" - integrity "sha1-NdaC4U82ydnXRkx8HdWCv2YRQ20= sha512-LrMTdzalkPw/1ujLCKPLwCGvPMCmT4P+vOZQRbSEVZPnlZk+Aj++aL/RaHou0jL4kJH3zl8iQepriBt4a7UvXQ==" - dependencies: - "@aws-sdk/types" "3.418.0" - "@smithy/protocol-http" "^3.0.5" - "@smithy/types" "^2.3.3" - tslib "^2.5.0" - -"@aws-sdk/middleware-location-constraint@3.418.0": - version "3.418.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-location-constraint/-/middleware-location-constraint-3.418.0.tgz#e62e213a72ce583ba6135db51dcc60d07825b8ee" - integrity "sha1-5i4hOnLOWDumE121Hcxg0HgluO4= sha512-cc8M3VEaESHJhDsDV8tTpt2QYUprDWhvAVVSlcL43cTdZ54Quc0W+toDiaVOUlwrAZz2Y7g5NDj22ibJGFbOvw==" - dependencies: - "@aws-sdk/types" "3.418.0" - "@smithy/types" "^2.3.3" - tslib "^2.5.0" - -"@aws-sdk/middleware-logger@3.418.0": - version "3.418.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-logger/-/middleware-logger-3.418.0.tgz#08d7419f4220c36032a070a7dbb8bbf7e744a9ce" - integrity "sha1-CNdBn0Igw2AyoHCn27i79+dEqc4= sha512-StKGmyPVfoO/wdNTtKemYwoJsqIl4l7oqarQY7VSf2Mp3mqaa+njLViHsQbirYpyqpgUEusOnuTlH5utxJ1NsQ==" - dependencies: - "@aws-sdk/types" "3.418.0" - "@smithy/types" "^2.3.3" - tslib "^2.5.0" - -"@aws-sdk/middleware-recursion-detection@3.418.0": - version "3.418.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-recursion-detection/-/middleware-recursion-detection-3.418.0.tgz#2bb80d084f946846ad4907f3d6e0b451787d62b1" - integrity "sha1-K7gNCE+UaEatSQfz1uC0UXh9YrE= sha512-kKFrIQglBLUFPbHSDy1+bbe3Na2Kd70JSUC3QLMbUHmqipXN8KeXRfAj7vTv97zXl0WzG0buV++WcNwOm1rFjg==" - dependencies: - "@aws-sdk/types" "3.418.0" - "@smithy/protocol-http" "^3.0.5" - "@smithy/types" "^2.3.3" - tslib "^2.5.0" - -"@aws-sdk/middleware-sdk-s3@3.418.0": - version "3.418.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-sdk-s3/-/middleware-sdk-s3-3.418.0.tgz#b1de52d54e0cbc8d46ce0bc4c6c54b527f409aaf" - integrity "sha1-sd5S1U4MvI1GzgvExsVLUn9Amq8= sha512-rei32LF45SyqL3NlWDjEOfMwAca9A5F4QgUyXJqvASc43oWC1tJnLIhiCxNh8qkWAiRyRzFpcanTeqyaRSsZpA==" - dependencies: - "@aws-sdk/types" "3.418.0" - 
"@aws-sdk/util-arn-parser" "3.310.0" - "@smithy/protocol-http" "^3.0.5" - "@smithy/smithy-client" "^2.1.6" - "@smithy/types" "^2.3.3" - tslib "^2.5.0" - -"@aws-sdk/middleware-sdk-sts@3.418.0": - version "3.418.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-sdk-sts/-/middleware-sdk-sts-3.418.0.tgz#f167f16050e055282ddd60226a2216c84873d464" - integrity "sha1-8WfxYFDgVSgt3WAiaiIWyEhz1GQ= sha512-cW8ijrCTP+mgihvcq4+TbhAcE/we5lFl4ydRqvTdtcSnYQAVQADg47rnTScQiFsPFEB3NKq7BGeyTJF9MKolPA==" - dependencies: - "@aws-sdk/middleware-signing" "3.418.0" - "@aws-sdk/types" "3.418.0" - "@smithy/types" "^2.3.3" - tslib "^2.5.0" - -"@aws-sdk/middleware-signing@3.418.0": - version "3.418.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-signing/-/middleware-signing-3.418.0.tgz#c7242b84069067bb671cb4191d412b59713a375e" - integrity "sha1-xyQrhAaQZ7tnHLQZHUErWXE6N14= sha512-onvs5KoYQE8OlOE740RxWBGtsUyVIgAo0CzRKOQO63ZEYqpL1Os+MS1CGzdNhvQnJgJruE1WW+Ix8fjN30zKPA==" - dependencies: - "@aws-sdk/types" "3.418.0" - "@smithy/property-provider" "^2.0.0" - "@smithy/protocol-http" "^3.0.5" - "@smithy/signature-v4" "^2.0.0" - "@smithy/types" "^2.3.3" - "@smithy/util-middleware" "^2.0.2" - tslib "^2.5.0" - -"@aws-sdk/middleware-ssec@3.418.0": - version "3.418.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-ssec/-/middleware-ssec-3.418.0.tgz#67b554c4acad81c7aa93421c8fcba8a18e138294" - integrity "sha1-Z7VUxKytgceqk0Icj8uooY4TgpQ= sha512-J7K+5h6aP7IYMlu/NwHEIjb0+WDu1eFvO8TCPo6j1H9xYRi8B/6h+6pa9Rk9IgRUzFnrdlDu9FazG8Tp0KKLyg==" - dependencies: - "@aws-sdk/types" "3.418.0" - "@smithy/types" "^2.3.3" - tslib "^2.5.0" - -"@aws-sdk/middleware-user-agent@3.418.0": - version "3.418.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/middleware-user-agent/-/middleware-user-agent-3.418.0.tgz#37426cf801332165fb170b1fd62dea8bb967a1ef" - integrity "sha1-N0Js+AEzIWX7Fwsf1i3qi7lnoe8= sha512-Jdcztg9Tal9SEAL0dKRrnpKrm6LFlWmAhvuwv0dQ7bNTJxIxyEFbpqdgy7mpQHsLVZgq1Aad/7gT/72c9igyZw==" - dependencies: - "@aws-sdk/types" "3.418.0" - "@aws-sdk/util-endpoints" "3.418.0" - "@smithy/protocol-http" "^3.0.5" - "@smithy/types" "^2.3.3" - tslib "^2.5.0" - -"@aws-sdk/region-config-resolver@3.418.0": - version "3.418.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/region-config-resolver/-/region-config-resolver-3.418.0.tgz#53b99e4bd92f3369f51e9a76534b7d884db67526" - integrity "sha1-U7meS9kvM2n1Hpp2U0t9iE22dSY= sha512-lJRZ/9TjZU6yLz+mAwxJkcJZ6BmyYoIJVo1p5+BN//EFdEmC8/c0c9gXMRzfISV/mqWSttdtccpAyN4/goHTYA==" - dependencies: - "@smithy/node-config-provider" "^2.0.12" - "@smithy/types" "^2.3.3" - "@smithy/util-config-provider" "^2.0.0" - "@smithy/util-middleware" "^2.0.2" - tslib "^2.5.0" - -"@aws-sdk/signature-v4-multi-region@3.418.0": - version "3.418.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/signature-v4-multi-region/-/signature-v4-multi-region-3.418.0.tgz#984c8fc948c61a7ad02f1ccc6c2ddecf43a265b1" - integrity "sha1-mEyPyUjGGnrQLxzMbC3ez0OiZbE= sha512-LeVYMZeUQUURFqDf4yZxTEv016g64hi0LqYBjU0mjwd8aPc0k6hckwvshezc80jCNbuLyjNfQclvlg3iFliItQ==" - dependencies: - "@aws-sdk/types" "3.418.0" - "@smithy/protocol-http" "^3.0.5" - "@smithy/signature-v4" "^2.0.0" - "@smithy/types" "^2.3.3" - tslib "^2.5.0" - -"@aws-sdk/token-providers@3.418.0": - version "3.418.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/token-providers/-/token-providers-3.418.0.tgz#cbfac922df397e72daf6dbdd8c1e9a140df0aa0e" - integrity "sha1-y/rJIt85fnLa9tvdjB6aFA3wqg4= 
sha512-9P7Q0VN0hEzTngy3Sz5eya2qEOEf0Q8qf1vB3um0gE6ID6EVAdz/nc/DztfN32MFxk8FeVBrCP5vWdoOzmd72g==" - dependencies: - "@aws-crypto/sha256-browser" "3.0.0" - "@aws-crypto/sha256-js" "3.0.0" - "@aws-sdk/middleware-host-header" "3.418.0" - "@aws-sdk/middleware-logger" "3.418.0" - "@aws-sdk/middleware-recursion-detection" "3.418.0" - "@aws-sdk/middleware-user-agent" "3.418.0" - "@aws-sdk/types" "3.418.0" - "@aws-sdk/util-endpoints" "3.418.0" - "@aws-sdk/util-user-agent-browser" "3.418.0" - "@aws-sdk/util-user-agent-node" "3.418.0" - "@smithy/config-resolver" "^2.0.10" - "@smithy/fetch-http-handler" "^2.1.5" - "@smithy/hash-node" "^2.0.9" - "@smithy/invalid-dependency" "^2.0.9" - "@smithy/middleware-content-length" "^2.0.11" - "@smithy/middleware-endpoint" "^2.0.9" - "@smithy/middleware-retry" "^2.0.12" - "@smithy/middleware-serde" "^2.0.9" - "@smithy/middleware-stack" "^2.0.2" - "@smithy/node-config-provider" "^2.0.12" - "@smithy/node-http-handler" "^2.1.5" - "@smithy/property-provider" "^2.0.0" - "@smithy/protocol-http" "^3.0.5" - "@smithy/shared-ini-file-loader" "^2.0.6" - "@smithy/smithy-client" "^2.1.6" - "@smithy/types" "^2.3.3" - "@smithy/url-parser" "^2.0.9" - "@smithy/util-base64" "^2.0.0" - "@smithy/util-body-length-browser" "^2.0.0" - "@smithy/util-body-length-node" "^2.1.0" - "@smithy/util-defaults-mode-browser" "^2.0.10" - "@smithy/util-defaults-mode-node" "^2.0.12" - "@smithy/util-retry" "^2.0.2" - "@smithy/util-utf8" "^2.0.0" - tslib "^2.5.0" - -"@aws-sdk/types@3.418.0", "@aws-sdk/types@^3.222.0": - version "3.418.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/types/-/types-3.418.0.tgz#c23213110b0c313d5546c810da032a441682f49a" - integrity "sha1-wjITEQsMMT1VRsgQ2gMqRBaC9Jo= sha512-y4PQSH+ulfFLY0+FYkaK4qbIaQI9IJNMO2xsxukW6/aNoApNymN1D2FSi2la8Qbp/iPjNDKsG8suNPm9NtsWXQ==" - dependencies: - "@smithy/types" "^2.3.3" - tslib "^2.5.0" - -"@aws-sdk/util-arn-parser@3.310.0": - version "3.310.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/util-arn-parser/-/util-arn-parser-3.310.0.tgz#861ff8810851be52a320ec9e4786f15b5fc74fba" - integrity "sha1-hh/4gQhRvlKjIOyeR4bxW1/HT7o= sha512-jL8509owp/xB9+Or0pvn3Fe+b94qfklc2yPowZZIFAkFcCSIdkIglz18cPDWnYAcy9JGewpMS1COXKIUhZkJsA==" - dependencies: - tslib "^2.5.0" - -"@aws-sdk/util-endpoints@3.418.0": - version "3.418.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/util-endpoints/-/util-endpoints-3.418.0.tgz#462c976f054fe260562d4d2844152a04dd883fd7" - integrity "sha1-RiyXbwVP4mBWLU0oRBUqBN2IP9c= sha512-sYSDwRTl7yE7LhHkPzemGzmIXFVHSsi3AQ1KeNEk84eBqxMHHcCc2kqklaBk2roXWe50QDgRMy1ikZUxvtzNHQ==" - dependencies: - "@aws-sdk/types" "3.418.0" - tslib "^2.5.0" + "@aws-sdk/types" "3.692.0" + "@smithy/types" "^3.7.0" + "@smithy/util-endpoints" "^2.1.5" + tslib "^2.6.2" "@aws-sdk/util-locate-window@^3.0.0": - version "3.310.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/util-locate-window/-/util-locate-window-3.310.0.tgz#b071baf050301adee89051032bd4139bba32cc40" - integrity "sha1-sHG68FAwGt7okFEDK9QTm7oyzEA= sha512-qo2t/vBTnoXpjKxlsC2e1gBrRm80M3bId27r0BRB2VniSSe7bL1mmzM+/HFtujm0iAxtPM+aLEflLJlJeDPg0w==" + version "3.693.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/util-locate-window/-/util-locate-window-3.693.0.tgz#1160f6d055cf074ca198eb8ecf89b6311537ad6c" + integrity sha512-ttrag6haJLWABhLqtg1Uf+4LgHWIMOVSYL+VYZmAp2v4PUGOwWmWQH0Zk8RM7YuQcLfH/EoR72/Yxz6A4FKcuw== dependencies: - tslib "^2.5.0" + tslib "^2.6.2" -"@aws-sdk/util-user-agent-browser@3.418.0": - version "3.418.0" - resolved 
"https://registry.yarnpkg.com/@aws-sdk/util-user-agent-browser/-/util-user-agent-browser-3.418.0.tgz#dc76b8e7e5cae3f827d68cd4a3ee30c0d475a39c" - integrity "sha1-3Ha45+XK4/gn1ozUo+4wwNR1o5w= sha512-c4p4mc0VV/jIeNH0lsXzhJ1MpWRLuboGtNEpqE4s1Vl9ck2amv9VdUUZUmHbg+bVxlMgRQ4nmiovA4qIrqGuyg==" +"@aws-sdk/util-user-agent-browser@3.693.0": + version "3.693.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/util-user-agent-browser/-/util-user-agent-browser-3.693.0.tgz#c6969be97e7cd0190b3b72a82a642b29ff4659c4" + integrity sha512-6EUfuKOujtddy18OLJUaXfKBgs+UcbZ6N/3QV4iOkubCUdeM1maIqs++B9bhCbWeaeF5ORizJw5FTwnyNjE/mw== dependencies: - "@aws-sdk/types" "3.418.0" - "@smithy/types" "^2.3.3" + "@aws-sdk/types" "3.692.0" + "@smithy/types" "^3.7.0" bowser "^2.11.0" - tslib "^2.5.0" + tslib "^2.6.2" -"@aws-sdk/util-user-agent-node@3.418.0": - version "3.418.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/util-user-agent-node/-/util-user-agent-node-3.418.0.tgz#7d5a1c82ce3265ff0f70b13d58d08593113ab99a" - integrity "sha1-fVocgs4yZf8PcLE9WNCFkxE6uZo= sha512-BXMskXFtg+dmzSCgmnWOffokxIbPr1lFqa1D9kvM3l3IFRiFGx2IyDg+8MAhq11aPDLvoa/BDuQ0Yqma5izOhg==" +"@aws-sdk/util-user-agent-node@3.693.0": + version "3.693.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/util-user-agent-node/-/util-user-agent-node-3.693.0.tgz#b26c806faa2001d4fa1d515b146eeff411513dd9" + integrity sha512-td0OVX8m5ZKiXtecIDuzY3Y3UZIzvxEr57Hp21NOwieqKCG2UeyQWWeGPv0FQaU7dpTkvFmVNI+tx9iB8V/Nhg== dependencies: - "@aws-sdk/types" "3.418.0" - "@smithy/node-config-provider" "^2.0.12" - "@smithy/types" "^2.3.3" - tslib "^2.5.0" + "@aws-sdk/middleware-user-agent" "3.693.0" + "@aws-sdk/types" "3.692.0" + "@smithy/node-config-provider" "^3.1.10" + "@smithy/types" "^3.7.0" + tslib "^2.6.2" -"@aws-sdk/util-utf8-browser@^3.0.0": - version "3.259.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/util-utf8-browser/-/util-utf8-browser-3.259.0.tgz#3275a6f5eb334f96ca76635b961d3c50259fd9ff" - integrity "sha1-MnWm9eszT5bKdmNblh08UCWf2f8= sha512-UvFa/vR+e19XookZF8RzFZBrw2EUkQWxiBW0yYQAhvk3C+QVGl0H3ouca8LDBlBfQKXwmW3huo/59H8rwb1wJw==" +"@aws-sdk/xml-builder@3.693.0": + version "3.693.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/xml-builder/-/xml-builder-3.693.0.tgz#709a46a3335b71144d9f7917a7cb3033b5a04e82" + integrity sha512-C/rPwJcqnV8VDr2/VtcQnymSpcfEEgH1Jm6V0VmfXNZFv4Qzf1eCS8nsec0gipYgZB+cBBjfXw5dAk6pJ8ubpw== dependencies: - tslib "^2.3.1" - -"@aws-sdk/xml-builder@3.310.0": - version "3.310.0" - resolved "https://registry.yarnpkg.com/@aws-sdk/xml-builder/-/xml-builder-3.310.0.tgz#f0236f2103b438d16117e0939a6305ad69b7ff76" - integrity "sha1-8CNvIQO0ONFhF+CTmmMFrWm3/3Y= sha512-TqELu4mOuSIKQCqj63fGVs86Yh+vBx5nHRpWKNUNhB2nPTpfbziTs5c1X358be3peVWA4wPxW7Nt53KIg1tnNw==" - dependencies: - tslib "^2.5.0" + "@smithy/types" "^3.7.0" + tslib "^2.6.2" "@azure/abort-controller@^1.0.0", "@azure/abort-controller@^1.0.4": version "1.1.0" @@ -652,7 +706,7 @@ dependencies: tslib "^2.6.2" -"@azure/core-auth@^1.3.0", "@azure/core-auth@^1.4.0", "@azure/core-auth@^1.5.0": +"@azure/core-auth@^1.3.0", "@azure/core-auth@^1.4.0": version "1.5.0" resolved "https://registry.yarnpkg.com/@azure/core-auth/-/core-auth-1.5.0.tgz#a41848c5c31cb3b7c84c409885267d55a2c92e44" integrity sha512-udzoBuYG1VBoHVohDTrvKjyzel34zt77Bhp7dQntVGGD0ehVq48owENbBG8fIgkHRNUBQH5k1r0hpoMu5L8+kw== @@ -661,6 +715,15 @@ "@azure/core-util" "^1.1.0" tslib "^2.2.0" +"@azure/core-auth@^1.5.0", "@azure/core-auth@^1.7.2", "@azure/core-auth@^1.8.0": + version "1.9.0" + resolved 
"https://registry.yarnpkg.com/@azure/core-auth/-/core-auth-1.9.0.tgz#ac725b03fabe3c892371065ee9e2041bee0fd1ac" + integrity sha512-FPwHpZywuyasDSLMqJ6fhbOK3TqUdviZNF8OqRGA4W5Ewib2lEEZ+pBsYcBa88B2NGO/SEnYPGhyBqNlE8ilSw== + dependencies: + "@azure/abort-controller" "^2.0.0" + "@azure/core-util" "^1.11.0" + tslib "^2.6.2" + "@azure/core-client@^1.3.0", "@azure/core-client@^1.4.0", "@azure/core-client@^1.5.0": version "1.9.2" resolved "https://registry.yarnpkg.com/@azure/core-client/-/core-client-1.9.2.tgz#6fc69cee2816883ab6c5cdd653ee4f2ff9774f74" @@ -684,9 +747,9 @@ "@azure/core-rest-pipeline" "^1.3.0" "@azure/core-http@^3.0.0": - version "3.0.0" - resolved "https://registry.yarnpkg.com/@azure/core-http/-/core-http-3.0.0.tgz#345845f9ba479a5ee41efc3fd7a13e82d2a0ec47" - integrity sha512-BxI2SlGFPPz6J1XyZNIVUf0QZLBKFX+ViFjKOkzqD18J1zOINIQ8JSBKKr+i+v8+MB6LacL6Nn/sP/TE13+s2Q== + version "3.0.4" + resolved "https://registry.yarnpkg.com/@azure/core-http/-/core-http-3.0.4.tgz#024b2909bbc0f2fce08c74f97a21312c4f42e922" + integrity sha512-Fok9VVhMdxAFOtqiiAtg74fL0UJkt0z3D+ouUUxcRLzZNBioPRAMJFVxiWoJljYpXsRi4GDQHzQHDc9AiYaIUQ== dependencies: "@azure/abort-controller" "^1.0.0" "@azure/core-auth" "^1.3.0" @@ -701,7 +764,7 @@ tslib "^2.2.0" tunnel "^0.0.6" uuid "^8.3.0" - xml2js "^0.4.19" + xml2js "^0.5.0" "@azure/core-lro@^2.2.0": version "2.5.1" @@ -719,7 +782,21 @@ dependencies: tslib "^2.2.0" -"@azure/core-rest-pipeline@^1.1.0", "@azure/core-rest-pipeline@^1.3.0", "@azure/core-rest-pipeline@^1.8.0", "@azure/core-rest-pipeline@^1.9.1": +"@azure/core-rest-pipeline@^1.1.0": + version "1.18.0" + resolved "https://registry.yarnpkg.com/@azure/core-rest-pipeline/-/core-rest-pipeline-1.18.0.tgz#165f1cd9bb1060be3b6895742db3d1f1106271d3" + integrity sha512-QSoGUp4Eq/gohEFNJaUOwTN7BCc2nHTjjbm75JT0aD7W65PWM1H/tItz0GsABn22uaKyGxiMhWQLt2r+FGU89Q== + dependencies: + "@azure/abort-controller" "^2.0.0" + "@azure/core-auth" "^1.8.0" + "@azure/core-tracing" "^1.0.1" + "@azure/core-util" "^1.11.0" + "@azure/logger" "^1.0.0" + http-proxy-agent "^7.0.0" + https-proxy-agent "^7.0.0" + tslib "^2.6.2" + +"@azure/core-rest-pipeline@^1.3.0", "@azure/core-rest-pipeline@^1.8.0", "@azure/core-rest-pipeline@^1.9.1": version "1.11.0" resolved "https://registry.yarnpkg.com/@azure/core-rest-pipeline/-/core-rest-pipeline-1.11.0.tgz#fc0e8f56caac08a9d4ac91c07a6c5a360ea31c82" integrity sha512-nB4KXl6qAyJmBVLWA7SakT4tzpYZTCk4pvRBeI+Ye0WYSOrlTqlMhc4MSS/8atD3ufeYWdkN380LLoXlUUzThw== @@ -749,7 +826,7 @@ dependencies: tslib "^2.2.0" -"@azure/core-util@^1.0.0", "@azure/core-util@^1.1.0", "@azure/core-util@^1.1.1", "@azure/core-util@^1.3.0", "@azure/core-util@^1.6.1": +"@azure/core-util@^1.0.0", "@azure/core-util@^1.1.0", "@azure/core-util@^1.3.0", "@azure/core-util@^1.6.1": version "1.6.1" resolved "https://registry.yarnpkg.com/@azure/core-util/-/core-util-1.6.1.tgz#fea221c4fa43c26543bccf799beb30c1c7878f5a" integrity sha512-h5taHeySlsV9qxuK64KZxy4iln1BtMYlNt5jbuEFN3UFSAd1EwKg/Gjl5a6tZ/W8t6li3xPnutOx7zbDyXnPmQ== @@ -757,7 +834,15 @@ "@azure/abort-controller" "^1.0.0" tslib "^2.2.0" -"@azure/identity@4.2.1", "@azure/identity@^3.4.1": +"@azure/core-util@^1.1.1", "@azure/core-util@^1.11.0": + version "1.11.0" + resolved "https://registry.yarnpkg.com/@azure/core-util/-/core-util-1.11.0.tgz#f530fc67e738aea872fbdd1cc8416e70219fada7" + integrity sha512-DxOSLua+NdpWoSqULhjDyAZTXFdP/LKkqtYuxxz1SCN289zk3OG8UOpnCQAz/tygyACBtWp/BoO72ptK7msY8g== + dependencies: + "@azure/abort-controller" "^2.0.0" + tslib "^2.6.2" + +"@azure/identity@4.2.1", 
"@azure/identity@^4.2.1": version "4.2.1" resolved "https://registry.yarnpkg.com/@azure/identity/-/identity-4.2.1.tgz#22b366201e989b7b41c0e1690e103bd579c31e4c" integrity sha512-U8hsyC9YPcEIzoaObJlRDvp7KiF0MGS7xcWbyJSVvXRkC/HXo1f0oYeBYmEvVgRfacw7GHf6D6yAoh9JHz6A5Q== @@ -802,23 +887,18 @@ tslib "^2.2.0" "@azure/msal-browser@^3.11.1": - version "3.25.0" - resolved "https://registry.yarnpkg.com/@azure/msal-browser/-/msal-browser-3.25.0.tgz#7ce0949977bc9e0c58319f7090c44fe5537104d4" - integrity sha512-a0Y7pmSy8SC1s9bvwr+REvyAA1nQcITlZvkElM2gNUPYFTTNUTEdcpg73TmawNucyMdZ9xb/GFcuhrLOqYAzwg== + version "3.27.0" + resolved "https://registry.yarnpkg.com/@azure/msal-browser/-/msal-browser-3.27.0.tgz#b6f02f73c8e102d3f115009b4677539fb173fe2b" + integrity sha512-+b4ZKSD8+vslCtVRVetkegEhOFMLP3rxDWJY212ct+2r6jVg6OSQKc1Qz3kCoXo0FgwaXkb+76TMZfpHp8QtgA== dependencies: - "@azure/msal-common" "14.15.0" - -"@azure/msal-common@14.15.0": - version "14.15.0" - resolved "https://registry.yarnpkg.com/@azure/msal-common/-/msal-common-14.15.0.tgz#0e27ac0bb88fe100f4f8d1605b64d5c268636a55" - integrity sha512-ImAQHxmpMneJ/4S8BRFhjt1MZ3bppmpRPYYNyzeQPeFN288YKbb8TmmISQEbtfkQ1BPASvYZU5doIZOPBAqENQ== + "@azure/msal-common" "14.16.0" "@azure/msal-common@14.16.0": version "14.16.0" resolved "https://registry.yarnpkg.com/@azure/msal-common/-/msal-common-14.16.0.tgz#f3470fcaec788dbe50859952cd499340bda23d7a" integrity sha512-1KOZj9IpcDSwpNiQNjt0jDYZpQvNZay7QAEi/5DLubay40iGYtLzya/jbjRPLyOTZhEKyL1MzPuw2HqBCjceYA== -"@azure/msal-node@^2.5.1": +"@azure/msal-node@^2.5.1", "@azure/msal-node@^2.9.2": version "2.16.2" resolved "https://registry.yarnpkg.com/@azure/msal-node/-/msal-node-2.16.2.tgz#3eb768d36883ea6f9a939c0b5b467b518e78fffc" integrity sha512-An7l1hEr0w1HMMh1LU+rtDtqL7/jw74ORlc9Wnh06v7TU/xpG39/Zdr1ZJu3QpjUfKJ+E0/OXMW8DRSWTlh7qQ== @@ -827,19 +907,10 @@ jsonwebtoken "^9.0.0" uuid "^8.3.0" -"@azure/msal-node@^2.9.2": - version "2.15.0" - resolved "https://registry.yarnpkg.com/@azure/msal-node/-/msal-node-2.15.0.tgz#50bf8e692a6656027c073a75d877a8a478aafdfd" - integrity sha512-gVPW8YLz92ZeCibQH2QUw96odJoiM3k/ZPH3f2HxptozmH6+OnyyvKXo/Egg39HAM230akarQKHf0W74UHlh0Q== - dependencies: - "@azure/msal-common" "14.15.0" - jsonwebtoken "^9.0.0" - uuid "^8.3.0" - -"@azure/storage-blob@^12.11.0": - version "12.13.0" - resolved "https://registry.yarnpkg.com/@azure/storage-blob/-/storage-blob-12.13.0.tgz#9209cbb5c2cd463fb967a0f2ae144ace20879160" - integrity sha512-t3Q2lvBMJucgTjQcP5+hvEJMAsJSk0qmAnjDLie2td017IiduZbbC9BOcFfmwzR6y6cJdZOuewLCNFmEx9IrXA== +"@azure/storage-blob@12.18.x": + version "12.18.0" + resolved "https://registry.yarnpkg.com/@azure/storage-blob/-/storage-blob-12.18.0.tgz#9dd001c9aa5e972216f5af15131009086cfeb59e" + integrity sha512-BzBZJobMoDyjJsPRMLNHvqHycTGrT8R/dtcTx9qUFcqwSRfGVK9A/cZ7Nx38UQydT9usZGbaDCN75QRNjezSAA== dependencies: "@azure/abort-controller" "^1.0.0" "@azure/core-http" "^3.0.0" @@ -2125,7 +2196,7 @@ resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39" integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw== -"@budibase/backend-core@2.33.2": +"@budibase/backend-core@3.2.7": version "0.0.0" dependencies: "@budibase/nano" "10.1.5" @@ -2134,12 +2205,12 @@ "@budibase/types" "0.0.0" "@techpass/passport-openidconnect" "0.3.3" aws-cloudfront-sign "3.0.2" - aws-sdk "2.1030.0" + aws-sdk "2.1692.0" bcrypt "5.1.0" bcryptjs "2.4.3" bull "4.10.1" correlation-id "4.0.0" - dd-trace "5.2.0" + dd-trace "5.23.0" 
dotenv "16.0.1" google-auth-library "^8.0.1" google-spreadsheet "npm:@budibase/google-spreadsheet@4.1.5" @@ -2157,8 +2228,8 @@ pino "8.11.0" pino-http "8.3.3" posthog-node "4.0.1" - pouchdb "7.3.0" - pouchdb-find "7.2.2" + pouchdb "9.0.0" + pouchdb-find "9.0.0" redlock "4.2.0" rotating-file-stream "3.1.0" sanitize-s3-objectkey "0.0.1" @@ -2209,15 +2280,15 @@ through2 "^2.0.0" "@budibase/pro@npm:@budibase/pro@latest": - version "2.33.2" - resolved "https://registry.yarnpkg.com/@budibase/pro/-/pro-2.33.2.tgz#5c2012f7b2bf0fd871cda1ad37ad7a0442c84658" - integrity sha512-lBB6Wfp6OIOHRlGq82WS9KxvEXRs/P2QlwJT0Aj9PhmkQFsnXm2r8d18f0xTGvcflD+iR7XGP/k56JlCanmhQg== + version "3.2.7" + resolved "https://registry.yarnpkg.com/@budibase/pro/-/pro-3.2.7.tgz#4dfc48f3e2ba3a3b235872e54c3de4de54ea7941" + integrity sha512-GRRaf1qSqQfoodjvKjBDvNOVHZrcSXF7so6Y9Xv/MiRTjDES5nmhIT5rL/PQ1+Mb+sPytYmMaJ2mlTtwqSQZgQ== dependencies: "@anthropic-ai/sdk" "^0.27.3" - "@budibase/backend-core" "2.33.2" - "@budibase/shared-core" "2.33.2" - "@budibase/string-templates" "2.33.2" - "@budibase/types" "2.33.2" + "@budibase/backend-core" "3.2.7" + "@budibase/shared-core" "3.2.7" + "@budibase/string-templates" "3.2.7" + "@budibase/types" "3.2.7" "@koa/router" "8.0.8" bull "4.10.1" dd-trace "5.2.0" @@ -2230,13 +2301,13 @@ scim-patch "^0.8.1" scim2-parse-filter "^0.2.8" -"@budibase/shared-core@2.33.2": +"@budibase/shared-core@3.2.7": version "0.0.0" dependencies: "@budibase/types" "0.0.0" cron-validate "1.4.5" -"@budibase/string-templates@2.33.2": +"@budibase/string-templates@3.2.7": version "0.0.0" dependencies: "@budibase/handlebars-helpers" "^0.13.2" @@ -2244,7 +2315,7 @@ handlebars "^4.7.8" lodash.clonedeep "^4.5.0" -"@budibase/types@2.33.2": +"@budibase/types@3.2.7": version "0.0.0" dependencies: scim-patch "^0.8.1" @@ -2356,10 +2427,10 @@ style-mod "^4.0.0" w3c-keyname "^2.2.4" -"@colors/colors@1.5.0": - version "1.5.0" - resolved "https://registry.yarnpkg.com/@colors/colors/-/colors-1.5.0.tgz#bb504579c1cae923e6576a4f5da43d25f97bdbd9" - integrity sha512-ooWCrlZP11i8GImSjTHYHLkvFDP48nS4+204nGb1RiX/WXYHmJA2III9/e2DWVabCESdW7hBAEzHRqUn9OUVvQ== +"@colors/colors@1.6.0", "@colors/colors@^1.6.0": + version "1.6.0" + resolved "https://registry.yarnpkg.com/@colors/colors/-/colors-1.6.0.tgz#ec6cd237440700bc23ca23087f513c75508958b0" + integrity sha512-Ir+AOibqzrIsL6ajt3Rz3LskB7OiMVHqltZmspbW/TJuTVuyOMirVqAkjfY6JISiLHgyNqicAC8AyHHGzNd/dA== "@cspotcode/source-map-support@^0.8.0": version "0.8.1" @@ -2414,6 +2485,13 @@ dependencies: node-gyp-build "^3.9.0" +"@datadog/native-appsec@8.1.1": + version "8.1.1" + resolved "https://registry.yarnpkg.com/@datadog/native-appsec/-/native-appsec-8.1.1.tgz#76aa34697e6ecbd3d9ef7e6938d3cdcfa689b1f3" + integrity sha512-mf+Ym/AzET4FeUTXOs8hz0uLOSsVIUnavZPUx8YoKWK5lKgR2L+CLfEzOpjBwgFpDgbV8I1/vyoGelgGpsMKHA== + dependencies: + node-gyp-build "^3.9.0" + "@datadog/native-iast-rewriter@2.2.2": version "2.2.2" resolved "https://registry.yarnpkg.com/@datadog/native-iast-rewriter/-/native-iast-rewriter-2.2.2.tgz#3f7feaf6be1af4c83ad063065b8ed509bbaf11cb" @@ -2422,6 +2500,14 @@ lru-cache "^7.14.0" node-gyp-build "^4.5.0" +"@datadog/native-iast-rewriter@2.4.1": + version "2.4.1" + resolved "https://registry.yarnpkg.com/@datadog/native-iast-rewriter/-/native-iast-rewriter-2.4.1.tgz#e8211f78c818906513fb96a549374da0382c7623" + integrity sha512-j3auTmyyn63e2y+SL28CGNy/l+jXQyh+pxqoGTacWaY5FW/dvo5nGQepAismgJ3qJ8VhQfVWRdxBSiT7wu9clw== + dependencies: + lru-cache "^7.14.0" + node-gyp-build "^4.5.0" + 
"@datadog/native-iast-taint-tracking@1.6.4": version "1.6.4" resolved "https://registry.yarnpkg.com/@datadog/native-iast-taint-tracking/-/native-iast-taint-tracking-1.6.4.tgz#16c21ad7c36a53420c0d3c5a3720731809cc7e98" @@ -2429,6 +2515,13 @@ dependencies: node-gyp-build "^3.9.0" +"@datadog/native-iast-taint-tracking@3.1.0": + version "3.1.0" + resolved "https://registry.yarnpkg.com/@datadog/native-iast-taint-tracking/-/native-iast-taint-tracking-3.1.0.tgz#7b2ed7f8fad212d65e5ab03bcdea8b42a3051b2e" + integrity sha512-rw6qSjmxmu1yFHVvZLXFt/rVq2tUZXocNogPLB8n7MPpA0jijNGb109WokWw5ITImiW91GcGDuBW6elJDVKouQ== + dependencies: + node-gyp-build "^3.9.0" + "@datadog/native-metrics@^2.0.0": version "2.0.0" resolved "https://registry.yarnpkg.com/@datadog/native-metrics/-/native-metrics-2.0.0.tgz#65bf03313ee419956361e097551db36173e85712" @@ -2448,6 +2541,17 @@ pprof-format "^2.0.7" source-map "^0.7.4" +"@datadog/pprof@5.3.0": + version "5.3.0" + resolved "https://registry.yarnpkg.com/@datadog/pprof/-/pprof-5.3.0.tgz#c2f58d328ecced7f99887f1a559d7fe3aecb9219" + integrity sha512-53z2Q3K92T6Pf4vz4Ezh8kfkVEvLzbnVqacZGgcbkP//q0joFzO8q00Etw1S6NdnCX0XmX08ULaF4rUI5r14mw== + dependencies: + delay "^5.0.0" + node-gyp-build "<4.0" + p-limit "^3.1.0" + pprof-format "^2.1.0" + source-map "^0.7.4" + "@datadog/sketches-js@^2.1.0": version "2.1.0" resolved "https://registry.yarnpkg.com/@datadog/sketches-js/-/sketches-js-2.1.0.tgz#8c7e8028a5fc22ad102fa542b0a446c956830455" @@ -2661,46 +2765,43 @@ google-gax "^4.3.3" protobufjs "^7.2.6" -"@google-cloud/paginator@^3.0.7": - version "3.0.7" - resolved "https://registry.yarnpkg.com/@google-cloud/paginator/-/paginator-3.0.7.tgz#fb6f8e24ec841f99defaebf62c75c2e744dd419b" - integrity "sha1-+2+OJOyEH5ne+uv2LHXC50TdQZs= sha512-jJNutk0arIQhmpUUQJPJErsojqo834KcyB6X7a1mxuic8i1tKXxde8E69IZxNZawRIlZdIK2QY4WALvlK5MzYQ==" +"@google-cloud/paginator@^5.0.0": + version "5.0.2" + resolved "https://registry.yarnpkg.com/@google-cloud/paginator/-/paginator-5.0.2.tgz#86ad773266ce9f3b82955a8f75e22cd012ccc889" + integrity sha512-DJS3s0OVH4zFDB1PzjxAsHqJT6sKVbRwwML0ZBP9PbU7Yebtu/7SWMRzvO2J3nUi9pRNITCfu4LJeooM2w4pjg== dependencies: arrify "^2.0.0" extend "^3.0.2" -"@google-cloud/projectify@^3.0.0": - version "3.0.0" - resolved "https://registry.yarnpkg.com/@google-cloud/projectify/-/projectify-3.0.0.tgz#302b25f55f674854dce65c2532d98919b118a408" - integrity "sha1-MCsl9V9nSFTc5lwlMtmJGbEYpAg= sha512-HRkZsNmjScY6Li8/kb70wjGlDDyLkVk3KvoEo9uIoxSjYLJasGiCch9+PqRVDOCGUFvEIqyogl+BeqILL4OJHA==" +"@google-cloud/projectify@^4.0.0": + version "4.0.0" + resolved "https://registry.yarnpkg.com/@google-cloud/projectify/-/projectify-4.0.0.tgz#d600e0433daf51b88c1fa95ac7f02e38e80a07be" + integrity sha512-MmaX6HeSvyPbWGwFq7mXdo0uQZLGBYCwziiLIGq5JVX+/bdI3SAq6bP98trV5eTWfLuvsMcIC1YJOF2vfteLFA== -"@google-cloud/promisify@^3.0.0": - version "3.0.1" - resolved "https://registry.yarnpkg.com/@google-cloud/promisify/-/promisify-3.0.1.tgz#8d724fb280f47d1ff99953aee0c1669b25238c2e" - integrity "sha1-jXJPsoD0fR/5mVOu4MFmmyUjjC4= sha512-z1CjRjtQyBOYL+5Qr9DdYIfrdLBe746jRTYfaYU6MeXkqp7UfYs/jX16lFFVzZ7PGEJvqZNqYUEtb1mvDww4pA==" +"@google-cloud/promisify@^4.0.0": + version "4.0.0" + resolved "https://registry.yarnpkg.com/@google-cloud/promisify/-/promisify-4.0.0.tgz#a906e533ebdd0f754dca2509933334ce58b8c8b1" + integrity sha512-Orxzlfb9c67A15cq2JQEyVc7wEsmFBmHjZWZYQMUyJ1qivXyMwdyNOs9odi79hze+2zqdTtu1E19IM/FtqZ10g== -"@google-cloud/storage@^6.9.3": - version "6.12.0" - resolved 
"https://registry.yarnpkg.com/@google-cloud/storage/-/storage-6.12.0.tgz#a5d3093cc075252dca5bd19a3cfda406ad3a9de1" - integrity "sha1-pdMJPMB1JS3KW9GaPP2kBq06neE= sha512-78nNAY7iiZ4O/BouWMWTD/oSF2YtYgYB3GZirn0To6eBOugjXVoK+GXgUXOl+HlqbAOyHxAVXOlsj3snfbQ1dw==" +"@google-cloud/storage@^7.7.0": + version "7.14.0" + resolved "https://registry.yarnpkg.com/@google-cloud/storage/-/storage-7.14.0.tgz#eda9715f68507949214af804c906eba6d168a214" + integrity sha512-H41bPL2cMfSi4EEnFzKvg7XSb7T67ocSXrmF7MPjfgFB0L6CKGzfIYJheAZi1iqXjz6XaCT1OBf6HCG5vDBTOQ== dependencies: - "@google-cloud/paginator" "^3.0.7" - "@google-cloud/projectify" "^3.0.0" - "@google-cloud/promisify" "^3.0.0" + "@google-cloud/paginator" "^5.0.0" + "@google-cloud/projectify" "^4.0.0" + "@google-cloud/promisify" "^4.0.0" abort-controller "^3.0.0" async-retry "^1.3.3" - compressible "^2.0.12" - duplexify "^4.0.0" - ent "^2.2.0" - extend "^3.0.2" - fast-xml-parser "^4.2.2" - gaxios "^5.0.0" - google-auth-library "^8.0.1" + duplexify "^4.1.3" + fast-xml-parser "^4.4.1" + gaxios "^6.0.2" + google-auth-library "^9.6.3" + html-entities "^2.5.2" mime "^3.0.0" - mime-types "^2.0.8" p-limit "^3.0.1" - retry-request "^5.0.0" - teeny-request "^8.0.0" + retry-request "^7.0.0" + teeny-request "^9.0.0" uuid "^8.0.0" "@grpc/grpc-js@^1.10.9": @@ -3016,262 +3117,277 @@ "@types/yargs" "^17.0.8" chalk "^4.0.0" -"@jimp/bmp@^0.22.12": - version "0.22.12" - resolved "https://registry.yarnpkg.com/@jimp/bmp/-/bmp-0.22.12.tgz#0316044dc7b1a90274aef266d50349347fb864d4" - integrity sha512-aeI64HD0npropd+AR76MCcvvRaa+Qck6loCOS03CkkxGHN5/r336qTM5HPUdHKMDOGzqknuVPA8+kK1t03z12g== +"@jimp/core@1.1.4": + version "1.1.4" + resolved "https://registry.yarnpkg.com/@jimp/core/-/core-1.1.4.tgz#54f0c0877bb015361f2cf7d1e1de6fed07e026a9" + integrity sha512-Pokt0rq2qT9oTbQkYVd4z8nIA0eHu2yI3Gd5SmkKQjQa/lRVWRFazqAJMpPkIQt32gSf2rRUVopp7O7wkjjV8w== dependencies: - "@jimp/utils" "^0.22.12" - bmp-js "^0.1.0" - -"@jimp/core@^0.22.12": - version "0.22.12" - resolved "https://registry.yarnpkg.com/@jimp/core/-/core-0.22.12.tgz#70785ea7d10b138fb65bcfe9f712826f00a10e1d" - integrity sha512-l0RR0dOPyzMKfjUW1uebzueFEDtCOj9fN6pyTYWWOM/VS4BciXQ1VVrJs8pO3kycGYZxncRKhCoygbNr8eEZQA== - dependencies: - "@jimp/utils" "^0.22.12" - any-base "^1.1.0" - buffer "^5.2.0" + "@jimp/file-ops" "1.1.4" + "@jimp/types" "1.1.4" + "@jimp/utils" "1.1.4" + await-to-js "^3.0.0" exif-parser "^0.1.12" - file-type "^16.5.4" - isomorphic-fetch "^3.0.0" - pixelmatch "^4.0.2" - tinycolor2 "^1.6.0" + file-type "^16.0.0" + mime "3" -"@jimp/custom@^0.22.12": - version "0.22.12" - resolved "https://registry.yarnpkg.com/@jimp/custom/-/custom-0.22.12.tgz#236f2a3f016b533c50869ff22ad1ac00dd0c36be" - integrity sha512-xcmww1O/JFP2MrlGUMd3Q78S3Qu6W3mYTXYuIqFq33EorgYHV/HqymHfXy9GjiCJ7OI+7lWx6nYFOzU7M4rd1Q== +"@jimp/diff@1.1.4": + version "1.1.4" + resolved "https://registry.yarnpkg.com/@jimp/diff/-/diff-1.1.4.tgz#505b6f9f738f9a6495f36960662738937fea529b" + integrity sha512-Xc/g1SfphHT9+aeghCxQou8cCmzIArLot31PNXYhx/Bip0Px1wtZHW22sFgCPjGJS6pE/74qRjM0V8VJQYup3w== dependencies: - "@jimp/core" "^0.22.12" + "@jimp/plugin-resize" "1.1.4" + "@jimp/types" "1.1.4" + "@jimp/utils" "1.1.4" + pixelmatch "^5.3.0" -"@jimp/gif@^0.22.12": - version "0.22.12" - resolved "https://registry.yarnpkg.com/@jimp/gif/-/gif-0.22.12.tgz#6caccb45df497fb971b7a88690345596e22163c0" - integrity sha512-y6BFTJgch9mbor2H234VSjd9iwAhaNf/t3US5qpYIs0TSbAvM02Fbc28IaDETj9+4YB4676sz4RcN/zwhfu1pg== +"@jimp/file-ops@1.1.4": + version "1.1.4" + resolved 
"https://registry.yarnpkg.com/@jimp/file-ops/-/file-ops-1.1.4.tgz#3a1670c1ffdd72a848c10a80187b53556fd05131" + integrity sha512-vJqidRRZlQfaOS/DE9FnkFDmu6Fyx5ZtqTRfBDRr8fAPPDC+N6Fh4//0YQ2CO1xstI35WoPPkJDu6Geq+f1b5Q== + +"@jimp/js-bmp@1.1.4": + version "1.1.4" + resolved "https://registry.yarnpkg.com/@jimp/js-bmp/-/js-bmp-1.1.4.tgz#c8c777e2100db8fa69583e7c2ca4cf1d145b7e51" + integrity sha512-fO8dhqfDF08Zw4SXdXD2GqLakR4KInUY6dWkNyOLH+fADsi2jmx/UgcdNiZMGm/iaQSdTdUovgpmLJrr5kQ3Kg== dependencies: - "@jimp/utils" "^0.22.12" + "@jimp/core" "1.1.4" + "@jimp/types" "1.1.4" + "@jimp/utils" "1.1.4" + bmp-ts "^1.0.9" + +"@jimp/js-gif@1.1.4": + version "1.1.4" + resolved "https://registry.yarnpkg.com/@jimp/js-gif/-/js-gif-1.1.4.tgz#5f6d18b250ca3d241de0fd1a0421cff679956ee3" + integrity sha512-/+W2hCPljZg4xEC82W4Zl/gy3ZzQVD05jYovviuHx+T3d/8y/GZWElDp6dHkBefnZ1P3ZEC+sBtLUIyuAz7c4A== + dependencies: + "@jimp/core" "1.1.4" + "@jimp/types" "1.1.4" gifwrap "^0.10.1" - omggif "^1.0.9" + omggif "^1.0.10" -"@jimp/jpeg@^0.22.12": - version "0.22.12" - resolved "https://registry.yarnpkg.com/@jimp/jpeg/-/jpeg-0.22.12.tgz#b5c74a5aac9826245311370dda8c71a1fcca05ed" - integrity sha512-Rq26XC/uQWaQKyb/5lksCTCxXhtY01NJeBN+dQv5yNYedN0i7iYu+fXEoRsfaJ8xZzjoANH8sns7rVP4GE7d/Q== +"@jimp/js-jpeg@1.1.4": + version "1.1.4" + resolved "https://registry.yarnpkg.com/@jimp/js-jpeg/-/js-jpeg-1.1.4.tgz#b84065aca4f5631497321883f09e841fae7ffc1a" + integrity sha512-Qt7U2MLuLd7fpA9m7LEUvf4oEjYofJtxi7a4XApkHOtRC7+l2KBEpiw2EGwCd1AQ8dnryaO5ehFqALhiIjcv+w== dependencies: - "@jimp/utils" "^0.22.12" + "@jimp/core" "1.1.4" + "@jimp/types" "1.1.4" jpeg-js "^0.4.4" -"@jimp/plugin-blit@^0.22.12": - version "0.22.12" - resolved "https://registry.yarnpkg.com/@jimp/plugin-blit/-/plugin-blit-0.22.12.tgz#0fa8320767fda77434b4408798655ff7c7e415d4" - integrity sha512-xslz2ZoFZOPLY8EZ4dC29m168BtDx95D6K80TzgUi8gqT7LY6CsajWO0FAxDwHz6h0eomHMfyGX0stspBrTKnQ== +"@jimp/js-png@1.1.4": + version "1.1.4" + resolved "https://registry.yarnpkg.com/@jimp/js-png/-/js-png-1.1.4.tgz#128670c6f3de2d7291bb53cbac7fdce95838c2c4" + integrity sha512-F+8d0cHlS5MJnvle5TbQRhe7UIyqbZJlrqYemrfARTeoyhUQo5NYfeOmnnyABl1Jiwvhe7cWzKnlXRkhJZzS6g== dependencies: - "@jimp/utils" "^0.22.12" + "@jimp/core" "1.1.4" + "@jimp/types" "1.1.4" + pngjs "^7.0.0" -"@jimp/plugin-blur@^0.22.12": - version "0.22.12" - resolved "https://registry.yarnpkg.com/@jimp/plugin-blur/-/plugin-blur-0.22.12.tgz#0c37b2ff4e588b45f4307b4f13d3d0eef813920d" - integrity sha512-S0vJADTuh1Q9F+cXAwFPlrKWzDj2F9t/9JAbUvaaDuivpyWuImEKXVz5PUZw2NbpuSHjwssbTpOZ8F13iJX4uw== +"@jimp/js-tiff@1.1.4": + version "1.1.4" + resolved "https://registry.yarnpkg.com/@jimp/js-tiff/-/js-tiff-1.1.4.tgz#e85a7e228e91098c01f84045f9f0e63bfab1d121" + integrity sha512-kopUh2c2vxNjeAljniP8jQnWGWdhlFUfP6RySAnRpRDbp9LhTrpYGngKf/fOxv8MMEXOifGNvQlvzgOrnmF4sQ== dependencies: - "@jimp/utils" "^0.22.12" + "@jimp/core" "1.1.4" + "@jimp/types" "1.1.4" + utif2 "^4.1.0" -"@jimp/plugin-circle@^0.22.12": - version "0.22.12" - resolved "https://registry.yarnpkg.com/@jimp/plugin-circle/-/plugin-circle-0.22.12.tgz#9fffda83d3fc5bad8c1e1492b15b1433cb42e16e" - integrity sha512-SWVXx1yiuj5jZtMijqUfvVOJBwOifFn0918ou4ftoHgegc5aHWW5dZbYPjvC9fLpvz7oSlptNl2Sxr1zwofjTg== +"@jimp/plugin-blit@1.1.4": + version "1.1.4" + resolved "https://registry.yarnpkg.com/@jimp/plugin-blit/-/plugin-blit-1.1.4.tgz#07a0ec5c5890697cf4905b5eaf58403920c6848a" + integrity sha512-mwiZp7tSId/2LyFzct456rMulbi+J9Mm9jQ1jhWt7TPM4qjobFXHem5glyU1aNf9CpHcsOP83RUj5me7DavvEg== dependencies: - "@jimp/utils" 
"^0.22.12" + "@jimp/types" "1.1.4" + "@jimp/utils" "1.1.4" + zod "^3.23.8" -"@jimp/plugin-color@^0.22.12": - version "0.22.12" - resolved "https://registry.yarnpkg.com/@jimp/plugin-color/-/plugin-color-0.22.12.tgz#1e49f2e7387186507e917b0686599767c15be336" - integrity sha512-xImhTE5BpS8xa+mAN6j4sMRWaUgUDLoaGHhJhpC+r7SKKErYDR0WQV4yCE4gP+N0gozD0F3Ka1LUSaMXrn7ZIA== +"@jimp/plugin-blur@1.1.4": + version "1.1.4" + resolved "https://registry.yarnpkg.com/@jimp/plugin-blur/-/plugin-blur-1.1.4.tgz#e55fba4af64f095d51036b2dcbdd2d9f93d3c81a" + integrity sha512-XH+NGrKOQbs5Q0WF4HToWSUz5ts4xRABFcAIDgs9O34iYdTL3K9lPMHAOH+LrB+2uWMzguQQncdEJrPKgNXC4Q== dependencies: - "@jimp/utils" "^0.22.12" + "@jimp/core" "1.1.4" + "@jimp/utils" "1.1.4" + +"@jimp/plugin-circle@1.1.4": + version "1.1.4" + resolved "https://registry.yarnpkg.com/@jimp/plugin-circle/-/plugin-circle-1.1.4.tgz#bad7981483219554587ded91e63ec4aa69eb0884" + integrity sha512-zOemNyA5VIgWnC+NQys7FCqpFt6jN7Hvp//G9pL+oD9sXDLQbkR65ZHdpI7iglVtxsq3yc7hFe2ojCRCu3AbSg== + dependencies: + "@jimp/types" "1.1.4" + zod "^3.23.8" + +"@jimp/plugin-color@1.1.4": + version "1.1.4" + resolved "https://registry.yarnpkg.com/@jimp/plugin-color/-/plugin-color-1.1.4.tgz#bf142840d7570e7964a136f278de6f8134686417" + integrity sha512-j7xJqO9Cr45sLw+UYwCRtoeWl8/mZsBmZEAGdpx4ny2vHD0IMD3S56NcTuSLJ9zFtuyIEJkQFUNFUDaxVxVjag== + dependencies: + "@jimp/core" "1.1.4" + "@jimp/types" "1.1.4" + "@jimp/utils" "1.1.4" tinycolor2 "^1.6.0" + zod "^3.23.8" -"@jimp/plugin-contain@^0.22.12": - version "0.22.12" - resolved "https://registry.yarnpkg.com/@jimp/plugin-contain/-/plugin-contain-0.22.12.tgz#ed5ed9af3d4afd02a7568ff8d60603cff340e3f3" - integrity sha512-Eo3DmfixJw3N79lWk8q/0SDYbqmKt1xSTJ69yy8XLYQj9svoBbyRpSnHR+n9hOw5pKXytHwUW6nU4u1wegHNoQ== +"@jimp/plugin-contain@1.1.4": + version "1.1.4" + resolved "https://registry.yarnpkg.com/@jimp/plugin-contain/-/plugin-contain-1.1.4.tgz#185b9b3fdb85d61b38c8ea9ac060bb05df4e9e68" + integrity sha512-XIMURmXFDdZYyKsETyopBloqndJKk7ohtE6ujO/o//O5/Op9A15deh8yais39A5j8uSyHEsvBwdGtGm4co7rnw== dependencies: - "@jimp/utils" "^0.22.12" + "@jimp/core" "1.1.4" + "@jimp/plugin-blit" "1.1.4" + "@jimp/plugin-resize" "1.1.4" + "@jimp/types" "1.1.4" + "@jimp/utils" "1.1.4" + zod "^3.23.8" -"@jimp/plugin-cover@^0.22.12": - version "0.22.12" - resolved "https://registry.yarnpkg.com/@jimp/plugin-cover/-/plugin-cover-0.22.12.tgz#4abbfabe4c78c71d8d46e707c35a65dc55f08afd" - integrity sha512-z0w/1xH/v/knZkpTNx+E8a7fnasQ2wHG5ze6y5oL2dhH1UufNua8gLQXlv8/W56+4nJ1brhSd233HBJCo01BXA== +"@jimp/plugin-cover@1.1.4": + version "1.1.4" + resolved "https://registry.yarnpkg.com/@jimp/plugin-cover/-/plugin-cover-1.1.4.tgz#3b270c4526e24652f772f8a1aa940dc88118e24a" + integrity sha512-VxaQhcCYeJRQcNXrLbOUcn/KAVmVgTNexLucjUvm8uSWCyDfO+HJ6okL/qyux2h05asyCcFXz7zeNswEqjePSg== dependencies: - "@jimp/utils" "^0.22.12" + "@jimp/core" "1.1.4" + "@jimp/plugin-crop" "1.1.4" + "@jimp/plugin-resize" "1.1.4" + "@jimp/types" "1.1.4" + zod "^3.23.8" -"@jimp/plugin-crop@^0.22.12": - version "0.22.12" - resolved "https://registry.yarnpkg.com/@jimp/plugin-crop/-/plugin-crop-0.22.12.tgz#e28329a9f285071442998560b040048d2ef5c32e" - integrity sha512-FNuUN0OVzRCozx8XSgP9MyLGMxNHHJMFt+LJuFjn1mu3k0VQxrzqbN06yIl46TVejhyAhcq5gLzqmSCHvlcBVw== +"@jimp/plugin-crop@1.1.4": + version "1.1.4" + resolved "https://registry.yarnpkg.com/@jimp/plugin-crop/-/plugin-crop-1.1.4.tgz#4d6729532f9229b6e54ccf45a72c147872ab3f9f" + integrity 
sha512-RejGsKWoG0ji2YwvlKKIEnCxZFGHZ7dwcmFIHiWOZs+fhT+HoHbDy9QEIT+MmgWeeIVm0B3MrA/oBMLEwaJbzg== dependencies: - "@jimp/utils" "^0.22.12" + "@jimp/core" "1.1.4" + "@jimp/types" "1.1.4" + "@jimp/utils" "1.1.4" + zod "^3.23.8" -"@jimp/plugin-displace@^0.22.12": - version "0.22.12" - resolved "https://registry.yarnpkg.com/@jimp/plugin-displace/-/plugin-displace-0.22.12.tgz#2e4b2b989a23da6687c49f2f628e1e6d686ec9b6" - integrity sha512-qpRM8JRicxfK6aPPqKZA6+GzBwUIitiHaZw0QrJ64Ygd3+AsTc7BXr+37k2x7QcyCvmKXY4haUrSIsBug4S3CA== +"@jimp/plugin-displace@1.1.4": + version "1.1.4" + resolved "https://registry.yarnpkg.com/@jimp/plugin-displace/-/plugin-displace-1.1.4.tgz#63b536a9f550cb03d233970478640fa5973cfae3" + integrity sha512-X+yMdj4DZu/p5YZ9Go7k3HfkC2XTw/5am/p9Fn2xoOJwGa+LIDCAAJ/xuVw+qJMuyvhjIa5rck39yePvBumLyg== dependencies: - "@jimp/utils" "^0.22.12" + "@jimp/types" "1.1.4" + "@jimp/utils" "1.1.4" + zod "^3.23.8" -"@jimp/plugin-dither@^0.22.12": - version "0.22.12" - resolved "https://registry.yarnpkg.com/@jimp/plugin-dither/-/plugin-dither-0.22.12.tgz#3cc5f3a58dbf85653c4e532d31a756a4fc8cabf7" - integrity sha512-jYgGdSdSKl1UUEanX8A85v4+QUm+PE8vHFwlamaKk89s+PXQe7eVE3eNeSZX4inCq63EHL7cX580dMqkoC3ZLw== +"@jimp/plugin-dither@1.1.4": + version "1.1.4" + resolved "https://registry.yarnpkg.com/@jimp/plugin-dither/-/plugin-dither-1.1.4.tgz#96ae3a59b66f5f9ee9b7dc0a4e7d572a6c38e2a0" + integrity sha512-GvyRicPVpxlyol304C4v3T/OpJiuER4ibIhMTAKPx393ByvRgoo1r69nVRfEbmYoKAlwxEA+DISoDgBWYFq4yw== dependencies: - "@jimp/utils" "^0.22.12" + "@jimp/types" "1.1.4" -"@jimp/plugin-fisheye@^0.22.12": - version "0.22.12" - resolved "https://registry.yarnpkg.com/@jimp/plugin-fisheye/-/plugin-fisheye-0.22.12.tgz#77aef2f3ec59c0bafbd2dbc94b89eab60ce05a3e" - integrity sha512-LGuUTsFg+fOp6KBKrmLkX4LfyCy8IIsROwoUvsUPKzutSqMJnsm3JGDW2eOmWIS/jJpPaeaishjlxvczjgII+Q== +"@jimp/plugin-fisheye@1.1.4": + version "1.1.4" + resolved "https://registry.yarnpkg.com/@jimp/plugin-fisheye/-/plugin-fisheye-1.1.4.tgz#1f2b611e939c9546c5d9d9abe416d1e10407d88a" + integrity sha512-mX2yUzndi9esrcEIv9wQIChTLhehZ0SNjRY81BMS9vxa3poxrLyNDq2GHSVmWcehSpkMmATVYKQy6AcLcfynSA== dependencies: - "@jimp/utils" "^0.22.12" + "@jimp/types" "1.1.4" + "@jimp/utils" "1.1.4" + zod "^3.23.8" -"@jimp/plugin-flip@^0.22.12": - version "0.22.12" - resolved "https://registry.yarnpkg.com/@jimp/plugin-flip/-/plugin-flip-0.22.12.tgz#7e2154592da01afcf165a3f9d1d25032aa8d8c57" - integrity sha512-m251Rop7GN8W0Yo/rF9LWk6kNclngyjIJs/VXHToGQ6EGveOSTSQaX2Isi9f9lCDLxt+inBIb7nlaLLxnvHX8Q== +"@jimp/plugin-flip@1.1.4": + version "1.1.4" + resolved "https://registry.yarnpkg.com/@jimp/plugin-flip/-/plugin-flip-1.1.4.tgz#ef8c6734a16b4385cb70bd8b84a2dbd14941f295" + integrity sha512-dhhM1tY21QqnaSvgh9Evpq09+IgAfeZJwLTTJnoWN5j+suE/+K9fIlMSx8XKbv3hBvOBVPHUrq8xoiTLHryr5w== dependencies: - "@jimp/utils" "^0.22.12" + "@jimp/types" "1.1.4" + zod "^3.23.8" -"@jimp/plugin-gaussian@^0.22.12": - version "0.22.12" - resolved "https://registry.yarnpkg.com/@jimp/plugin-gaussian/-/plugin-gaussian-0.22.12.tgz#49a40950cedbbea6c84b3a6bccc45365fe78d6b7" - integrity sha512-sBfbzoOmJ6FczfG2PquiK84NtVGeScw97JsCC3rpQv1PHVWyW+uqWFF53+n3c8Y0P2HWlUjflEla2h/vWShvhg== +"@jimp/plugin-hash@1.1.4": + version "1.1.4" + resolved "https://registry.yarnpkg.com/@jimp/plugin-hash/-/plugin-hash-1.1.4.tgz#3af4a37bf31fbfb15a3263245efd07da10b7981f" + integrity sha512-nmjnQwxcNVTq7qlkUuX5OzdwO/F+mnE2QT+TZ6VEAphPT8Iu7ZaJfYd3wxAbon9UrbxZULFnhiEOs6yE2dWYaw== dependencies: - "@jimp/utils" "^0.22.12" + "@jimp/core" "1.1.4" + "@jimp/js-bmp" 
"1.1.4" + "@jimp/js-jpeg" "1.1.4" + "@jimp/js-png" "1.1.4" + "@jimp/js-tiff" "1.1.4" + "@jimp/plugin-color" "1.1.4" + "@jimp/plugin-resize" "1.1.4" + "@jimp/types" "1.1.4" + "@jimp/utils" "1.1.4" + any-base "^1.1.0" -"@jimp/plugin-invert@^0.22.12": - version "0.22.12" - resolved "https://registry.yarnpkg.com/@jimp/plugin-invert/-/plugin-invert-0.22.12.tgz#c569e85c1f59911a9a33ef36a51c9cf26065078e" - integrity sha512-N+6rwxdB+7OCR6PYijaA/iizXXodpxOGvT/smd/lxeXsZ/empHmFFFJ/FaXcYh19Tm04dGDaXcNF/dN5nm6+xQ== +"@jimp/plugin-mask@1.1.4": + version "1.1.4" + resolved "https://registry.yarnpkg.com/@jimp/plugin-mask/-/plugin-mask-1.1.4.tgz#6ad63a0f8c3c5b99439ba79e2a48bb00bfbc7e36" + integrity sha512-86Duc7r9kdv26oaApwHtFULMHxLCBoBdeAA/PyH1RRsZy2eu+M8hGFYf99vJlLBgJslyToGjxIgDE1nsuB1uHA== dependencies: - "@jimp/utils" "^0.22.12" + "@jimp/types" "1.1.4" + zod "^3.23.8" -"@jimp/plugin-mask@^0.22.12": - version "0.22.12" - resolved "https://registry.yarnpkg.com/@jimp/plugin-mask/-/plugin-mask-0.22.12.tgz#0ac0d9c282f403255b126556521f90fb8e2997f0" - integrity sha512-4AWZg+DomtpUA099jRV8IEZUfn1wLv6+nem4NRJC7L/82vxzLCgXKTxvNvBcNmJjT9yS1LAAmiJGdWKXG63/NA== +"@jimp/plugin-print@1.1.4": + version "1.1.4" + resolved "https://registry.yarnpkg.com/@jimp/plugin-print/-/plugin-print-1.1.4.tgz#dc3c4c9130cda0c3571b3c352546f0d69aae4046" + integrity sha512-EMkakkwi1qrcmQ4nexD2w5ZEUxgesFd7lcYR7DBCXKBYabvC9RDHXaWTxzeFa0VWy3/ZnpoJJ3Qq8I8WqHvjmw== dependencies: - "@jimp/utils" "^0.22.12" + "@jimp/core" "1.1.4" + "@jimp/js-jpeg" "1.1.4" + "@jimp/js-png" "1.1.4" + "@jimp/plugin-blit" "1.1.4" + "@jimp/types" "1.1.4" + parse-bmfont-ascii "^1.0.6" + parse-bmfont-binary "^1.0.6" + parse-bmfont-xml "^1.1.6" + zod "^3.23.8" -"@jimp/plugin-normalize@^0.22.12": - version "0.22.12" - resolved "https://registry.yarnpkg.com/@jimp/plugin-normalize/-/plugin-normalize-0.22.12.tgz#6c44d216f2489cf9b0e0f1e03aa5dfb97f198c53" - integrity sha512-0So0rexQivnWgnhacX4cfkM2223YdExnJTTy6d06WbkfZk5alHUx8MM3yEzwoCN0ErO7oyqEWRnEkGC+As1FtA== +"@jimp/plugin-quantize@1.1.4": + version "1.1.4" + resolved "https://registry.yarnpkg.com/@jimp/plugin-quantize/-/plugin-quantize-1.1.4.tgz#a883eeef1f6144354bb868921895a89ca40d557f" + integrity sha512-+DuC7ZXjNGFoZtsYU2MxXz06E48AIBNg1G/2sq2bXu+PJEU0xvQctEvJEdl+xhRo4sQNQpwOzCZtBvY49VqfNA== dependencies: - "@jimp/utils" "^0.22.12" + image-q "^4.0.0" + zod "^3.23.8" -"@jimp/plugin-print@^0.22.12": - version "0.22.12" - resolved "https://registry.yarnpkg.com/@jimp/plugin-print/-/plugin-print-0.22.12.tgz#6a49020947a9bf21a5a28324425670a25587ca65" - integrity sha512-c7TnhHlxm87DJeSnwr/XOLjJU/whoiKYY7r21SbuJ5nuH+7a78EW1teOaj5gEr2wYEd7QtkFqGlmyGXY/YclyQ== +"@jimp/plugin-resize@1.1.4": + version "1.1.4" + resolved "https://registry.yarnpkg.com/@jimp/plugin-resize/-/plugin-resize-1.1.4.tgz#2107bda637dfa05e01aab341b2ba96cbb8b78835" + integrity sha512-+KY0A5agiOpV60cfs28DZCl3t/8QRVO9kyzrdDqCLkhc/7g2YYrdyhkJZYUq5GBJirkpGGzXZQR2t+g7Sc9dEQ== dependencies: - "@jimp/utils" "^0.22.12" - load-bmfont "^1.4.1" + "@jimp/core" "1.1.4" + "@jimp/types" "1.1.4" + zod "^3.23.8" -"@jimp/plugin-resize@^0.22.12": - version "0.22.12" - resolved "https://registry.yarnpkg.com/@jimp/plugin-resize/-/plugin-resize-0.22.12.tgz#f92acbf73beb97dd1fe93b166ef367a323b81e81" - integrity sha512-3NyTPlPbTnGKDIbaBgQ3HbE6wXbAlFfxHVERmrbqAi8R3r6fQPxpCauA8UVDnieg5eo04D0T8nnnNIX//i/sXg== +"@jimp/plugin-rotate@1.1.4": + version "1.1.4" + resolved "https://registry.yarnpkg.com/@jimp/plugin-rotate/-/plugin-rotate-1.1.4.tgz#2c1c963ba5e522a81514fe3870e1c6c823113a47" + 
integrity sha512-9yRcL5cFcA88kVDt9nco1BUipAjw6uto6AOJi2Bp3FFfdJ84F3rU6Jvcbl4aDyywoJ+J93gKXRo/GAEPk8xpvg== dependencies: - "@jimp/utils" "^0.22.12" + "@jimp/core" "1.1.4" + "@jimp/plugin-crop" "1.1.4" + "@jimp/plugin-resize" "1.1.4" + "@jimp/types" "1.1.4" + "@jimp/utils" "1.1.4" + zod "^3.23.8" -"@jimp/plugin-rotate@^0.22.12": - version "0.22.12" - resolved "https://registry.yarnpkg.com/@jimp/plugin-rotate/-/plugin-rotate-0.22.12.tgz#2235d45aeb4914ff70d99e95750a6d9de45a0d9f" - integrity sha512-9YNEt7BPAFfTls2FGfKBVgwwLUuKqy+E8bDGGEsOqHtbuhbshVGxN2WMZaD4gh5IDWvR+emmmPPWGgaYNYt1gA== +"@jimp/plugin-threshold@1.1.4": + version "1.1.4" + resolved "https://registry.yarnpkg.com/@jimp/plugin-threshold/-/plugin-threshold-1.1.4.tgz#6625758b6392124d284dbed07156c63fb155b874" + integrity sha512-d2uTz8iNuW3ogjH/OVEmPtiSzIpr99Dk5mTOXmnojSqT/5Ufs2ngJf3JQ2wIwOyb6pXph+xRqseQjhf1EUXasA== dependencies: - "@jimp/utils" "^0.22.12" + "@jimp/core" "1.1.4" + "@jimp/plugin-color" "1.1.4" + "@jimp/plugin-hash" "1.1.4" + "@jimp/types" "1.1.4" + "@jimp/utils" "1.1.4" + zod "^3.23.8" -"@jimp/plugin-scale@^0.22.12": - version "0.22.12" - resolved "https://registry.yarnpkg.com/@jimp/plugin-scale/-/plugin-scale-0.22.12.tgz#91f1ec3d114ff44092b946a16e66b14d918e32ed" - integrity sha512-dghs92qM6MhHj0HrV2qAwKPMklQtjNpoYgAB94ysYpsXslhRTiPisueSIELRwZGEr0J0VUxpUY7HgJwlSIgGZw== +"@jimp/types@1.1.4": + version "1.1.4" + resolved "https://registry.yarnpkg.com/@jimp/types/-/types-1.1.4.tgz#b46881102fc6d353451e18ccf06a3dab4dbfb0d5" + integrity sha512-Ck7ShGOeRjN1E2NH9YQs1UDD8Sh54XzSjLhbNq3gtbXrDgSAUH2e47K1VLoUHVBdq7COTDlDCBPOFb/kgQz0zQ== dependencies: - "@jimp/utils" "^0.22.12" + zod "^3.23.8" -"@jimp/plugin-shadow@^0.22.12": - version "0.22.12" - resolved "https://registry.yarnpkg.com/@jimp/plugin-shadow/-/plugin-shadow-0.22.12.tgz#52e3a1d55f61ddfcfb3265544f8d23b887a667b8" - integrity sha512-FX8mTJuCt7/3zXVoeD/qHlm4YH2bVqBuWQHXSuBK054e7wFRnRnbSLPUqAwSeYP3lWqpuQzJtgiiBxV3+WWwTg== +"@jimp/utils@1.1.4": + version "1.1.4" + resolved "https://registry.yarnpkg.com/@jimp/utils/-/utils-1.1.4.tgz#5af44854cc74be08253e6ed7dee82a9058765406" + integrity sha512-mkfoOtC3/vVibCQz3MQkbt8FMtuJI56ekcoDBJqcY9Pjyyd7nbOhfWhiLiLYIfcrslJX8pLEq4ewR2PTNRXTfA== dependencies: - "@jimp/utils" "^0.22.12" - -"@jimp/plugin-threshold@^0.22.12": - version "0.22.12" - resolved "https://registry.yarnpkg.com/@jimp/plugin-threshold/-/plugin-threshold-0.22.12.tgz#1efe20e154bf3a1fc4a5cc016092dbacaa60c958" - integrity sha512-4x5GrQr1a/9L0paBC/MZZJjjgjxLYrqSmWd+e+QfAEPvmRxdRoQ5uKEuNgXnm9/weHQBTnQBQsOY2iFja+XGAw== - dependencies: - "@jimp/utils" "^0.22.12" - -"@jimp/plugins@^0.22.12": - version "0.22.12" - resolved "https://registry.yarnpkg.com/@jimp/plugins/-/plugins-0.22.12.tgz#45a3b96d2d24cec21d4f8b79d1cfcec6fcb2f1d4" - integrity sha512-yBJ8vQrDkBbTgQZLty9k4+KtUQdRjsIDJSPjuI21YdVeqZxYywifHl4/XWILoTZsjTUASQcGoH0TuC0N7xm3ww== - dependencies: - "@jimp/plugin-blit" "^0.22.12" - "@jimp/plugin-blur" "^0.22.12" - "@jimp/plugin-circle" "^0.22.12" - "@jimp/plugin-color" "^0.22.12" - "@jimp/plugin-contain" "^0.22.12" - "@jimp/plugin-cover" "^0.22.12" - "@jimp/plugin-crop" "^0.22.12" - "@jimp/plugin-displace" "^0.22.12" - "@jimp/plugin-dither" "^0.22.12" - "@jimp/plugin-fisheye" "^0.22.12" - "@jimp/plugin-flip" "^0.22.12" - "@jimp/plugin-gaussian" "^0.22.12" - "@jimp/plugin-invert" "^0.22.12" - "@jimp/plugin-mask" "^0.22.12" - "@jimp/plugin-normalize" "^0.22.12" - "@jimp/plugin-print" "^0.22.12" - "@jimp/plugin-resize" "^0.22.12" - "@jimp/plugin-rotate" "^0.22.12" - 
"@jimp/plugin-scale" "^0.22.12" - "@jimp/plugin-shadow" "^0.22.12" - "@jimp/plugin-threshold" "^0.22.12" - timm "^1.6.1" - -"@jimp/png@^0.22.12": - version "0.22.12" - resolved "https://registry.yarnpkg.com/@jimp/png/-/png-0.22.12.tgz#e033586caf38d9c9d33808e92eb87c4d7f0aa1eb" - integrity sha512-Mrp6dr3UTn+aLK8ty/dSKELz+Otdz1v4aAXzV5q53UDD2rbB5joKVJ/ChY310B+eRzNxIovbUF1KVrUsYdE8Hg== - dependencies: - "@jimp/utils" "^0.22.12" - pngjs "^6.0.0" - -"@jimp/tiff@^0.22.12": - version "0.22.12" - resolved "https://registry.yarnpkg.com/@jimp/tiff/-/tiff-0.22.12.tgz#67cac3f2ded6fde3ef631fbf74bea0fa53800123" - integrity sha512-E1LtMh4RyJsoCAfAkBRVSYyZDTtLq9p9LUiiYP0vPtXyxX4BiYBUYihTLSBlCQg5nF2e4OpQg7SPrLdJ66u7jg== - dependencies: - utif2 "^4.0.1" - -"@jimp/types@^0.22.12": - version "0.22.12" - resolved "https://registry.yarnpkg.com/@jimp/types/-/types-0.22.12.tgz#6f83761ba171cb8cd5998fa66a5cbfb0b22d3d8c" - integrity sha512-wwKYzRdElE1MBXFREvCto5s699izFHNVvALUv79GXNbsOVqlwlOxlWJ8DuyOGIXoLP4JW/m30YyuTtfUJgMRMA== - dependencies: - "@jimp/bmp" "^0.22.12" - "@jimp/gif" "^0.22.12" - "@jimp/jpeg" "^0.22.12" - "@jimp/png" "^0.22.12" - "@jimp/tiff" "^0.22.12" - timm "^1.6.1" - -"@jimp/utils@^0.22.12": - version "0.22.12" - resolved "https://registry.yarnpkg.com/@jimp/utils/-/utils-0.22.12.tgz#8ffaed8f2dc2962539ccaf14727ac60793c7a537" - integrity sha512-yJ5cWUknGnilBq97ZXOyOS0HhsHOyAyjHwYfHxGbSyMTohgQI6sVyE8KPgDwH8HHW/nMKXk8TrSwAE71zt716Q== - dependencies: - regenerator-runtime "^0.13.3" + "@jimp/types" "1.1.4" + tinycolor2 "^1.6.0" "@jridgewell/gen-mapping@^0.3.2", "@jridgewell/gen-mapping@^0.3.5": version "0.3.5" @@ -3321,7 +3437,7 @@ "@jridgewell/resolve-uri" "^3.1.0" "@jridgewell/sourcemap-codec" "^1.4.14" -"@js-joda/core@^5.5.3": +"@js-joda/core@^5.6.1": version "5.6.3" resolved "https://registry.yarnpkg.com/@js-joda/core/-/core-5.6.3.tgz#41ae1c07de1ebe0f6dde1abcbc9700a09b9c6056" integrity sha512-T1rRxzdqkEXcou0ZprN1q9yDRlvzCPLqmlNt5IIsGBzoEVgLCCYrKEwc84+TvsXuAc95VAZwtWD2zVsKPY4bcA== @@ -3336,6 +3452,25 @@ resolved "https://registry.yarnpkg.com/@jsdevtools/ono/-/ono-7.1.3.tgz#9df03bbd7c696a5c58885c34aa06da41c8543796" integrity sha512-4JQNk+3mVzK3xh2rqd6RB4J46qUR19azEHBneZyTZM+c456qOrbbM/5xcR8huNCCcbVt7+UmizG6GuUvPvKUYg== +"@jsep-plugin/assignment@^1.2.1": + version "1.3.0" + resolved "https://registry.yarnpkg.com/@jsep-plugin/assignment/-/assignment-1.3.0.tgz#fcfc5417a04933f7ceee786e8ab498aa3ce2b242" + integrity sha512-VVgV+CXrhbMI3aSusQyclHkenWSAm95WaiKrMxRFam3JSUiIaQjoMIw2sEs/OX4XifnqeQUN4DYbJjlA8EfktQ== + +"@jsep-plugin/regex@^1.0.3": + version "1.0.4" + resolved "https://registry.yarnpkg.com/@jsep-plugin/regex/-/regex-1.0.4.tgz#cb2fc423220fa71c609323b9ba7f7d344a755fcc" + integrity sha512-q7qL4Mgjs1vByCaTnDFcBnV9HS7GVPJX5vyVoCgZHNSC9rjwIlmbXG5sUuorR5ndfHAIlJ8pVStxvjXHbNvtUg== + +"@koa/router@13.1.0": + version "13.1.0" + resolved "https://registry.yarnpkg.com/@koa/router/-/router-13.1.0.tgz#43f4c554444ea4f4a148a5735a9525c6d16fd1b5" + integrity sha512-mNVu1nvkpSd8Q8gMebGbCkDWJ51ODetrFvLKYusej+V0ByD4btqHYnPIzTBLXnQMVUlm/oxVwqmWBY3zQfZilw== + dependencies: + http-errors "^2.0.0" + koa-compose "^4.1.0" + path-to-regexp "^6.3.0" + "@koa/router@8.0.8": version "8.0.8" resolved "https://registry.yarnpkg.com/@koa/router/-/router-8.0.8.tgz#95f32d11373d03d89dcb63fabe9ac6f471095236" @@ -3806,10 +3941,15 @@ dependencies: "@octokit/openapi-types" "^18.0.0" +"@opentelemetry/api@>=1.0.0 <1.9.0": + version "1.8.0" + resolved 
"https://registry.yarnpkg.com/@opentelemetry/api/-/api-1.8.0.tgz#5aa7abb48f23f693068ed2999ae627d2f7d902ec" + integrity sha512-I/s6F7yKUDdtMsoBWXJe8Qz40Tui5vsuKCWJEWVL+5q9sSWRzzx6v2KeNsOBEwd94j0eWkpWCH4yB6rZg9Mf0w== + "@opentelemetry/api@^1.0.0", "@opentelemetry/api@^1.0.1": - version "1.7.0" - resolved "https://registry.yarnpkg.com/@opentelemetry/api/-/api-1.7.0.tgz#b139c81999c23e3c8d3c0a7234480e945920fc40" - integrity sha512-AdY5wvN0P2vXBi3b29hxZgSFvdhdxPB9+f0B6s//P9Q8nibRWeA3cHm8UmLpio9ABigkVHJ5NMPk+Mz8VCCyrw== + version "1.9.0" + resolved "https://registry.yarnpkg.com/@opentelemetry/api/-/api-1.9.0.tgz#d03eba68273dc0f7509e2a3d5cba21eae10379fe" + integrity sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg== "@opentelemetry/core@^1.14.0": version "1.19.0" @@ -4242,441 +4382,545 @@ dependencies: "@sinonjs/commons" "^2.0.0" -"@smithy/abort-controller@^2.0.10": - version "2.0.10" - resolved "https://registry.yarnpkg.com/@smithy/abort-controller/-/abort-controller-2.0.10.tgz#a6d0d24973ac35b59cc450c34decd68485fbe2c0" - integrity "sha1-ptDSSXOsNbWcxFDDTezWhIX74sA= sha512-xn7PnFD3m4rQIG00h1lPuDVnC2QMtTFhzRLX3y56KkgFaCysS7vpNevNBgmNUtmJ4eVFc+66Zucwo2KDLdicOg==" +"@smithy/abort-controller@^1.1.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@smithy/abort-controller/-/abort-controller-1.1.0.tgz#2da0d73c504b93ca8bb83bdc8d6b8208d73f418b" + integrity sha512-5imgGUlZL4dW4YWdMYAKLmal9ny/tlenM81QZY7xYyb76z9Z/QOg7oM5Ak9HQl8QfFTlGVWwcMXl+54jroRgEQ== dependencies: - "@smithy/types" "^2.3.4" + "@smithy/types" "^1.2.0" tslib "^2.5.0" -"@smithy/chunked-blob-reader-native@^2.0.0": - version "2.0.0" - resolved "https://registry.yarnpkg.com/@smithy/chunked-blob-reader-native/-/chunked-blob-reader-native-2.0.0.tgz#f6d0eeeb5481026b68b054f45540d924c194d558" - integrity "sha1-9tDu61SBAmtosFT0VUDZJMGU1Vg= sha512-HM8V2Rp1y8+1343tkZUKZllFhEQPNmpNdgFAncbTsxkZ18/gqjk23XXv3qGyXWp412f3o43ZZ1UZHVcHrpRnCQ==" +"@smithy/abort-controller@^3.1.8": + version "3.1.8" + resolved "https://registry.yarnpkg.com/@smithy/abort-controller/-/abort-controller-3.1.8.tgz#ce0c10ddb2b39107d70b06bbb8e4f6e368bc551d" + integrity sha512-+3DOBcUn5/rVjlxGvUPKc416SExarAQ+Qe0bqk30YSUjbepwpS7QN0cyKUSifvLJhdMZ0WPzPP5ymut0oonrpQ== dependencies: - "@smithy/util-base64" "^2.0.0" + "@smithy/types" "^3.7.1" + tslib "^2.6.2" + +"@smithy/chunked-blob-reader-native@^3.0.1": + version "3.0.1" + resolved "https://registry.yarnpkg.com/@smithy/chunked-blob-reader-native/-/chunked-blob-reader-native-3.0.1.tgz#39045ed278ee1b6f4c12715c7565678557274c29" + integrity sha512-VEYtPvh5rs/xlyqpm5NRnfYLZn+q0SRPELbvBV+C/G7IQ+ouTuo+NKKa3ShG5OaFR8NYVMXls9hPYLTvIKKDrQ== + dependencies: + "@smithy/util-base64" "^3.0.0" + tslib "^2.6.2" + +"@smithy/chunked-blob-reader@^4.0.0": + version "4.0.0" + resolved "https://registry.yarnpkg.com/@smithy/chunked-blob-reader/-/chunked-blob-reader-4.0.0.tgz#754099909957fb1986c16eb88afad75919d7129d" + integrity sha512-jSqRnZvkT4egkq/7b6/QRCNXmmYVcHwnJldqJ3IhVpQE2atObVJ137xmGeuGFhjFUr8gCEVAOKwSY79OvpbDaQ== + dependencies: + tslib "^2.6.2" + +"@smithy/config-resolver@^3.0.11", "@smithy/config-resolver@^3.0.12": + version "3.0.12" + resolved "https://registry.yarnpkg.com/@smithy/config-resolver/-/config-resolver-3.0.12.tgz#f355f95fcb5ee932a90871a488a4f2128e8ad3ac" + integrity sha512-YAJP9UJFZRZ8N+UruTeq78zkdjUHmzsY62J4qKWZ4SXB4QXJ/+680EfXXgkYA2xj77ooMqtUY9m406zGNqwivQ== + dependencies: + "@smithy/node-config-provider" "^3.1.11" + "@smithy/types" "^3.7.1" + "@smithy/util-config-provider" 
"^3.0.0" + "@smithy/util-middleware" "^3.0.10" + tslib "^2.6.2" + +"@smithy/core@^2.5.2", "@smithy/core@^2.5.3": + version "2.5.3" + resolved "https://registry.yarnpkg.com/@smithy/core/-/core-2.5.3.tgz#1d5723f676b0d6ec08c515272f0ac03aa59fac72" + integrity sha512-96uW8maifUSmehaeW7uydWn7wBc98NEeNI3zN8vqakGpyCQgzyJaA64Z4FCOUmAdCJkhppd/7SZ798Fo4Xx37g== + dependencies: + "@smithy/middleware-serde" "^3.0.10" + "@smithy/protocol-http" "^4.1.7" + "@smithy/types" "^3.7.1" + "@smithy/util-body-length-browser" "^3.0.0" + "@smithy/util-middleware" "^3.0.10" + "@smithy/util-stream" "^3.3.1" + "@smithy/util-utf8" "^3.0.0" + tslib "^2.6.2" + +"@smithy/credential-provider-imds@^3.2.6", "@smithy/credential-provider-imds@^3.2.7": + version "3.2.7" + resolved "https://registry.yarnpkg.com/@smithy/credential-provider-imds/-/credential-provider-imds-3.2.7.tgz#6eedf87ba0238723ec46d8ce0f18e276685a702d" + integrity sha512-cEfbau+rrWF8ylkmmVAObOmjbTIzKyUC5TkBL58SbLywD0RCBC4JAUKbmtSm2w5KUJNRPGgpGFMvE2FKnuNlWQ== + dependencies: + "@smithy/node-config-provider" "^3.1.11" + "@smithy/property-provider" "^3.1.10" + "@smithy/types" "^3.7.1" + "@smithy/url-parser" "^3.0.10" + tslib "^2.6.2" + +"@smithy/eventstream-codec@^3.1.9": + version "3.1.9" + resolved "https://registry.yarnpkg.com/@smithy/eventstream-codec/-/eventstream-codec-3.1.9.tgz#4271354e75e57d30771fca307da403896c657430" + integrity sha512-F574nX0hhlNOjBnP+noLtsPFqXnWh2L0+nZKCwcu7P7J8k+k+rdIDs+RMnrMwrzhUE4mwMgyN0cYnEn0G8yrnQ== + dependencies: + "@aws-crypto/crc32" "5.2.0" + "@smithy/types" "^3.7.1" + "@smithy/util-hex-encoding" "^3.0.0" + tslib "^2.6.2" + +"@smithy/eventstream-serde-browser@^3.0.12": + version "3.0.13" + resolved "https://registry.yarnpkg.com/@smithy/eventstream-serde-browser/-/eventstream-serde-browser-3.0.13.tgz#191dcf9181e7ab0914ec43d51518d471b9d466ae" + integrity sha512-Nee9m+97o9Qj6/XeLz2g2vANS2SZgAxV4rDBMKGHvFJHU/xz88x2RwCkwsvEwYjSX4BV1NG1JXmxEaDUzZTAtw== + dependencies: + "@smithy/eventstream-serde-universal" "^3.0.12" + "@smithy/types" "^3.7.1" + tslib "^2.6.2" + +"@smithy/eventstream-serde-config-resolver@^3.0.9": + version "3.0.10" + resolved "https://registry.yarnpkg.com/@smithy/eventstream-serde-config-resolver/-/eventstream-serde-config-resolver-3.0.10.tgz#5c0b2ae0bb8e11cfa77851098e46f7350047ec8d" + integrity sha512-K1M0x7P7qbBUKB0UWIL5KOcyi6zqV5mPJoL0/o01HPJr0CSq3A9FYuJC6e11EX6hR8QTIR++DBiGrYveOu6trw== + dependencies: + "@smithy/types" "^3.7.1" + tslib "^2.6.2" + +"@smithy/eventstream-serde-node@^3.0.11": + version "3.0.12" + resolved "https://registry.yarnpkg.com/@smithy/eventstream-serde-node/-/eventstream-serde-node-3.0.12.tgz#7312383e821b5807abf2fe12316c2a8967d022f0" + integrity sha512-kiZymxXvZ4tnuYsPSMUHe+MMfc4FTeFWJIc0Q5wygJoUQM4rVHNghvd48y7ppuulNMbuYt95ah71pYc2+o4JOA== + dependencies: + "@smithy/eventstream-serde-universal" "^3.0.12" + "@smithy/types" "^3.7.1" + tslib "^2.6.2" + +"@smithy/eventstream-serde-universal@^3.0.12": + version "3.0.12" + resolved "https://registry.yarnpkg.com/@smithy/eventstream-serde-universal/-/eventstream-serde-universal-3.0.12.tgz#803d7beb29a3de4a64e91af97331a4654741c35f" + integrity sha512-1i8ifhLJrOZ+pEifTlF0EfZzMLUGQggYQ6WmZ4d5g77zEKf7oZ0kvh1yKWHPjofvOwqrkwRDVuxuYC8wVd662A== + dependencies: + "@smithy/eventstream-codec" "^3.1.9" + "@smithy/types" "^3.7.1" + tslib "^2.6.2" + +"@smithy/fetch-http-handler@^4.1.0", "@smithy/fetch-http-handler@^4.1.1": + version "4.1.1" + resolved 
"https://registry.yarnpkg.com/@smithy/fetch-http-handler/-/fetch-http-handler-4.1.1.tgz#cead80762af4cdea11e7eeb627ea1c4835265dfa" + integrity sha512-bH7QW0+JdX0bPBadXt8GwMof/jz0H28I84hU1Uet9ISpzUqXqRQ3fEZJ+ANPOhzSEczYvANNl3uDQDYArSFDtA== + dependencies: + "@smithy/protocol-http" "^4.1.7" + "@smithy/querystring-builder" "^3.0.10" + "@smithy/types" "^3.7.1" + "@smithy/util-base64" "^3.0.0" + tslib "^2.6.2" + +"@smithy/hash-blob-browser@^3.1.8": + version "3.1.9" + resolved "https://registry.yarnpkg.com/@smithy/hash-blob-browser/-/hash-blob-browser-3.1.9.tgz#1f2c3ef6afbb0ce3e58a0129753850bb9267aae8" + integrity sha512-wOu78omaUuW5DE+PVWXiRKWRZLecARyP3xcq5SmkXUw9+utgN8HnSnBfrjL2B/4ZxgqPjaAJQkC/+JHf1ITVaQ== + dependencies: + "@smithy/chunked-blob-reader" "^4.0.0" + "@smithy/chunked-blob-reader-native" "^3.0.1" + "@smithy/types" "^3.7.1" + tslib "^2.6.2" + +"@smithy/hash-node@^3.0.9": + version "3.0.10" + resolved "https://registry.yarnpkg.com/@smithy/hash-node/-/hash-node-3.0.10.tgz#93c857b4bff3a48884886440fd9772924887e592" + integrity sha512-3zWGWCHI+FlJ5WJwx73Mw2llYR8aflVyZN5JhoqLxbdPZi6UyKSdCeXAWJw9ja22m6S6Tzz1KZ+kAaSwvydi0g== + dependencies: + "@smithy/types" "^3.7.1" + "@smithy/util-buffer-from" "^3.0.0" + "@smithy/util-utf8" "^3.0.0" + tslib "^2.6.2" + +"@smithy/hash-stream-node@^3.1.8": + version "3.1.9" + resolved "https://registry.yarnpkg.com/@smithy/hash-stream-node/-/hash-stream-node-3.1.9.tgz#97eb416811b7e7b9d036f0271588151b619759e9" + integrity sha512-3XfHBjSP3oDWxLmlxnt+F+FqXpL3WlXs+XXaB6bV9Wo8BBu87fK1dSEsyH7Z4ZHRmwZ4g9lFMdf08m9hoX1iRA== + dependencies: + "@smithy/types" "^3.7.1" + "@smithy/util-utf8" "^3.0.0" + tslib "^2.6.2" + +"@smithy/invalid-dependency@^3.0.9": + version "3.0.10" + resolved "https://registry.yarnpkg.com/@smithy/invalid-dependency/-/invalid-dependency-3.0.10.tgz#8616dee555916c24dec3e33b1e046c525efbfee3" + integrity sha512-Lp2L65vFi+cj0vFMu2obpPW69DU+6O5g3086lmI4XcnRCG8PxvpWC7XyaVwJCxsZFzueHjXnrOH/E0pl0zikfA== + dependencies: + "@smithy/types" "^3.7.1" + tslib "^2.6.2" + +"@smithy/is-array-buffer@^2.2.0": + version "2.2.0" + resolved "https://registry.yarnpkg.com/@smithy/is-array-buffer/-/is-array-buffer-2.2.0.tgz#f84f0d9f9a36601a9ca9381688bd1b726fd39111" + integrity sha512-GGP3O9QFD24uGeAXYUjwSTXARoqpZykHadOmA8G5vfJPK0/DC67qa//0qvqrJzL1xc8WQWX7/yc7fwudjPHPhA== + dependencies: + tslib "^2.6.2" + +"@smithy/is-array-buffer@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@smithy/is-array-buffer/-/is-array-buffer-3.0.0.tgz#9a95c2d46b8768946a9eec7f935feaddcffa5e7a" + integrity sha512-+Fsu6Q6C4RSJiy81Y8eApjEB5gVtM+oFKTffg+jSuwtvomJJrhUJBu2zS8wjXSgH/g1MKEWrzyChTBe6clb5FQ== + dependencies: + tslib "^2.6.2" + +"@smithy/md5-js@^3.0.9": + version "3.0.10" + resolved "https://registry.yarnpkg.com/@smithy/md5-js/-/md5-js-3.0.10.tgz#52ab927cf03cd1d24fed82d8ba936faf5632436e" + integrity sha512-m3bv6dApflt3fS2Y1PyWPUtRP7iuBlvikEOGwu0HsCZ0vE7zcIX+dBoh3e+31/rddagw8nj92j0kJg2TfV+SJA== + dependencies: + "@smithy/types" "^3.7.1" + "@smithy/util-utf8" "^3.0.0" + tslib "^2.6.2" + +"@smithy/middleware-content-length@^3.0.11": + version "3.0.12" + resolved "https://registry.yarnpkg.com/@smithy/middleware-content-length/-/middleware-content-length-3.0.12.tgz#3b248ed1e8f1e0ae67171abb8eae9da7ab7ca613" + integrity sha512-1mDEXqzM20yywaMDuf5o9ue8OkJ373lSPbaSjyEvkWdqELhFMyNNgKGWL/rCSf4KME8B+HlHKuR8u9kRj8HzEQ== + dependencies: + "@smithy/protocol-http" "^4.1.7" + "@smithy/types" "^3.7.1" + tslib "^2.6.2" + +"@smithy/middleware-endpoint@^3.2.2", 
"@smithy/middleware-endpoint@^3.2.3": + version "3.2.3" + resolved "https://registry.yarnpkg.com/@smithy/middleware-endpoint/-/middleware-endpoint-3.2.3.tgz#7dd3df0052fc55891522631a7751e613b6efd68a" + integrity sha512-Hdl9296i/EMptaX7agrSzJZDiz5Y8XPUeBbctTmMtnCguGpqfU3jVsTUan0VLaOhsnquqWLL8Bl5HrlbVGT1og== + dependencies: + "@smithy/core" "^2.5.3" + "@smithy/middleware-serde" "^3.0.10" + "@smithy/node-config-provider" "^3.1.11" + "@smithy/shared-ini-file-loader" "^3.1.11" + "@smithy/types" "^3.7.1" + "@smithy/url-parser" "^3.0.10" + "@smithy/util-middleware" "^3.0.10" + tslib "^2.6.2" + +"@smithy/middleware-retry@^3.0.26": + version "3.0.27" + resolved "https://registry.yarnpkg.com/@smithy/middleware-retry/-/middleware-retry-3.0.27.tgz#2e4dda420178835cd2d416479505d313b601ba21" + integrity sha512-H3J/PjJpLL7Tt+fxDKiOD25sMc94YetlQhCnYeNmina2LZscAdu0ZEZPas/kwePHABaEtqp7hqa5S4UJgMs1Tg== + dependencies: + "@smithy/node-config-provider" "^3.1.11" + "@smithy/protocol-http" "^4.1.7" + "@smithy/service-error-classification" "^3.0.10" + "@smithy/smithy-client" "^3.4.4" + "@smithy/types" "^3.7.1" + "@smithy/util-middleware" "^3.0.10" + "@smithy/util-retry" "^3.0.10" + tslib "^2.6.2" + uuid "^9.0.1" + +"@smithy/middleware-serde@^3.0.10", "@smithy/middleware-serde@^3.0.9": + version "3.0.10" + resolved "https://registry.yarnpkg.com/@smithy/middleware-serde/-/middleware-serde-3.0.10.tgz#5f6c0b57b10089a21d355bd95e9b7d40378454d7" + integrity sha512-MnAuhh+dD14F428ubSJuRnmRsfOpxSzvRhaGVTvd/lrUDE3kxzCCmH8lnVTvoNQnV2BbJ4c15QwZ3UdQBtFNZA== + dependencies: + "@smithy/types" "^3.7.1" + tslib "^2.6.2" + +"@smithy/middleware-stack@^3.0.10", "@smithy/middleware-stack@^3.0.9": + version "3.0.10" + resolved "https://registry.yarnpkg.com/@smithy/middleware-stack/-/middleware-stack-3.0.10.tgz#73e2fde5d151440844161773a17ee13375502baf" + integrity sha512-grCHyoiARDBBGPyw2BeicpjgpsDFWZZxptbVKb3CRd/ZA15F/T6rZjCCuBUjJwdck1nwUuIxYtsS4H9DDpbP5w== + dependencies: + "@smithy/types" "^3.7.1" + tslib "^2.6.2" + +"@smithy/node-config-provider@^3.1.10", "@smithy/node-config-provider@^3.1.11": + version "3.1.11" + resolved "https://registry.yarnpkg.com/@smithy/node-config-provider/-/node-config-provider-3.1.11.tgz#95feba85a5cb3de3fe9adfff1060b35fd556d023" + integrity sha512-URq3gT3RpDikh/8MBJUB+QGZzfS7Bm6TQTqoh4CqE8NBuyPkWa5eUXj0XFcFfeZVgg3WMh1u19iaXn8FvvXxZw== + dependencies: + "@smithy/property-provider" "^3.1.10" + "@smithy/shared-ini-file-loader" "^3.1.11" + "@smithy/types" "^3.7.1" + tslib "^2.6.2" + +"@smithy/node-http-handler@^1.0.2": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@smithy/node-http-handler/-/node-http-handler-1.1.0.tgz#887cee930b520e08043c9f41e463f8d8f5dae127" + integrity sha512-d3kRriEgaIiGXLziAM8bjnaLn1fthCJeTLZIwEIpzQqe6yPX0a+yQoLCTyjb2fvdLwkMoG4p7THIIB5cj5lkbg== + dependencies: + "@smithy/abort-controller" "^1.1.0" + "@smithy/protocol-http" "^1.2.0" + "@smithy/querystring-builder" "^1.1.0" + "@smithy/types" "^1.2.0" tslib "^2.5.0" -"@smithy/chunked-blob-reader@^2.0.0": - version "2.0.0" - resolved "https://registry.yarnpkg.com/@smithy/chunked-blob-reader/-/chunked-blob-reader-2.0.0.tgz#c44fe2c780eaf77f9e5381d982ac99a880cce51b" - integrity "sha1-xE/ix4Dq93+eU4HZgqyZqIDM5Rs= sha512-k+J4GHJsMSAIQPChGBrjEmGS+WbPonCXesoqP9fynIqjn7rdOThdH8FAeCmokP9mxTYKQAKoHCLPzNlm6gh7Wg==" +"@smithy/node-http-handler@^3.3.0", "@smithy/node-http-handler@^3.3.1": + version "3.3.1" + resolved "https://registry.yarnpkg.com/@smithy/node-http-handler/-/node-http-handler-3.3.1.tgz#788fc1c22c21a0cf982f4025ccf9f64217f3164f" 
+ integrity sha512-fr+UAOMGWh6bn4YSEezBCpJn9Ukp9oR4D32sCjCo7U81evE11YePOQ58ogzyfgmjIO79YeOdfXXqr0jyhPQeMg== + dependencies: + "@smithy/abort-controller" "^3.1.8" + "@smithy/protocol-http" "^4.1.7" + "@smithy/querystring-builder" "^3.0.10" + "@smithy/types" "^3.7.1" + tslib "^2.6.2" + +"@smithy/property-provider@^3.1.10", "@smithy/property-provider@^3.1.9": + version "3.1.10" + resolved "https://registry.yarnpkg.com/@smithy/property-provider/-/property-provider-3.1.10.tgz#ae00447c1060c194c3e1b9475f7c8548a70f8486" + integrity sha512-n1MJZGTorTH2DvyTVj+3wXnd4CzjJxyXeOgnTlgNVFxaaMeT4OteEp4QrzF8p9ee2yg42nvyVK6R/awLCakjeQ== + dependencies: + "@smithy/types" "^3.7.1" + tslib "^2.6.2" + +"@smithy/protocol-http@^1.2.0": + version "1.2.0" + resolved "https://registry.yarnpkg.com/@smithy/protocol-http/-/protocol-http-1.2.0.tgz#a554e4dabb14508f0bc2cdef9c3710e2b294be04" + integrity sha512-GfGfruksi3nXdFok5RhgtOnWe5f6BndzYfmEXISD+5gAGdayFGpjWu5pIqIweTudMtse20bGbc+7MFZXT1Tb8Q== + dependencies: + "@smithy/types" "^1.2.0" + tslib "^2.5.0" + +"@smithy/protocol-http@^4.1.6", "@smithy/protocol-http@^4.1.7": + version "4.1.7" + resolved "https://registry.yarnpkg.com/@smithy/protocol-http/-/protocol-http-4.1.7.tgz#5c67e62beb5deacdb94f2127f9a344bdf1b2ed6e" + integrity sha512-FP2LepWD0eJeOTm0SjssPcgqAlDFzOmRXqXmGhfIM52G7Lrox/pcpQf6RP4F21k0+O12zaqQt5fCDOeBtqY6Cg== + dependencies: + "@smithy/types" "^3.7.1" + tslib "^2.6.2" + +"@smithy/querystring-builder@^1.1.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@smithy/querystring-builder/-/querystring-builder-1.1.0.tgz#de6306104640ade34e59be33949db6cc64aa9d7f" + integrity sha512-gDEi4LxIGLbdfjrjiY45QNbuDmpkwh9DX4xzrR2AzjjXpxwGyfSpbJaYhXARw9p17VH0h9UewnNQXNwaQyYMDA== + dependencies: + "@smithy/types" "^1.2.0" + "@smithy/util-uri-escape" "^1.1.0" + tslib "^2.5.0" + +"@smithy/querystring-builder@^3.0.10": + version "3.0.10" + resolved "https://registry.yarnpkg.com/@smithy/querystring-builder/-/querystring-builder-3.0.10.tgz#db8773af85ee3977c82b8e35a5cdd178c621306d" + integrity sha512-nT9CQF3EIJtIUepXQuBFb8dxJi3WVZS3XfuDksxSCSn+/CzZowRLdhDn+2acbBv8R6eaJqPupoI/aRFIImNVPQ== + dependencies: + "@smithy/types" "^3.7.1" + "@smithy/util-uri-escape" "^3.0.0" + tslib "^2.6.2" + +"@smithy/querystring-parser@^3.0.10": + version "3.0.10" + resolved "https://registry.yarnpkg.com/@smithy/querystring-parser/-/querystring-parser-3.0.10.tgz#62db744a1ed2cf90f4c08d2c73d365e033b4a11c" + integrity sha512-Oa0XDcpo9SmjhiDD9ua2UyM3uU01ZTuIrNdZvzwUTykW1PM8o2yJvMh1Do1rY5sUQg4NDV70dMi0JhDx4GyxuQ== + dependencies: + "@smithy/types" "^3.7.1" + tslib "^2.6.2" + +"@smithy/service-error-classification@^3.0.10": + version "3.0.10" + resolved "https://registry.yarnpkg.com/@smithy/service-error-classification/-/service-error-classification-3.0.10.tgz#941c549daf0e9abb84d3def1d9e1e3f0f74f5ba6" + integrity sha512-zHe642KCqDxXLuhs6xmHVgRwy078RfqxP2wRDpIyiF8EmsWXptMwnMwbVa50lw+WOGNrYm9zbaEg0oDe3PTtvQ== + dependencies: + "@smithy/types" "^3.7.1" + +"@smithy/shared-ini-file-loader@^3.1.10", "@smithy/shared-ini-file-loader@^3.1.11": + version "3.1.11" + resolved "https://registry.yarnpkg.com/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-3.1.11.tgz#0b4f98c4a66480956fbbefc4627c5dc09d891aea" + integrity sha512-AUdrIZHFtUgmfSN4Gq9nHu3IkHMa1YDcN+s061Nfm+6pQ0mJy85YQDB0tZBCmls0Vuj22pLwDPmL92+Hvfwwlg== + dependencies: + "@smithy/types" "^3.7.1" + tslib "^2.6.2" + +"@smithy/signature-v4@^4.2.2": + version "4.2.3" + resolved 
"https://registry.yarnpkg.com/@smithy/signature-v4/-/signature-v4-4.2.3.tgz#abbca5e5fe9158422b3125b2956791a325a27f22" + integrity sha512-pPSQQ2v2vu9vc8iew7sszLd0O09I5TRc5zhY71KA+Ao0xYazIG+uLeHbTJfIWGO3BGVLiXjUr3EEeCcEQLjpWQ== + dependencies: + "@smithy/is-array-buffer" "^3.0.0" + "@smithy/protocol-http" "^4.1.7" + "@smithy/types" "^3.7.1" + "@smithy/util-hex-encoding" "^3.0.0" + "@smithy/util-middleware" "^3.0.10" + "@smithy/util-uri-escape" "^3.0.0" + "@smithy/util-utf8" "^3.0.0" + tslib "^2.6.2" + +"@smithy/smithy-client@^3.4.3", "@smithy/smithy-client@^3.4.4": + version "3.4.4" + resolved "https://registry.yarnpkg.com/@smithy/smithy-client/-/smithy-client-3.4.4.tgz#460870dc97d945fa2f390890359cf09d01131e0f" + integrity sha512-dPGoJuSZqvirBq+yROapBcHHvFjChoAQT8YPWJ820aPHHiowBlB3RL1Q4kPT1hx0qKgJuf+HhyzKi5Gbof4fNA== + dependencies: + "@smithy/core" "^2.5.3" + "@smithy/middleware-endpoint" "^3.2.3" + "@smithy/middleware-stack" "^3.0.10" + "@smithy/protocol-http" "^4.1.7" + "@smithy/types" "^3.7.1" + "@smithy/util-stream" "^3.3.1" + tslib "^2.6.2" + +"@smithy/types@^1.2.0": + version "1.2.0" + resolved "https://registry.yarnpkg.com/@smithy/types/-/types-1.2.0.tgz#9dc65767b0ee3d6681704fcc67665d6fc9b6a34e" + integrity sha512-z1r00TvBqF3dh4aHhya7nz1HhvCg4TRmw51fjMrh5do3h+ngSstt/yKlNbHeb9QxJmFbmN8KEVSWgb1bRvfEoA== dependencies: tslib "^2.5.0" -"@smithy/config-resolver@^2.0.10", "@smithy/config-resolver@^2.0.11": - version "2.0.11" - resolved "https://registry.yarnpkg.com/@smithy/config-resolver/-/config-resolver-2.0.11.tgz#20c4711b4e80f94527ee9e4e092cf024471bb09d" - integrity "sha1-IMRxG06A+UUn7p5OCSzwJEcbsJ0= sha512-q97FnlUmbai1c4JlQJgLVBsvSxgV/7Nvg/JK76E1nRq/U5UM56Eqo3dn2fY7JibqgJLg4LPsGdwtIyqyOk35CQ==" +"@smithy/types@^3.7.0", "@smithy/types@^3.7.1": + version "3.7.1" + resolved "https://registry.yarnpkg.com/@smithy/types/-/types-3.7.1.tgz#4af54c4e28351e9101996785a33f2fdbf93debe7" + integrity sha512-XKLcLXZY7sUQgvvWyeaL/qwNPp6V3dWcUjqrQKjSb+tzYiCy340R/c64LV5j+Tnb2GhmunEX0eou+L+m2hJNYA== dependencies: - "@smithy/node-config-provider" "^2.0.13" - "@smithy/types" "^2.3.4" - "@smithy/util-config-provider" "^2.0.0" - "@smithy/util-middleware" "^2.0.3" - tslib "^2.5.0" + tslib "^2.6.2" -"@smithy/credential-provider-imds@^2.0.0", "@smithy/credential-provider-imds@^2.0.13": - version "2.0.13" - resolved "https://registry.yarnpkg.com/@smithy/credential-provider-imds/-/credential-provider-imds-2.0.13.tgz#9904912bc236d25d870add10b6eb138570bf5732" - integrity "sha1-mQSRK8I20l2HCt0QtusThXC/VzI= sha512-/xe3wNoC4j+BeTemH9t2gSKLBfyZmk8LXB2pQm/TOEYi+QhBgT+PSolNDfNAhrR68eggNE17uOimsrnwSkCt4w==" +"@smithy/url-parser@^3.0.10", "@smithy/url-parser@^3.0.9": + version "3.0.10" + resolved "https://registry.yarnpkg.com/@smithy/url-parser/-/url-parser-3.0.10.tgz#f389985a79766cff4a99af14979f01a17ce318da" + integrity sha512-j90NUalTSBR2NaZTuruEgavSdh8MLirf58LoGSk4AtQfyIymogIhgnGUU2Mga2bkMkpSoC9gxb74xBXL5afKAQ== dependencies: - "@smithy/node-config-provider" "^2.0.13" - "@smithy/property-provider" "^2.0.11" - "@smithy/types" "^2.3.4" - "@smithy/url-parser" "^2.0.10" - tslib "^2.5.0" + "@smithy/querystring-parser" "^3.0.10" + "@smithy/types" "^3.7.1" + tslib "^2.6.2" -"@smithy/eventstream-codec@^2.0.10": - version "2.0.10" - resolved "https://registry.yarnpkg.com/@smithy/eventstream-codec/-/eventstream-codec-2.0.10.tgz#dbd46d0ed13abc61b1f08ab249f3097602752933" - integrity "sha1-29RtDtE6vGGx8IqySfMJdgJ1KTM= sha512-3SSDgX2nIsFwif6m+I4+ar4KDcZX463Noes8ekBgQHitULiWvaDZX8XqPaRQSQ4bl1vbeVXHklJfv66MnVO+lw==" 
+"@smithy/util-base64@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@smithy/util-base64/-/util-base64-3.0.0.tgz#f7a9a82adf34e27a72d0719395713edf0e493017" + integrity sha512-Kxvoh5Qtt0CDsfajiZOCpJxgtPHXOKwmM+Zy4waD43UoEMA+qPxxa98aE/7ZhdnBFZFXMOiBR5xbcaMhLtznQQ== dependencies: - "@aws-crypto/crc32" "3.0.0" - "@smithy/types" "^2.3.4" - "@smithy/util-hex-encoding" "^2.0.0" - tslib "^2.5.0" + "@smithy/util-buffer-from" "^3.0.0" + "@smithy/util-utf8" "^3.0.0" + tslib "^2.6.2" -"@smithy/eventstream-serde-browser@^2.0.9": - version "2.0.10" - resolved "https://registry.yarnpkg.com/@smithy/eventstream-serde-browser/-/eventstream-serde-browser-2.0.10.tgz#93054f85194655d7eba27125f4935d247bdc2a8f" - integrity "sha1-kwVPhRlGVdfronEl9JNdJHvcKo8= sha512-/NSUNrWedO9Se80jo/2WcPvqobqCM/0drZ03Kqn1GZpGwVTsdqNj7frVTCUJs/W/JEzOShdMv8ewoKIR7RWPmA==" +"@smithy/util-body-length-browser@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@smithy/util-body-length-browser/-/util-body-length-browser-3.0.0.tgz#86ec2f6256310b4845a2f064e2f571c1ca164ded" + integrity sha512-cbjJs2A1mLYmqmyVl80uoLTJhAcfzMOyPgjwAYusWKMdLeNtzmMz9YxNl3/jRLoxSS3wkqkf0jwNdtXWtyEBaQ== dependencies: - "@smithy/eventstream-serde-universal" "^2.0.10" - "@smithy/types" "^2.3.4" - tslib "^2.5.0" + tslib "^2.6.2" -"@smithy/eventstream-serde-config-resolver@^2.0.9": - version "2.0.10" - resolved "https://registry.yarnpkg.com/@smithy/eventstream-serde-config-resolver/-/eventstream-serde-config-resolver-2.0.10.tgz#ea2f6675a4270fc3eccbb9fda4086f611887b510" - integrity "sha1-6i9mdaQnD8Psy7n9pAhvYRiHtRA= sha512-ag1U0vsC5rhRm7okFzsS6YsvyTRe62jIgJ82+Wr4qoOASx7eCDWdjoqLnrdDY0S4UToF9hZAyo4Du/xrSSSk4g==" +"@smithy/util-body-length-node@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@smithy/util-body-length-node/-/util-body-length-node-3.0.0.tgz#99a291bae40d8932166907fe981d6a1f54298a6d" + integrity sha512-Tj7pZ4bUloNUP6PzwhN7K386tmSmEET9QtQg0TgdNOnxhZvCssHji+oZTUIuzxECRfG8rdm2PMw2WCFs6eIYkA== dependencies: - "@smithy/types" "^2.3.4" - tslib "^2.5.0" + tslib "^2.6.2" -"@smithy/eventstream-serde-node@^2.0.9": - version "2.0.10" - resolved "https://registry.yarnpkg.com/@smithy/eventstream-serde-node/-/eventstream-serde-node-2.0.10.tgz#54af54b9719aa8f74fae5885a72e69b33d5661cf" - integrity "sha1-VK9UuXGaqPdPrliFpy5psz1WYc8= sha512-3+VeofxoVCa+dvqcuzEpnFve8EQJKaYR7UslDFpj6UTZfa7Hxr8o1/cbFkTftFo71PxzYVsR+bsD56EbAO432A==" +"@smithy/util-buffer-from@^2.2.0": + version "2.2.0" + resolved "https://registry.yarnpkg.com/@smithy/util-buffer-from/-/util-buffer-from-2.2.0.tgz#6fc88585165ec73f8681d426d96de5d402021e4b" + integrity sha512-IJdWBbTcMQ6DA0gdNhh/BwrLkDR+ADW5Kr1aZmd4k3DIF6ezMV4R2NIAmT08wQJ3yUK82thHWmC/TnK/wpMMIA== dependencies: - "@smithy/eventstream-serde-universal" "^2.0.10" - "@smithy/types" "^2.3.4" - tslib "^2.5.0" + "@smithy/is-array-buffer" "^2.2.0" + tslib "^2.6.2" -"@smithy/eventstream-serde-universal@^2.0.10": - version "2.0.10" - resolved "https://registry.yarnpkg.com/@smithy/eventstream-serde-universal/-/eventstream-serde-universal-2.0.10.tgz#575a6160a12508341c9c345bf3da7422a590aaae" - integrity "sha1-V1phYKElCDQcnDRb89p0IqWQqq4= sha512-JhJJU1ULLsn5kxKfFe8zOF2tibjxlPIvIB71Kn20aa/OFs+lvXBR0hBGswpovyYyckXH3qU8VxuIOEuS+2G+3A==" +"@smithy/util-buffer-from@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@smithy/util-buffer-from/-/util-buffer-from-3.0.0.tgz#559fc1c86138a89b2edaefc1e6677780c24594e3" + integrity 
sha512-aEOHCgq5RWFbP+UDPvPot26EJHjOC+bRgse5A8V3FSShqd5E5UN4qc7zkwsvJPPAVsf73QwYcHN1/gt/rtLwQA== dependencies: - "@smithy/eventstream-codec" "^2.0.10" - "@smithy/types" "^2.3.4" - tslib "^2.5.0" + "@smithy/is-array-buffer" "^3.0.0" + tslib "^2.6.2" -"@smithy/fetch-http-handler@^2.1.5", "@smithy/fetch-http-handler@^2.2.1": - version "2.2.1" - resolved "https://registry.yarnpkg.com/@smithy/fetch-http-handler/-/fetch-http-handler-2.2.1.tgz#a8abbd339c2c3d76456f4d16e65cf934727fc7ad" - integrity "sha1-qKu9M5wsPXZFb00W5lz5NHJ/x60= sha512-bXyM8PBAIKxVV++2ZSNBEposTDjFQ31XWOdHED+2hWMNvJHUoQqFbECg/uhcVOa6vHie2/UnzIZfXBSTpDBnEw==" +"@smithy/util-config-provider@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@smithy/util-config-provider/-/util-config-provider-3.0.0.tgz#62c6b73b22a430e84888a8f8da4b6029dd5b8efe" + integrity sha512-pbjk4s0fwq3Di/ANL+rCvJMKM5bzAQdE5S/6RL5NXgMExFAi6UgQMPOm5yPaIWPpr+EOXKXRonJ3FoxKf4mCJQ== dependencies: - "@smithy/protocol-http" "^3.0.6" - "@smithy/querystring-builder" "^2.0.10" - "@smithy/types" "^2.3.4" - "@smithy/util-base64" "^2.0.0" - tslib "^2.5.0" + tslib "^2.6.2" -"@smithy/hash-blob-browser@^2.0.9": - version "2.0.10" - resolved "https://registry.yarnpkg.com/@smithy/hash-blob-browser/-/hash-blob-browser-2.0.10.tgz#fa761e02c9a21b9c4bf827139d65376d50356c69" - integrity "sha1-+nYeAsmiG5xL+CcTnWU3bVA1bGk= sha512-U2+wIWWloOZ9DaRuz2sk9f7A6STRTlwdcv+q6abXDvS0TRDk8KGgUmfV5lCZy8yxFxZIA0hvHDNqcd25r4Hrew==" +"@smithy/util-defaults-mode-browser@^3.0.26": + version "3.0.27" + resolved "https://registry.yarnpkg.com/@smithy/util-defaults-mode-browser/-/util-defaults-mode-browser-3.0.27.tgz#d5df39faee8ad4bb5a6920b208469caa9dda2ccb" + integrity sha512-GV8NvPy1vAGp7u5iD/xNKUxCorE4nQzlyl057qRac+KwpH5zq8wVq6rE3lPPeuFLyQXofPN6JwxL1N9ojGapiQ== dependencies: - "@smithy/chunked-blob-reader" "^2.0.0" - "@smithy/chunked-blob-reader-native" "^2.0.0" - "@smithy/types" "^2.3.4" - tslib "^2.5.0" - -"@smithy/hash-node@^2.0.9": - version "2.0.10" - resolved "https://registry.yarnpkg.com/@smithy/hash-node/-/hash-node-2.0.10.tgz#af13889a008880bdc30278b148e0e0b2a6e2d243" - integrity "sha1-rxOImgCIgL3DAnixSODgsqbi0kM= sha512-jSTf6uzPk/Vf+8aQ7tVXeHfjxe9wRXSCqIZcBymSDTf7/YrVxniBdpyN74iI8ZUOx/Pyagc81OK5FROLaEjbXQ==" - dependencies: - "@smithy/types" "^2.3.4" - "@smithy/util-buffer-from" "^2.0.0" - "@smithy/util-utf8" "^2.0.0" - tslib "^2.5.0" - -"@smithy/hash-stream-node@^2.0.9": - version "2.0.10" - resolved "https://registry.yarnpkg.com/@smithy/hash-stream-node/-/hash-stream-node-2.0.10.tgz#6e693b4362fbb031b8fc60e105220874d044ec8d" - integrity "sha1-bmk7Q2L7sDG4/GDhBSIIdNBE7I0= sha512-L58XEGrownZZSpF7Lp0gc0hy+eYKXuPgNz3pQgP5lPFGwBzHdldx2X6o3c6swD6RkcPvTRh0wTUVVGwUotbgnQ==" - dependencies: - "@smithy/types" "^2.3.4" - "@smithy/util-utf8" "^2.0.0" - tslib "^2.5.0" - -"@smithy/invalid-dependency@^2.0.9": - version "2.0.10" - resolved "https://registry.yarnpkg.com/@smithy/invalid-dependency/-/invalid-dependency-2.0.10.tgz#b708e7cfc35214ce664db6aa67465567b97ffd36" - integrity "sha1-twjnz8NSFM5mTbaqZ0ZVZ7l//TY= sha512-zw9p/zsmJ2cFcW4KMz3CJoznlbRvEA6HG2mvEaX5eAca5dq4VGI2MwPDTfmteC/GsnURS4ogoMQ0p6aHM2SDVQ==" - dependencies: - "@smithy/types" "^2.3.4" - tslib "^2.5.0" - -"@smithy/is-array-buffer@^2.0.0": - version "2.0.0" - resolved "https://registry.yarnpkg.com/@smithy/is-array-buffer/-/is-array-buffer-2.0.0.tgz#8fa9b8040651e7ba0b2f6106e636a91354ff7d34" - integrity "sha1-j6m4BAZR57oLL2EG5japE1T/fTQ= 
sha512-z3PjFjMyZNI98JFRJi/U0nGoLWMSJlDjAW4QUX2WNZLas5C0CmVV6LJ01JI0k90l7FvpmixjWxPFmENSClQ7ug==" - dependencies: - tslib "^2.5.0" - -"@smithy/md5-js@^2.0.9": - version "2.0.10" - resolved "https://registry.yarnpkg.com/@smithy/md5-js/-/md5-js-2.0.10.tgz#8480de1b42abc581cf515e2b8e35542e9248f520" - integrity "sha1-hIDeG0KrxYHPUV4rjjVULpJI9SA= sha512-eA/Ova4/UdQUbMlrbBmnewmukH0zWU6C67HFFR/719vkFNepbnliGjmGksQ9vylz9eD4nfGkZZ5NKZMAcUuzjQ==" - dependencies: - "@smithy/types" "^2.3.4" - "@smithy/util-utf8" "^2.0.0" - tslib "^2.5.0" - -"@smithy/middleware-content-length@^2.0.11": - version "2.0.12" - resolved "https://registry.yarnpkg.com/@smithy/middleware-content-length/-/middleware-content-length-2.0.12.tgz#e6f874f5eef880561f774a4376b73f04b97efc53" - integrity "sha1-5vh09e74gFYfd0pDdrc/BLl+/FM= sha512-QRhJTo5TjG7oF7np6yY4ZO9GDKFVzU/GtcqUqyEa96bLHE3yZHgNmsolOQ97pfxPHmFhH4vDP//PdpAIN3uI1Q==" - dependencies: - "@smithy/protocol-http" "^3.0.6" - "@smithy/types" "^2.3.4" - tslib "^2.5.0" - -"@smithy/middleware-endpoint@^2.0.9": - version "2.0.10" - resolved "https://registry.yarnpkg.com/@smithy/middleware-endpoint/-/middleware-endpoint-2.0.10.tgz#c11d9f75549116453eea0e812e17ec7917ce5bb1" - integrity "sha1-wR2fdVSRFkU+6g6BLhfseRfOW7E= sha512-O6m4puZc16xfenotZUHL4bRlMrwf4gTp+0I5l954M5KNd3dOK18P+FA/IIUgnXF/dX6hlCUcJkBp7nAzwrePKA==" - dependencies: - "@smithy/middleware-serde" "^2.0.10" - "@smithy/types" "^2.3.4" - "@smithy/url-parser" "^2.0.10" - "@smithy/util-middleware" "^2.0.3" - tslib "^2.5.0" - -"@smithy/middleware-retry@^2.0.12": - version "2.0.13" - resolved "https://registry.yarnpkg.com/@smithy/middleware-retry/-/middleware-retry-2.0.13.tgz#ef33b1511a4b01a77e54567165b78e6d0c266e88" - integrity "sha1-7zOxURpLAad+VFZxZbeObQwmbog= sha512-zuOva8xgWC7KYG8rEXyWIcZv2GWszO83DCTU6IKcf/FKu6OBmSE+EYv3EUcCGY+GfiwCX0EyJExC9Lpq9b0w5Q==" - dependencies: - "@smithy/node-config-provider" "^2.0.13" - "@smithy/protocol-http" "^3.0.6" - "@smithy/service-error-classification" "^2.0.3" - "@smithy/types" "^2.3.4" - "@smithy/util-middleware" "^2.0.3" - "@smithy/util-retry" "^2.0.3" - tslib "^2.5.0" - uuid "^8.3.2" - -"@smithy/middleware-serde@^2.0.10", "@smithy/middleware-serde@^2.0.9": - version "2.0.10" - resolved "https://registry.yarnpkg.com/@smithy/middleware-serde/-/middleware-serde-2.0.10.tgz#4b0e5f838c7d7621cabf7cfdd6cec4c7f4d52a3f" - integrity "sha1-Sw5fg4x9diHKv3z91s7Ex/TVKj8= sha512-+A0AFqs768256H/BhVEsBF6HijFbVyAwYRVXY/izJFkTalVWJOp4JA0YdY0dpXQd+AlW0tzs+nMQCE1Ew+DcgQ==" - dependencies: - "@smithy/types" "^2.3.4" - tslib "^2.5.0" - -"@smithy/middleware-stack@^2.0.2", "@smithy/middleware-stack@^2.0.4": - version "2.0.4" - resolved "https://registry.yarnpkg.com/@smithy/middleware-stack/-/middleware-stack-2.0.4.tgz#cf199dd4d6eb3a3562e6757804faa91165693395" - integrity "sha1-zxmd1NbrOjVi5nV4BPqpEWVpM5U= sha512-MW0KNKfh8ZGLagMZnxcLJWPNXoKqW6XV/st5NnCBmmA2e2JhrUjU0AJ5Ca/yjTyNEKs3xH7AQDwp1YmmpEpmQQ==" - dependencies: - "@smithy/types" "^2.3.4" - tslib "^2.5.0" - -"@smithy/node-config-provider@^2.0.12", "@smithy/node-config-provider@^2.0.13": - version "2.0.13" - resolved "https://registry.yarnpkg.com/@smithy/node-config-provider/-/node-config-provider-2.0.13.tgz#26c95cebbb8bf9ef5dd703ab4e00ff80de34e15f" - integrity "sha1-Jslc67uL+e9d1wOrTgD/gN404V8= sha512-pPpLqYuJcOq1sj1EGu+DoZK47DUS4gepqSTNgRezmrjnzNlSU2/Dcc9Ebzs+WZ0Z5vXKazuE+k+NksFLo07/AA==" - dependencies: - "@smithy/property-provider" "^2.0.11" - "@smithy/shared-ini-file-loader" "^2.0.12" - "@smithy/types" "^2.3.4" - tslib "^2.5.0" - 
-"@smithy/node-http-handler@^2.1.5", "@smithy/node-http-handler@^2.1.6": - version "2.1.6" - resolved "https://registry.yarnpkg.com/@smithy/node-http-handler/-/node-http-handler-2.1.6.tgz#c2913363bbf28f315461bd54ef9a5394f1686776" - integrity "sha1-wpEzY7vyjzFUYb1U75pTlPFoZ3Y= sha512-NspvD3aCwiUNtoSTcVHz0RZz1tQ/SaRIe1KPF+r0mAdCZ9eWuhIeJT8ZNPYa1ITn7/Lgg64IyFjqPynZ8KnYQw==" - dependencies: - "@smithy/abort-controller" "^2.0.10" - "@smithy/protocol-http" "^3.0.6" - "@smithy/querystring-builder" "^2.0.10" - "@smithy/types" "^2.3.4" - tslib "^2.5.0" - -"@smithy/property-provider@^2.0.0", "@smithy/property-provider@^2.0.11": - version "2.0.11" - resolved "https://registry.yarnpkg.com/@smithy/property-provider/-/property-provider-2.0.11.tgz#c6e03e4f6f886851339c3dfaf8cd8ae3b2878fa3" - integrity "sha1-xuA+T2+IaFEznD36+M2K47KHj6M= sha512-kzuOadu6XvrnlF1iXofpKXYmo4oe19st9/DE8f5gHNaFepb4eTkR8gD8BSdTnNnv7lxfv6uOwZPg4VS6hemX1w==" - dependencies: - "@smithy/types" "^2.3.4" - tslib "^2.5.0" - -"@smithy/protocol-http@^3.0.5", "@smithy/protocol-http@^3.0.6": - version "3.0.6" - resolved "https://registry.yarnpkg.com/@smithy/protocol-http/-/protocol-http-3.0.6.tgz#c33c128cc0f7096bf4fcdcc6d14d156ba5cd5b7c" - integrity "sha1-wzwSjMD3CWv0/NzG0U0Va6XNW3w= sha512-F0jAZzwznMmHaggiZgc7YoS08eGpmLvhVktY/Taz6+OAOHfyIqWSDNgFqYR+WHW9z5fp2XvY4mEUrQgYMQ71jw==" - dependencies: - "@smithy/types" "^2.3.4" - tslib "^2.5.0" - -"@smithy/querystring-builder@^2.0.10": - version "2.0.10" - resolved "https://registry.yarnpkg.com/@smithy/querystring-builder/-/querystring-builder-2.0.10.tgz#b06aa958b6ec1c56254d8cc41a19882625fd1c05" - integrity "sha1-sGqpWLbsHFYlTYzEGhmIJiX9HAU= sha512-uujJGp8jzrrU1UHme8sUKEbawQTcTmUWsh8rbGXYD/lMwNLQ+9jQ9dMDWbbH9Hpoa9RER1BeL/38WzGrbpob2w==" - dependencies: - "@smithy/types" "^2.3.4" - "@smithy/util-uri-escape" "^2.0.0" - tslib "^2.5.0" - -"@smithy/querystring-parser@^2.0.10": - version "2.0.10" - resolved "https://registry.yarnpkg.com/@smithy/querystring-parser/-/querystring-parser-2.0.10.tgz#074d770a37feafb0d550094dd8463bdff58515f5" - integrity "sha1-B013Cjf+r7DVUAlN2EY73/WFFfU= sha512-WSD4EU60Q8scacT5PIpx4Bahn6nWpt+MiYLcBkFt6fOj7AssrNeaNIU2Z0g40ftVmrwLcEOIKGX92ynbVDb3ZA==" - dependencies: - "@smithy/types" "^2.3.4" - tslib "^2.5.0" - -"@smithy/service-error-classification@^2.0.3": - version "2.0.3" - resolved "https://registry.yarnpkg.com/@smithy/service-error-classification/-/service-error-classification-2.0.3.tgz#4c7de61d06db5f72437557d429bd74c74988b19e" - integrity "sha1-TH3mHQbbX3JDdVfUKb10x0mIsZ4= sha512-b+m4QCHXb7oKAkM/jHwHrl5gpqhFoMTHF643L0/vAEkegrcUWyh1UjyoHttuHcP5FnHVVy4EtpPtLkEYD+xMFw==" - dependencies: - "@smithy/types" "^2.3.4" - -"@smithy/shared-ini-file-loader@^2.0.12", "@smithy/shared-ini-file-loader@^2.0.6": - version "2.0.12" - resolved "https://registry.yarnpkg.com/@smithy/shared-ini-file-loader/-/shared-ini-file-loader-2.0.12.tgz#30c8a7a36f49734fde2f052bfaeaaf40c1980b55" - integrity "sha1-MMino29Jc0/eLwUr+uqvQMGYC1U= sha512-umi0wc4UBGYullAgYNUVfGLgVpxQyES47cnomTqzCKeKO5oudO4hyDNj+wzrOjqDFwK2nWYGVgS8Y0JgGietrw==" - dependencies: - "@smithy/types" "^2.3.4" - tslib "^2.5.0" - -"@smithy/signature-v4@^2.0.0": - version "2.0.10" - resolved "https://registry.yarnpkg.com/@smithy/signature-v4/-/signature-v4-2.0.10.tgz#89161b3f59071b77713cdf06f98b2e6780580742" - integrity "sha1-iRYbP1kHG3dxPN8G+YsuZ4BYB0I= sha512-S6gcP4IXfO/VMswovrhxPpqvQvMal7ZRjM4NvblHSPpE5aNBYx67UkHFF3kg0hR3tJKqNpBGbxwq0gzpdHKLRA==" - dependencies: - "@smithy/eventstream-codec" "^2.0.10" - "@smithy/is-array-buffer" 
"^2.0.0" - "@smithy/types" "^2.3.4" - "@smithy/util-hex-encoding" "^2.0.0" - "@smithy/util-middleware" "^2.0.3" - "@smithy/util-uri-escape" "^2.0.0" - "@smithy/util-utf8" "^2.0.0" - tslib "^2.5.0" - -"@smithy/smithy-client@^2.1.6", "@smithy/smithy-client@^2.1.9": - version "2.1.9" - resolved "https://registry.yarnpkg.com/@smithy/smithy-client/-/smithy-client-2.1.9.tgz#5a0a185947ae4e66d12d2a6135628dd2fc36924c" - integrity "sha1-WgoYWUeuTmbRLSphNWKN0vw2kkw= sha512-HTicQSn/lOcXKJT+DKJ4YMu51S6PzbWsO8Z6Pwueo30mSoFKXg5P0BDkg2VCDqCVR0mtddM/F6hKhjW6YAV/yg==" - dependencies: - "@smithy/middleware-stack" "^2.0.4" - "@smithy/types" "^2.3.4" - "@smithy/util-stream" "^2.0.14" - tslib "^2.5.0" - -"@smithy/types@^2.3.3", "@smithy/types@^2.3.4": - version "2.3.4" - resolved "https://registry.yarnpkg.com/@smithy/types/-/types-2.3.4.tgz#3b9bc15000af0a0b1f4fda741f78c1580ba15e92" - integrity "sha1-O5vBUACvCgsfT9p0H3jBWAuhXpI= sha512-D7xlM9FOMFyFw7YnMXn9dK2KuN6+JhnrZwVt1fWaIu8hCk5CigysweeIT/H/nCo4YV+s8/oqUdLfexbkPZtvqw==" - dependencies: - tslib "^2.5.0" - -"@smithy/url-parser@^2.0.10", "@smithy/url-parser@^2.0.9": - version "2.0.10" - resolved "https://registry.yarnpkg.com/@smithy/url-parser/-/url-parser-2.0.10.tgz#3261a463b87901d7686f66a9f26efb9f57d8d555" - integrity "sha1-MmGkY7h5Addob2ap8m77n1fY1VU= sha512-4TXQFGjHcqru8aH5VRB4dSnOFKCYNX6SR1Do6fwxZ+ExT2onLsh2W77cHpks7ma26W5jv6rI1u7d0+KX9F0aOw==" - dependencies: - "@smithy/querystring-parser" "^2.0.10" - "@smithy/types" "^2.3.4" - tslib "^2.5.0" - -"@smithy/util-base64@^2.0.0": - version "2.0.0" - resolved "https://registry.yarnpkg.com/@smithy/util-base64/-/util-base64-2.0.0.tgz#1beeabfb155471d1d41c8d0603be1351f883c444" - integrity "sha1-G+6r+xVUcdHUHI0GA74TUfiDxEQ= sha512-Zb1E4xx+m5Lud8bbeYi5FkcMJMnn+1WUnJF3qD7rAdXpaL7UjkFQLdmW5fHadoKbdHpwH9vSR8EyTJFHJs++tA==" - dependencies: - "@smithy/util-buffer-from" "^2.0.0" - tslib "^2.5.0" - -"@smithy/util-body-length-browser@^2.0.0": - version "2.0.0" - resolved "https://registry.yarnpkg.com/@smithy/util-body-length-browser/-/util-body-length-browser-2.0.0.tgz#5447853003b4c73da3bc5f3c5e82c21d592d1650" - integrity "sha1-VEeFMAO0xz2jvF88XoLCHVktFlA= sha512-JdDuS4ircJt+FDnaQj88TzZY3+njZ6O+D3uakS32f2VNnDo3vyEuNdBOh/oFd8Df1zSZOuH1HEChk2AOYDezZg==" - dependencies: - tslib "^2.5.0" - -"@smithy/util-body-length-node@^2.1.0": - version "2.1.0" - resolved "https://registry.yarnpkg.com/@smithy/util-body-length-node/-/util-body-length-node-2.1.0.tgz#313a5f7c5017947baf5fa018bfc22628904bbcfa" - integrity "sha1-MTpffFAXlHuvX6AYv8ImKJBLvPo= sha512-/li0/kj/y3fQ3vyzn36NTLGmUwAICb7Jbe/CsWCktW363gh1MOcpEcSO3mJ344Gv2dqz8YJCLQpb6hju/0qOWw==" - dependencies: - tslib "^2.5.0" - -"@smithy/util-buffer-from@^2.0.0": - version "2.0.0" - resolved "https://registry.yarnpkg.com/@smithy/util-buffer-from/-/util-buffer-from-2.0.0.tgz#7eb75d72288b6b3001bc5f75b48b711513091deb" - integrity "sha1-frddciiLazABvF91tItxFRMJHes= sha512-/YNnLoHsR+4W4Vf2wL5lGv0ksg8Bmk3GEGxn2vEQt52AQaPSCuaO5PM5VM7lP1K9qHRKHwrPGktqVoAHKWHxzw==" - dependencies: - "@smithy/is-array-buffer" "^2.0.0" - tslib "^2.5.0" - -"@smithy/util-config-provider@^2.0.0": - version "2.0.0" - resolved "https://registry.yarnpkg.com/@smithy/util-config-provider/-/util-config-provider-2.0.0.tgz#4dd6a793605559d94267312fd06d0f58784b4c38" - integrity "sha1-Tdank2BVWdlCZzEv0G0PWHhLTDg= sha512-xCQ6UapcIWKxXHEU4Mcs2s7LcFQRiU3XEluM2WcCjjBtQkUN71Tb+ydGmJFPxMUrW/GWMgQEEGipLym4XG0jZg==" - dependencies: - tslib "^2.5.0" - -"@smithy/util-defaults-mode-browser@^2.0.10": - version "2.0.13" - resolved 
"https://registry.yarnpkg.com/@smithy/util-defaults-mode-browser/-/util-defaults-mode-browser-2.0.13.tgz#8136955f1bef6e66cb8a8702693e7685dcd33e26" - integrity "sha1-gTaVXxvvbmbLiocCaT52hdzTPiY= sha512-UmmOdUzaQjqdsl1EjbpEaQxM0VDFqTj6zDuI26/hXN7L/a1k1koTwkYpogHMvunDX3fjrQusg5gv1Td4UsGyog==" - dependencies: - "@smithy/property-provider" "^2.0.11" - "@smithy/smithy-client" "^2.1.9" - "@smithy/types" "^2.3.4" + "@smithy/property-provider" "^3.1.10" + "@smithy/smithy-client" "^3.4.4" + "@smithy/types" "^3.7.1" bowser "^2.11.0" - tslib "^2.5.0" + tslib "^2.6.2" -"@smithy/util-defaults-mode-node@^2.0.12": - version "2.0.15" - resolved "https://registry.yarnpkg.com/@smithy/util-defaults-mode-node/-/util-defaults-mode-node-2.0.15.tgz#24f7b9de978206909ced7b522f24e7f450187372" - integrity "sha1-JPe53peCBpCc7XtSLyTn9FAYc3I= sha512-g6J7MHAibVPMTlXyH3mL+Iet4lMJKFVhsOhJmn+IKG81uy9m42CkRSDlwdQSJAcprLQBIaOPdFxNXQvrg2w1Uw==" +"@smithy/util-defaults-mode-node@^3.0.26": + version "3.0.27" + resolved "https://registry.yarnpkg.com/@smithy/util-defaults-mode-node/-/util-defaults-mode-node-3.0.27.tgz#a7248c9d9cb620827ab57ef9d1867bfe8aef42d0" + integrity sha512-7+4wjWfZqZxZVJvDutO+i1GvL6bgOajEkop4FuR6wudFlqBiqwxw3HoH6M9NgeCd37km8ga8NPp2JacQEtAMPg== dependencies: - "@smithy/config-resolver" "^2.0.11" - "@smithy/credential-provider-imds" "^2.0.13" - "@smithy/node-config-provider" "^2.0.13" - "@smithy/property-provider" "^2.0.11" - "@smithy/smithy-client" "^2.1.9" - "@smithy/types" "^2.3.4" - tslib "^2.5.0" + "@smithy/config-resolver" "^3.0.12" + "@smithy/credential-provider-imds" "^3.2.7" + "@smithy/node-config-provider" "^3.1.11" + "@smithy/property-provider" "^3.1.10" + "@smithy/smithy-client" "^3.4.4" + "@smithy/types" "^3.7.1" + tslib "^2.6.2" -"@smithy/util-hex-encoding@^2.0.0": - version "2.0.0" - resolved "https://registry.yarnpkg.com/@smithy/util-hex-encoding/-/util-hex-encoding-2.0.0.tgz#0aa3515acd2b005c6d55675e377080a7c513b59e" - integrity "sha1-CqNRWs0rAFxtVWdeN3CAp8UTtZ4= sha512-c5xY+NUnFqG6d7HFh1IFfrm3mGl29lC+vF+geHv4ToiuJCBmIfzx6IeHLg+OgRdPFKDXIw6pvi+p3CsscaMcMA==" +"@smithy/util-endpoints@^2.1.5": + version "2.1.6" + resolved "https://registry.yarnpkg.com/@smithy/util-endpoints/-/util-endpoints-2.1.6.tgz#720cbd1a616ad7c099b77780f0cb0f1f9fc5d2df" + integrity sha512-mFV1t3ndBh0yZOJgWxO9J/4cHZVn5UG1D8DeCc6/echfNkeEJWu9LD7mgGH5fHrEdR7LDoWw7PQO6QiGpHXhgA== + dependencies: + "@smithy/node-config-provider" "^3.1.11" + "@smithy/types" "^3.7.1" + tslib "^2.6.2" + +"@smithy/util-hex-encoding@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@smithy/util-hex-encoding/-/util-hex-encoding-3.0.0.tgz#32938b33d5bf2a15796cd3f178a55b4155c535e6" + integrity sha512-eFndh1WEK5YMUYvy3lPlVmYY/fZcQE1D8oSf41Id2vCeIkKJXPcYDCZD+4+xViI6b1XSd7tE+s5AmXzz5ilabQ== + dependencies: + tslib "^2.6.2" + +"@smithy/util-middleware@^3.0.10", "@smithy/util-middleware@^3.0.9": + version "3.0.10" + resolved "https://registry.yarnpkg.com/@smithy/util-middleware/-/util-middleware-3.0.10.tgz#ab8be99f1aaafe5a5490c344f27a264b72b7592f" + integrity sha512-eJO+/+RsrG2RpmY68jZdwQtnfsxjmPxzMlQpnHKjFPwrYqvlcT+fHdT+ZVwcjlWSrByOhGr9Ff2GG17efc192A== + dependencies: + "@smithy/types" "^3.7.1" + tslib "^2.6.2" + +"@smithy/util-retry@^3.0.10", "@smithy/util-retry@^3.0.9": + version "3.0.10" + resolved "https://registry.yarnpkg.com/@smithy/util-retry/-/util-retry-3.0.10.tgz#fc13e1b30e87af0cbecadf29ca83b171e2040440" + integrity sha512-1l4qatFp4PiU6j7UsbasUHL2VU023NRB/gfaa1M0rDqVrRN4g3mCArLRyH3OuktApA4ye+yjWQHjdziunw2eWA== + dependencies: + 
"@smithy/service-error-classification" "^3.0.10" + "@smithy/types" "^3.7.1" + tslib "^2.6.2" + +"@smithy/util-stream@^3.3.0", "@smithy/util-stream@^3.3.1": + version "3.3.1" + resolved "https://registry.yarnpkg.com/@smithy/util-stream/-/util-stream-3.3.1.tgz#a2636f435637ef90d64df2bb8e71cd63236be112" + integrity sha512-Ff68R5lJh2zj+AUTvbAU/4yx+6QPRzg7+pI7M1FbtQHcRIp7xvguxVsQBKyB3fwiOwhAKu0lnNyYBaQfSW6TNw== + dependencies: + "@smithy/fetch-http-handler" "^4.1.1" + "@smithy/node-http-handler" "^3.3.1" + "@smithy/types" "^3.7.1" + "@smithy/util-base64" "^3.0.0" + "@smithy/util-buffer-from" "^3.0.0" + "@smithy/util-hex-encoding" "^3.0.0" + "@smithy/util-utf8" "^3.0.0" + tslib "^2.6.2" + +"@smithy/util-uri-escape@^1.1.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@smithy/util-uri-escape/-/util-uri-escape-1.1.0.tgz#a8c5edaf19c0efdb9b51661e840549cf600a1808" + integrity sha512-/jL/V1xdVRt5XppwiaEU8Etp5WHZj609n0xMTuehmCqdoOFbId1M+aEeDWZsQ+8JbEB/BJ6ynY2SlYmOaKtt8w== dependencies: tslib "^2.5.0" -"@smithy/util-middleware@^2.0.2", "@smithy/util-middleware@^2.0.3": - version "2.0.3" - resolved "https://registry.yarnpkg.com/@smithy/util-middleware/-/util-middleware-2.0.3.tgz#478cbf957eaffa36aed624350be342bbf15d3c42" - integrity "sha1-R4y/lX6v+jau1iQ1C+NCu/FdPEI= sha512-+FOCFYOxd2HO7v/0hkFSETKf7FYQWa08wh/x/4KUeoVBnLR4juw8Qi+TTqZI6E2h5LkzD9uOaxC9lAjrpVzaaA==" +"@smithy/util-uri-escape@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@smithy/util-uri-escape/-/util-uri-escape-3.0.0.tgz#e43358a78bf45d50bb736770077f0f09195b6f54" + integrity sha512-LqR7qYLgZTD7nWLBecUi4aqolw8Mhza9ArpNEQ881MJJIU2sE5iHCK6TdyqqzcDLy0OPe10IY4T8ctVdtynubg== dependencies: - "@smithy/types" "^2.3.4" - tslib "^2.5.0" - -"@smithy/util-retry@^2.0.2", "@smithy/util-retry@^2.0.3": - version "2.0.3" - resolved "https://registry.yarnpkg.com/@smithy/util-retry/-/util-retry-2.0.3.tgz#a053855ddb51800bd679da03454cf626bc440918" - integrity "sha1-oFOFXdtRgAvWedoDRUz2JrxECRg= sha512-gw+czMnj82i+EaH7NL7XKkfX/ZKrCS2DIWwJFPKs76bMgkhf0y1C94Lybn7f8GkBI9lfIOUdPYtzm19zQOC8sw==" - dependencies: - "@smithy/service-error-classification" "^2.0.3" - "@smithy/types" "^2.3.4" - tslib "^2.5.0" - -"@smithy/util-stream@^2.0.12", "@smithy/util-stream@^2.0.14": - version "2.0.14" - resolved "https://registry.yarnpkg.com/@smithy/util-stream/-/util-stream-2.0.14.tgz#3fdd934e2bced80331dcaff18aefbcfe39ebf3cd" - integrity "sha1-P92TTivO2AMx3K/xiu+8/jnr880= sha512-XjvlDYe+9DieXhLf7p+EgkXwFtl34kHZcWfHnc5KaILbhyVfDLWuqKTFx6WwCFqb01iFIig8trGwExRIqqkBYg==" - dependencies: - "@smithy/fetch-http-handler" "^2.2.1" - "@smithy/node-http-handler" "^2.1.6" - "@smithy/types" "^2.3.4" - "@smithy/util-base64" "^2.0.0" - "@smithy/util-buffer-from" "^2.0.0" - "@smithy/util-hex-encoding" "^2.0.0" - "@smithy/util-utf8" "^2.0.0" - tslib "^2.5.0" - -"@smithy/util-uri-escape@^2.0.0": - version "2.0.0" - resolved "https://registry.yarnpkg.com/@smithy/util-uri-escape/-/util-uri-escape-2.0.0.tgz#19955b1a0f517a87ae77ac729e0e411963dfda95" - integrity "sha1-GZVbGg9Reoeud6xyng5BGWPf2pU= sha512-ebkxsqinSdEooQduuk9CbKcI+wheijxEb3utGXkCoYQkJnwTnLbH1JXGimJtUkQwNQbsbuYwG2+aFVyZf5TLaw==" - dependencies: - tslib "^2.5.0" + tslib "^2.6.2" "@smithy/util-utf8@^2.0.0": - version "2.0.0" - resolved "https://registry.yarnpkg.com/@smithy/util-utf8/-/util-utf8-2.0.0.tgz#b4da87566ea7757435e153799df9da717262ad42" - integrity "sha1-tNqHVm6ndXQ14VN5nfnacXJirUI= sha512-rctU1VkziY84n5OXe3bPNpKR001ZCME2JCaBBFgtiM2hfKbHFudc/BkMuPab8hRbLd0j3vbnBTTZ1igBf0wgiQ==" + version "2.3.0" + 
resolved "https://registry.yarnpkg.com/@smithy/util-utf8/-/util-utf8-2.3.0.tgz#dd96d7640363259924a214313c3cf16e7dd329c5" + integrity sha512-R8Rdn8Hy72KKcebgLiv8jQcQkXoLMOGGv5uI1/k0l+snqkOzQ1R0ChUBCxWMlBsFMekWjq0wRudIweFs7sKT5A== dependencies: - "@smithy/util-buffer-from" "^2.0.0" - tslib "^2.5.0" + "@smithy/util-buffer-from" "^2.2.0" + tslib "^2.6.2" -"@smithy/util-waiter@^2.0.9": - version "2.0.10" - resolved "https://registry.yarnpkg.com/@smithy/util-waiter/-/util-waiter-2.0.10.tgz#6cd28af8340ab54fa9adf10d193c4476a5673363" - integrity "sha1-bNKK+DQKtU+prfENGTxEdqVnM2M= sha512-yQjwWVrwYw+/f3hFQccE3zZF7lk6N6xtNcA6jvhWFYhnyKAm6B2mX8Gzftl0TbgoPUpzCvKYlvhaEpVtRpVfVw==" +"@smithy/util-utf8@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@smithy/util-utf8/-/util-utf8-3.0.0.tgz#1a6a823d47cbec1fd6933e5fc87df975286d9d6a" + integrity sha512-rUeT12bxFnplYDe815GXbq/oixEGHfRFFtcTF3YdDi/JaENIM6aSYYLJydG83UNzLXeRI5K8abYd/8Sp/QM0kA== dependencies: - "@smithy/abort-controller" "^2.0.10" - "@smithy/types" "^2.3.4" - tslib "^2.5.0" + "@smithy/util-buffer-from" "^3.0.0" + tslib "^2.6.2" + +"@smithy/util-waiter@^3.1.8": + version "3.1.9" + resolved "https://registry.yarnpkg.com/@smithy/util-waiter/-/util-waiter-3.1.9.tgz#1330ce2e79b58419d67755d25bce7a226e32dc6d" + integrity sha512-/aMXPANhMOlMPjfPtSrDfPeVP8l56SJlz93xeiLmhLe5xvlXA5T3abZ2ilEsDEPeY9T/wnN/vNGn9wa1SbufWA== + dependencies: + "@smithy/abort-controller" "^3.1.8" + "@smithy/types" "^3.7.1" + tslib "^2.6.2" "@socket.io/component-emitter@~3.1.0": version "3.1.0" @@ -5045,15 +5289,15 @@ request "^2.88.0" webfinger "^0.4.2" -"@techteamer/ocsp@1.0.0": - version "1.0.0" - resolved "https://registry.yarnpkg.com/@techteamer/ocsp/-/ocsp-1.0.0.tgz#7b82b02093fbe351e915bb37685ac1ac5a1233d3" - integrity sha512-lNAOoFHaZN+4huo30ukeqVrUmfC+avoEBYQ11QAnAw1PFhnI5oBCg8O/TNiCoEWix7gNGBIEjrQwtPREqKMPog== +"@techteamer/ocsp@1.0.1": + version "1.0.1" + resolved "https://registry.yarnpkg.com/@techteamer/ocsp/-/ocsp-1.0.1.tgz#420f80c64ff0f74a70b65c88e4031c03a9da6ded" + integrity sha512-q4pW5wAC6Pc3JI8UePwE37CkLQ5gDGZMgjSX4MEEm4D4Di59auDQ8UNIDzC4gRnPNmmcwjpPxozq8p5pjiOmOw== dependencies: asn1.js "^5.4.1" asn1.js-rfc2560 "^5.0.1" asn1.js-rfc5280 "^3.0.0" - async "^3.2.1" + async "^3.2.4" simple-lru-cache "^0.0.2" "@tediousjs/connection-string@^0.5.0": @@ -5500,6 +5744,13 @@ "@types/koa-compose" "*" "@types/node" "*" +"@types/koa__router@12.0.4": + version "12.0.4" + resolved "https://registry.yarnpkg.com/@types/koa__router/-/koa__router-12.0.4.tgz#a1f9afec9dc7e7d9fa1252d1938c44b403e19a28" + integrity sha512-Y7YBbSmfXZpa/m5UGGzb7XadJIRBRnwNY9cdAojZGp65Cpe5MAP3mOZE7e3bImt8dfKS4UFcR16SLH8L/z7PBw== + dependencies: + "@types/koa" "*" + "@types/koa__router@8.0.8": version "8.0.8" resolved "https://registry.yarnpkg.com/@types/koa__router/-/koa__router-8.0.8.tgz#b1e0e9a512498777d3366bbdf0e853df27ec831c" @@ -5547,10 +5798,10 @@ resolved "https://registry.yarnpkg.com/@types/ms/-/ms-0.7.31.tgz#31b7ca6407128a3d2bbc27fe2d21b345397f6197" integrity sha512-iiUgKzV9AuaEkZqkOLDIvlQiL6ltuZd9tGcW3gwpnX8JbuiuhFlEGmmFXEXkN50Cvq7Os88IY2v0dkDqXYWVgA== -"@types/mssql@9.1.4": - version "9.1.4" - resolved "https://registry.yarnpkg.com/@types/mssql/-/mssql-9.1.4.tgz#d485b06494a76d15b957e0952305c55053bac366" - integrity sha512-st2ryK+viraRuptxcGs+66J0RrABytxhGxUlpWcOniNPzpnxIaeNhPJVM3lZn1r+s/6lQARYID6Z+MBoseSD8g== +"@types/mssql@9.1.5": + version "9.1.5" + resolved "https://registry.yarnpkg.com/@types/mssql/-/mssql-9.1.5.tgz#1574a5870aeb029c6d787861af101161b9b8d3b6" + integrity 
sha512-Q9EsgXwuRoX5wvUSu24YfbKMbFChv7pZ/jeCzPkj47ehcuXYsBcfogwrtVFosSjinD4Q/MY2YPGk9Yy1cM2Ywg== dependencies: "@types/node" "*" "@types/tedious" "*" @@ -5564,7 +5815,15 @@ "@types/node" "*" form-data "^3.0.0" -"@types/node-fetch@^2.5.0", "@types/node-fetch@^2.6.4": +"@types/node-fetch@^2.5.0": + version "2.6.12" + resolved "https://registry.yarnpkg.com/@types/node-fetch/-/node-fetch-2.6.12.tgz#8ab5c3ef8330f13100a7479e2cd56d3386830a03" + integrity sha512-8nneRWKCg3rMtF69nLQJnOYUcbafYeFSjqkw3jCRLsqkWFlHaoQrr5mXmofFGOx3DKn7UfmBMyov8ySvLRVldA== + dependencies: + "@types/node" "*" + form-data "^4.0.0" + +"@types/node-fetch@^2.6.4": version "2.6.11" resolved "https://registry.yarnpkg.com/@types/node-fetch/-/node-fetch-2.6.11.tgz#9b39b78665dae0e82a08f02f4967d62c66f95d24" integrity sha512-24xFj9R5+rfQJLRyM56qh+wnVSYhyXC2tkoBndtY0U+vubqNsYXGjufB2nn8Q6gt0LrARwL6UBtMCSVCwl4B1g== @@ -5591,6 +5850,13 @@ dependencies: undici-types "~5.26.4" +"@types/node@>=18", "@types/node@^22.9.0": + version "22.9.1" + resolved "https://registry.yarnpkg.com/@types/node/-/node-22.9.1.tgz#bdf91c36e0e7ecfb7257b2d75bf1b206b308ca71" + integrity sha512-p8Yy/8sw1caA8CdRIQBG5tiLHmxtQKObCijiAa9Ez+d4+PRffM4054xbju0msf+cvhJpnFEeNjxmVT/0ipktrg== + dependencies: + undici-types "~6.19.8" + "@types/node@>=8.0.0 <15": version "14.18.37" resolved "https://registry.yarnpkg.com/@types/node/-/node-14.18.37.tgz#0bfcd173e8e1e328337473a8317e37b3b14fd30d" @@ -5615,13 +5881,6 @@ dependencies: undici-types "~5.26.4" -"@types/node@^22.9.0": - version "22.9.1" - resolved "https://registry.yarnpkg.com/@types/node/-/node-22.9.1.tgz#bdf91c36e0e7ecfb7257b2d75bf1b206b308ca71" - integrity sha512-p8Yy/8sw1caA8CdRIQBG5tiLHmxtQKObCijiAa9Ez+d4+PRffM4054xbju0msf+cvhJpnFEeNjxmVT/0ipktrg== - dependencies: - undici-types "~6.19.8" - "@types/normalize-package-data@^2.4.0": version "2.4.1" resolved "https://registry.yarnpkg.com/@types/normalize-package-data/-/normalize-package-data-2.4.1.tgz#d3357479a0fdfdd5907fe67e17e0a85c906e1301" @@ -5773,10 +6032,10 @@ "@types/pouchdb-core" "*" "@types/pouchdb-find" "*" -"@types/pouchdb@6.4.0", "@types/pouchdb@^6.4.0": - version "6.4.0" - resolved "https://registry.yarnpkg.com/@types/pouchdb/-/pouchdb-6.4.0.tgz#f9c41ca64b23029f9bf2eb4bf6956e6431cb79f8" - integrity sha512-eGCpX+NXhd5VLJuJMzwe3L79fa9+IDTrAG3CPaf4s/31PD56hOrhDJTSmRELSXuiqXr6+OHzzP0PldSaWsFt7w== +"@types/pouchdb@6.4.2", "@types/pouchdb@^6.4.0": + version "6.4.2" + resolved "https://registry.yarnpkg.com/@types/pouchdb/-/pouchdb-6.4.2.tgz#54777533d86f4abd1a3989b272e085323623bbe1" + integrity sha512-YsI47rASdtzR+3V3JE2UKY58snhm0AglHBpyckQBkRYoCbTvGagXHtV0x5n8nzN04jQmvTG+Sm85cIzKT3KXBA== dependencies: "@types/pouchdb-adapter-cordova-sqlite" "*" "@types/pouchdb-adapter-fruitdown" "*" @@ -5991,9 +6250,9 @@ integrity sha512-Q5vtl1W5ue16D+nIaW8JWebSSraJVlK+EthKn7e7UcD4KWsaSJ8BqGPXNaPghgtcn/fhvrN17Tv8ksUsQpiplw== "@types/triple-beam@^1.3.2": - version "1.3.2" - resolved "https://registry.yarnpkg.com/@types/triple-beam/-/triple-beam-1.3.2.tgz#38ecb64f01aa0d02b7c8f4222d7c38af6316fef8" - integrity sha512-txGIh+0eDFzKGC25zORnswy+br1Ha7hj5cMVwKIU7+s0U2AxxJru/jZSMU6OC9MJWP6+pc/hc6ZjyZShpsyY2g== + version "1.3.5" + resolved "https://registry.yarnpkg.com/@types/triple-beam/-/triple-beam-1.3.5.tgz#74fef9ffbaa198eb8b588be029f38b00299caa2c" + integrity sha512-6WaYesThRMCl19iryMYP7/x2OVgCtbIVflDGFpWnb9irXI3UjYE4AzmYuiUKY1AJstGijoY+MgUszMgRxIYTYw== "@types/tunnel@^0.0.3": version "0.0.3" @@ -6437,6 +6696,18 @@ abstract-leveldown@^6.2.1: level-supports "~1.0.0" xtend "~4.0.0" 
+abstract-leveldown@^7.2.0: + version "7.2.0" + resolved "https://registry.yarnpkg.com/abstract-leveldown/-/abstract-leveldown-7.2.0.tgz#08d19d4e26fb5be426f7a57004851b39e1795a2e" + integrity sha512-DnhQwcFEaYsvYDnACLZhMmCWd3rkOeEvglpa4q5i/5Jlm3UIsWaxVzuXvDLFCSCWRO3yy2/+V/G7FusFgejnfQ== + dependencies: + buffer "^6.0.3" + catering "^2.0.0" + is-buffer "^2.0.5" + level-concat-iterator "^3.0.0" + level-supports "^2.0.1" + queue-microtask "^1.2.3" + abstract-leveldown@~0.12.0, abstract-leveldown@~0.12.1: version "0.12.4" resolved "https://registry.yarnpkg.com/abstract-leveldown/-/abstract-leveldown-0.12.4.tgz#29e18e632e60e4e221d5810247852a63d7b2e410" @@ -6486,10 +6757,10 @@ acorn-globals@^7.0.0: acorn "^8.1.0" acorn-walk "^8.0.2" -acorn-import-assertions@^1.9.0: - version "1.9.0" - resolved "https://registry.yarnpkg.com/acorn-import-assertions/-/acorn-import-assertions-1.9.0.tgz#507276249d684797c84e0734ef84860334cfb1ac" - integrity sha512-cmMwop9x+8KFhxvKrKfPYmN6/pKTYYHBqLa0DfvVZcKMJWNyWLnaqND7dx/qn66R7ewM1UX5XMaDVP5wlVTaVA== +acorn-import-attributes@^1.9.5: + version "1.9.5" + resolved "https://registry.yarnpkg.com/acorn-import-attributes/-/acorn-import-attributes-1.9.5.tgz#7eb1557b1ba05ef18b5ed0ec67591bfab04688ef" + integrity sha512-n02Vykv5uA3eHGM/Z2dQrcD56kL8TyDb2p1+0P83PClMnC/nc+anbQRhIOWnSq4Ke/KvDPrY3C9hDtC/A3eHnQ== acorn-jsx@^5.3.2: version "5.3.2" @@ -6535,7 +6806,7 @@ agent-base@6, agent-base@^6.0.2: dependencies: debug "4" -agent-base@^7.0.2: +agent-base@^7.0.2, agent-base@^7.1.0: version "7.1.1" resolved "https://registry.yarnpkg.com/agent-base/-/agent-base-7.1.1.tgz#bdbded7dfb096b751a2a087eeeb9664725b2e317" integrity sha512-H0TSyFNDMomMNJQBn8wFV5YC/2eJ+VXECwOadZJT554xP6cODZHPX3H9QMQECxvrgiSOP1pHjy1sMWQVYJOUOA== @@ -7012,11 +7283,11 @@ async-lock@^1.4.1: async-retry@^1.3.3: version "1.3.3" resolved "https://registry.yarnpkg.com/async-retry/-/async-retry-1.3.3.tgz#0e7f36c04d8478e7a58bdbed80cedf977785f280" - integrity "sha1-Dn82wE2EeOeli9vtgM7fl3eF8oA= sha512-wfr/jstw9xNi/0teMHrRW7dsz3Lt5ARhYNZ2ewpadnhaIp5mbALhOAP+EAdsC7t4Z6wqsDVv9+W6gm1Dk9mEyw==" + integrity sha512-wfr/jstw9xNi/0teMHrRW7dsz3Lt5ARhYNZ2ewpadnhaIp5mbALhOAP+EAdsC7t4Z6wqsDVv9+W6gm1Dk9mEyw== dependencies: retry "0.13.1" -async@^3.2.1, async@^3.2.3, async@^3.2.4: +async@^3.2.3, async@^3.2.4: version "3.2.5" resolved "https://registry.yarnpkg.com/async/-/async-3.2.5.tgz#ebd52a8fdaf7a2289a24df399f8d8485c8a46b66" integrity sha512-baNZyqaaLhyLVKm/DlvdW051MSgO6b8eVfIezl9E5PqWxFgzLm/wQntEW4zOytVburDEr0JlALEpdOFwvErLsg== @@ -7055,25 +7326,31 @@ available-typed-arrays@^1.0.7: dependencies: possible-typed-array-names "^1.0.0" +await-to-js@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/await-to-js/-/await-to-js-3.0.0.tgz#70929994185616f4675a91af6167eb61cc92868f" + integrity sha512-zJAaP9zxTcvTHRlejau3ZOY4V7SRpiByf3/dxx2uyKxxor19tpmpV2QRsTKikckwhaPmr2dVpxxMr7jOCYVp5g== + aws-cloudfront-sign@3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/aws-cloudfront-sign/-/aws-cloudfront-sign-3.0.2.tgz#da5273b0301bcd70312c8c76293d5fec6d414f0a" integrity sha512-Z/yOGZ3Hd1rhYbY13mtRiLCbCDC1Xf/v+dQUyUwMLnyunD/nfDZd/2LMZ9MKxxOhVb2RzEmEwY0F9f+riPaSWQ== -aws-sdk@2.1030.0: - version "2.1030.0" - resolved "https://registry.yarnpkg.com/aws-sdk/-/aws-sdk-2.1030.0.tgz#24a856af3d2b8b37c14a8f59974993661c66fd82" - integrity sha512-to0STOb8DsSGuSsUb/WCbg/UFnMGfIYavnJH5ZlRCHzvCFjTyR+vfE8ku+qIZvfFM4+5MNTQC/Oxfun2X/TuyA== +aws-sdk@2.1692.0: + version "2.1692.0" + resolved 
"https://registry.yarnpkg.com/aws-sdk/-/aws-sdk-2.1692.0.tgz#9dac5f7bfcc5ab45825cc8591b12753aa7d2902c" + integrity sha512-x511uiJ/57FIsbgUe5csJ13k3uzu25uWQE+XqfBis/sB0SFoiElJWXRkgEAUh0U6n40eT3ay5Ue4oPkRMu1LYw== dependencies: buffer "4.9.2" events "1.1.1" ieee754 "1.1.13" - jmespath "0.15.0" + jmespath "0.16.0" querystring "0.2.0" sax "1.2.1" url "0.10.3" - uuid "3.3.2" - xml2js "0.4.19" + util "^0.12.4" + uuid "8.0.0" + xml2js "0.6.2" aws-sign2@~0.7.0: version "0.7.0" @@ -7081,16 +7358,16 @@ aws-sign2@~0.7.0: integrity sha512-08kcGqnYf/YmjoRhfxyu+CLxBjUtHLXLXX/vUfx9l2LYzG3c1m61nrpyFUZI6zeS+Li/wWMMidD9KgrqtGq3mA== aws4@^1.8.0: - version "1.11.0" - resolved "https://registry.yarnpkg.com/aws4/-/aws4-1.11.0.tgz#d61f46d83b2519250e2784daf5b09479a8b41c59" - integrity sha512-xh1Rl34h6Fi1DC2WWKfxUTVqRsNnr6LsKz2+hfwDxQJWmrx8+c7ylaqBMcHfl1U1r2dsifOvKX3LQuLNZ+XSvA== + version "1.13.2" + resolved "https://registry.yarnpkg.com/aws4/-/aws4-1.13.2.tgz#0aa167216965ac9474ccfa83892cfb6b3e1e52ef" + integrity sha512-lHe62zvbTB5eEABUVi/AwVh0ZKY9rMMDhmm+eeyuuUQbQ3+J+fONVQOZyj+DdrvD4BY33uYniyRJ4UJIaSKAfw== -axios@1.1.3, axios@1.6.3, axios@^0.21.1, axios@^1.0.0, axios@^1.1.3, axios@^1.4.0, axios@^1.5.0, axios@^1.6.2: - version "1.6.3" - resolved "https://registry.yarnpkg.com/axios/-/axios-1.6.3.tgz#7f50f23b3aa246eff43c54834272346c396613f4" - integrity sha512-fWyNdeawGam70jXSVlKl+SUNVcL6j6W79CuSIPfi6HnDUmSCH6gyUys/HrqHeA/wU0Az41rRgean494d0Jb+ww== +axios@1.1.3, axios@1.7.7, axios@^0.21.1, axios@^1.0.0, axios@^1.1.3, axios@^1.4.0, axios@^1.6.2, axios@^1.6.8: + version "1.7.7" + resolved "https://registry.yarnpkg.com/axios/-/axios-1.7.7.tgz#2f554296f9892a72ac8d8e4c5b79c14a91d0a47f" + integrity sha512-S4kL7XrjgBmvdGut0sN3yJxqYzrDOnivkBiN0OFs6hLiUam3UPvswUo0kqGyhqUZGEOytHyumEdXsAkgCOUf3Q== dependencies: - follow-redirects "^1.15.0" + follow-redirects "^1.15.6" form-data "^4.0.0" proxy-from-env "^1.1.0" @@ -7307,20 +7584,20 @@ before-after-hook@^2.2.0: integrity sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ== big-integer@^1.6.43: - version "1.6.51" - resolved "https://registry.yarnpkg.com/big-integer/-/big-integer-1.6.51.tgz#0df92a5d9880560d3ff2d5fd20245c889d130686" - integrity sha512-GPEid2Y9QU1Exl1rpO9B2IPJGHPSupF5GnVIP0blYvNOMer2bTvSWs1jGOUg04hTmu67nmLsQ9TBo1puaotBHg== - -bignumber.js@^2.4.0: - version "2.4.0" - resolved "https://registry.yarnpkg.com/bignumber.js/-/bignumber.js-2.4.0.tgz#838a992da9f9d737e0f4b2db0be62bb09dd0c5e8" - integrity sha512-uw4ra6Cv483Op/ebM0GBKKfxZlSmn6NgFRby5L3yGTlunLj53KQgndDlqy2WVFOwgvurocApYkSud0aO+mvrpQ== + version "1.6.52" + resolved "https://registry.yarnpkg.com/big-integer/-/big-integer-1.6.52.tgz#60a887f3047614a8e1bffe5d7173490a97dc8c85" + integrity sha512-QxD8cf2eVqJOOz63z6JIN9BzvVs/dlySa5HGSBH5xtR8dPteIRQnBxxKqkNTiT6jbDTF6jAfrd4oMcND9RGbQg== bignumber.js@^9.0.0: version "9.1.1" resolved "https://registry.yarnpkg.com/bignumber.js/-/bignumber.js-9.1.1.tgz#c4df7dc496bd849d4c9464344c1aa74228b4dac6" integrity sha512-pHm4LsMJ6lzgNGVfZHjMoO8sdoRhOzOH4MLmY65Jg70bpxCKu5iOHNJyfF6OyvYw7t8Fpf35RuzUyqnQsj8Vig== +bignumber.js@^9.1.2: + version "9.1.2" + resolved "https://registry.yarnpkg.com/bignumber.js/-/bignumber.js-9.1.2.tgz#b7c4242259c008903b13707983b5f4bbd31eda0c" + integrity sha512-2/mKyZH9K85bzOEfhXDBFZTGd1CTs+5IHpeFQo9luiBG7hghdC851Pj2WAhb6E3R6b9tZj/XKhbg4fum+Kepug== + binary-extensions@^2.0.0: version "2.2.0" resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.2.0.tgz#75f502eeaf9ffde42fc98829645be4ea76bd9e2d" @@ 
-7348,10 +7625,10 @@ bl@^4.0.3, bl@^4.1.0: inherits "^2.0.4" readable-stream "^3.4.0" -bl@^6.0.3: - version "6.0.13" - resolved "https://registry.yarnpkg.com/bl/-/bl-6.0.13.tgz#dc5f288d3f849771bb6112b29477abee4c0a9d96" - integrity sha512-tMncAcpsyjZgAVbVFupVIaB2xud13xxT59fdHkuszY2jdZkqIWfpQdmII1fOe3kOGAz0mNLTIHEm+KxpYsQKKg== +bl@^6.0.11: + version "6.0.16" + resolved "https://registry.yarnpkg.com/bl/-/bl-6.0.16.tgz#29b190f1a754e2d168de3dc8c74ed8d12bf78e6e" + integrity sha512-V/kz+z2Mx5/6qDfRCilmrukUXcXuCoXKg3/3hDvzKKoSUx8CJKudfIoT29XZc3UE9xBvxs5qictiHdprwtteEg== dependencies: "@types/readable-stream" "^4.0.0" buffer "^6.0.3" @@ -7370,10 +7647,10 @@ bluebird@^3.5.1, bluebird@^3.7.2: resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.7.2.tgz#9f229c15be272454ffa973ace0dbee79a1b0c36f" integrity sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg== -bmp-js@^0.1.0: - version "0.1.0" - resolved "https://registry.yarnpkg.com/bmp-js/-/bmp-js-0.1.0.tgz#e05a63f796a6c1ff25f4771ec7adadc148c07233" - integrity sha512-vHdS19CnY3hwiNdkaqk93DvjVLfbEcI8mys4UjuWrlX1haDmroo8o4xCzh4wD6DGV6HxRCyauwhHRqMTfERtjw== +bmp-ts@^1.0.9: + version "1.0.9" + resolved "https://registry.yarnpkg.com/bmp-ts/-/bmp-ts-1.0.9.tgz#0fd124ba812be9b786b29e5b186ee76d74ff5538" + integrity sha512-cTEHk2jLrPyi+12M3dhpEbnnPOsaZuq7C45ylbbQIiWgDFZq4UVYPEY5mlqjvsj/6gJv9qX5sa+ebDzLXT28Vw== bn.js@^4.0.0, bn.js@^4.1.0, bn.js@^4.11.9: version "4.12.0" @@ -7398,7 +7675,7 @@ boolean@^3.0.1: bowser@^2.11.0: version "2.11.0" resolved "https://registry.yarnpkg.com/bowser/-/bowser-2.11.0.tgz#5ca3c35757a7aa5771500c70a73a9f91ef420a8f" - integrity "sha1-XKPDV1enqldxUAxwpzqfke9CCo8= sha512-AlcaJBi/pqqJBIQ8U9Mcpc9i8Aqxn88Skv5d+xBX006BY5u8N3mGLHa5Lgppa7L/HfwgwLgZ6NYs+Ag6uUmJRA==" + integrity sha512-AlcaJBi/pqqJBIQ8U9Mcpc9i8Aqxn88Skv5d+xBX006BY5u8N3mGLHa5Lgppa7L/HfwgwLgZ6NYs+Ag6uUmJRA== boxen@^5.0.0: version "5.1.2" @@ -7577,11 +7854,6 @@ buffer-equal-constant-time@1.0.1: resolved "https://registry.yarnpkg.com/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz#f8e71132f7ffe6e01a5c9697a4c6f3e48d5cc819" integrity sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA== -buffer-equal@0.0.1: - version "0.0.1" - resolved "https://registry.yarnpkg.com/buffer-equal/-/buffer-equal-0.0.1.tgz#91bc74b11ea405bc916bc6aa908faafa5b4aac4b" - integrity sha512-RgSV6InVQ9ODPdLWJ5UAqBqJBOg370Nz6ZQtRzpt6nUjc8v0St97uJ4PYC6NztqIScrAXafKM3mZPMygSe1ggA== - buffer-es6@^4.9.2, buffer-es6@^4.9.3: version "4.9.3" resolved "https://registry.yarnpkg.com/buffer-es6/-/buffer-es6-4.9.3.tgz#f26347b82df76fd37e18bcb5288c4970cfd5c404" @@ -7629,7 +7901,7 @@ buffer@6.0.3, buffer@^6.0.3: base64-js "^1.3.1" ieee754 "^1.2.1" -buffer@^5.1.0, buffer@^5.2.0, buffer@^5.2.1, buffer@^5.5.0, buffer@^5.6.0: +buffer@^5.1.0, buffer@^5.2.1, buffer@^5.5.0, buffer@^5.6.0: version "5.7.1" resolved "https://registry.yarnpkg.com/buffer/-/buffer-5.7.1.tgz#ba62e7c13133053582197160851a8f648e99eed0" integrity sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ== @@ -7843,6 +8115,11 @@ caseless@~0.12.0: resolved "https://registry.yarnpkg.com/caseless/-/caseless-0.12.0.tgz#1b681c21ff84033c826543090689420d187151dc" integrity sha512-4tYFyifaFfGacoiObjJegolkwSU4xQNGbVgUiNYVUxbQ2x2lUsFvY4hVgVzGiIe6WLOPqycWXA40l+PWsxthUw== +catering@^2.0.0, catering@^2.1.0: + version "2.1.1" + resolved 
"https://registry.yarnpkg.com/catering/-/catering-2.1.1.tgz#66acba06ed5ee28d5286133982a927de9a04b510" + integrity sha512-K7Qy8O9p76sL3/3m7/zLKbRkyOlSZAgzEaLhyj2mXS8PsCud2Eo4hAb8aLtZqHh0QGqLcb9dlJSu6lHRVENm1w== + chai@^4.3.7: version "4.5.0" resolved "https://registry.yarnpkg.com/chai/-/chai-4.5.0.tgz#707e49923afdd9b13a8b0b47d33d732d13812fd8" @@ -8311,7 +8588,7 @@ compress-commons@^6.0.2: normalize-path "^3.0.0" readable-stream "^4.0.0" -compressible@^2.0.0, compressible@^2.0.12: +compressible@^2.0.0: version "2.0.18" resolved "https://registry.yarnpkg.com/compressible/-/compressible-2.0.18.tgz#af53cca6b070d4c3c0750fbd77286a6d7cc46fba" integrity sha512-AF3r7P5dWxL8MxyITRMlORQNaOA2IkAFaTr4k7BUumjPtRpGDTZpl0Pb1XCO6JeDCBdp126Cgs9sMxqSjgYyRg== @@ -8503,11 +8780,16 @@ convert-source-map@^2.0.0: resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-2.0.0.tgz#4b560f649fc4e918dd0ab75cf4961e8bc882d82a" integrity sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg== -cookie@^0.4.1, cookie@~0.4.1: +cookie@^0.4.1: version "0.4.2" resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.4.2.tgz#0e41f24de5ecf317947c82fc789e06a884824432" integrity sha512-aSWTXFzaKWkvHO1Ny/s+ePFpvKsPnjc551iI41v3ny/ow6tBG5Vd+FuqGNhh1LxOmVzOlGUriIlOaokOvhaStA== +cookie@~0.7.2: + version "0.7.2" + resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.7.2.tgz#556369c472a2ba910f2979891b526b3436237ed7" + integrity sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w== + cookiejar@^2.1.4: version "2.1.4" resolved "https://registry.yarnpkg.com/cookiejar/-/cookiejar-2.1.4.tgz#ee669c1fea2cf42dc31585469d193fef0d65771b" @@ -9021,10 +9303,10 @@ dayjs@^1.10.8: resolved "https://registry.yarnpkg.com/dayjs/-/dayjs-1.11.13.tgz#92430b0139055c3ebb60150aa13e860a4b5a366c" integrity sha512-oaMBel6gjolK862uaPQOVTA7q3TZhuSvuMQAAglQDOWYO9A91IrAOUJEyKVlqJlHE0vq5p5UXxzdPfMH/x6xNg== -dc-polyfill@^0.1.2: - version "0.1.3" - resolved "https://registry.yarnpkg.com/dc-polyfill/-/dc-polyfill-0.1.3.tgz#fe9eefc86813439dd46d6f9ad9582ec079c39720" - integrity sha512-Wyk5n/5KUj3GfVKV2jtDbtChC/Ff9fjKsBcg4ZtYW1yQe3DXNHcGURvmoxhqQdfOQ9TwyMjnfyv1lyYcOkFkFA== +dc-polyfill@^0.1.2, dc-polyfill@^0.1.4: + version "0.1.6" + resolved "https://registry.yarnpkg.com/dc-polyfill/-/dc-polyfill-0.1.6.tgz#c2940fa68ffb24a7bf127cc6cfdd15b39f0e7f02" + integrity sha512-UV33cugmCC49a5uWAApM+6Ev9ZdvIUMTrtCO9fj96TPGOQiea54oeO3tiEVdVeo3J9N2UdJEmbS4zOkkEA35uQ== dd-trace@5.2.0: version "5.2.0" @@ -9063,6 +9345,43 @@ dd-trace@5.2.0: semver "^7.5.4" tlhunter-sorted-set "^0.1.0" +dd-trace@5.23.0: + version "5.23.0" + resolved "https://registry.yarnpkg.com/dd-trace/-/dd-trace-5.23.0.tgz#a0c11863406de440a6675648caf06e1d07d67ba8" + integrity sha512-nLvwSGpTMIk6S3sMSge6yFqqgqI573VgZc8MF31vl6K0ouJoE7OkVx9cmSVjS4CbSi525tcKq9z7tApsNLpVLQ== + dependencies: + "@datadog/native-appsec" "8.1.1" + "@datadog/native-iast-rewriter" "2.4.1" + "@datadog/native-iast-taint-tracking" "3.1.0" + "@datadog/native-metrics" "^2.0.0" + "@datadog/pprof" "5.3.0" + "@datadog/sketches-js" "^2.1.0" + "@opentelemetry/api" ">=1.0.0 <1.9.0" + "@opentelemetry/core" "^1.14.0" + crypto-randomuuid "^1.0.0" + dc-polyfill "^0.1.4" + ignore "^5.2.4" + import-in-the-middle "1.11.2" + int64-buffer "^0.1.9" + istanbul-lib-coverage "3.2.0" + jest-docblock "^29.7.0" + jsonpath-plus "^9.0.0" + koalas "^1.0.2" + limiter "1.1.5" + lodash.sortby "^4.7.0" + lru-cache "^7.14.0" + module-details-from-path "^1.0.3" + msgpack-lite 
"^0.1.26" + opentracing ">=0.12.1" + path-to-regexp "^0.1.10" + pprof-format "^2.1.0" + protobufjs "^7.2.5" + retry "^0.13.1" + rfdc "^1.3.1" + semver "^7.5.4" + shell-quote "^1.8.1" + tlhunter-sorted-set "^0.1.0" + debug@4, debug@^4.0.0, debug@^4.0.1, debug@^4.1.0, debug@^4.1.1, debug@^4.3.1, debug@^4.3.2, debug@^4.3.3, debug@^4.3.4, debug@~4.3.1, debug@~4.3.2, debug@~4.3.4: version "4.3.6" resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.6.tgz#2ab2c38fbaffebf8aa95fdfe6d88438c7a13c52b" @@ -9077,7 +9396,7 @@ debug@4.3.4: dependencies: ms "2.1.2" -debug@^3.1.0, debug@^3.2.6, debug@^3.2.7: +debug@^3.1.0, debug@^3.2.7: version "3.2.7" resolved "https://registry.yarnpkg.com/debug/-/debug-3.2.7.tgz#72580b7e9145fb39b6676f9c5e5fb100b934179a" integrity sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ== @@ -9837,6 +10156,16 @@ duplexify@^4.0.0, duplexify@^4.1.2: readable-stream "^3.1.1" stream-shift "^1.0.0" +duplexify@^4.1.3: + version "4.1.3" + resolved "https://registry.yarnpkg.com/duplexify/-/duplexify-4.1.3.tgz#a07e1c0d0a2c001158563d32592ba58bddb0236f" + integrity sha512-M3BmBhwJRZsSx38lZyhE53Csddgzl5R7xGJNk7CVddZD6CcmwMCH8J+7AprIrQKH7TonKxaCjcv27Qmf+sQ+oA== + dependencies: + end-of-stream "^1.4.1" + inherits "^2.0.3" + readable-stream "^3.1.1" + stream-shift "^1.0.2" + eastasianwidth@^0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/eastasianwidth/-/eastasianwidth-0.2.0.tgz#696ce2ec0aa0e6ea93a397ffcf24aa7840c827cb" @@ -9985,17 +10314,17 @@ engine.io-parser@~5.2.1: resolved "https://registry.yarnpkg.com/engine.io-parser/-/engine.io-parser-5.2.2.tgz#37b48e2d23116919a3453738c5720455e64e1c49" integrity sha512-RcyUFKA93/CXH20l4SoVvzZfrSDMOTUS3bWVpTt2FuFP+XYrL8i8oonHP7WInRyVHXh0n/ORtoeiE1os+8qkSw== -engine.io@~6.5.2: - version "6.5.5" - resolved "https://registry.yarnpkg.com/engine.io/-/engine.io-6.5.5.tgz#430b80d8840caab91a50e9e23cb551455195fc93" - integrity sha512-C5Pn8Wk+1vKBoHghJODM63yk8MvrO9EWZUfkAt5HAqIgPE4/8FF0PEGHXtEd40l223+cE5ABWuPzm38PHFXfMA== +engine.io@~6.6.0: + version "6.6.2" + resolved "https://registry.yarnpkg.com/engine.io/-/engine.io-6.6.2.tgz#32bd845b4db708f8c774a4edef4e5c8a98b3da72" + integrity sha512-gmNvsYi9C8iErnZdVcJnvCpSKbWTt1E8+JZo8b+daLninywUWi5NQ5STSHZ9rFjFO7imNcvb8Pc5pe/wMR5xEw== dependencies: "@types/cookie" "^0.4.1" "@types/cors" "^2.8.12" "@types/node" ">=10.0.0" accepts "~1.3.4" base64id "2.0.0" - cookie "~0.4.1" + cookie "~0.7.2" cors "~2.8.5" debug "~4.3.1" engine.io-parser "~5.2.1" @@ -10016,11 +10345,6 @@ enquirer@~2.3.6: dependencies: ansi-colors "^4.1.1" -ent@^2.2.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/ent/-/ent-2.2.0.tgz#e964219325a21d05f44466a2f686ed6ce5f5dd1d" - integrity "sha1-6WQhkyWiHQX0RGai9obtbOX13R0= sha512-GHrMyVZQWvTIdDtpiEXdHZnFQKzeO09apj8Cbl4pKWy4i0Oprcq17usfDt5aO63swf0JOeMWjWQE/LzgSRuWpA==" - entities@^2.0.0: version "2.2.0" resolved "https://registry.yarnpkg.com/entities/-/entities-2.2.0.tgz#098dc90ebb83d8dffa089d55256b351d34c4da55" @@ -10060,7 +10384,7 @@ error-ex@^1.3.1: dependencies: is-arrayish "^0.2.1" -es-abstract@^1.17.5, es-abstract@^1.22.1, es-abstract@^1.22.3, es-abstract@^1.23.0, es-abstract@^1.23.2: +es-abstract@^1.22.1, es-abstract@^1.22.3, es-abstract@^1.23.0: version "1.23.3" resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.23.3.tgz#8f0c5a35cd215312573c5a27c87dfd6c881a0aa0" integrity sha512-e+HfNH61Bj1X9/jLc5v1owaLYuHdeHHSQlkhCBiTK8rBvKaULl/beGMxwrMXjpYrv4pz22BlY570vVePA2ho4A== @@ -10112,19 +10436,57 @@ es-abstract@^1.17.5, 
es-abstract@^1.22.1, es-abstract@^1.22.3, es-abstract@^1.23 unbox-primitive "^1.0.2" which-typed-array "^1.1.15" -es-aggregate-error@^1.0.9: - version "1.0.13" - resolved "https://registry.yarnpkg.com/es-aggregate-error/-/es-aggregate-error-1.0.13.tgz#7f28b77c9d8d09bbcd3a466e4be9fe02fa985201" - integrity sha512-KkzhUUuD2CUMqEc8JEqsXEMDHzDPE8RCjZeUBitsnB1eNcAJWQPiciKsMXe3Yytj4Flw1XLl46Qcf9OxvZha7A== +es-abstract@^1.23.3: + version "1.23.5" + resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.23.5.tgz#f4599a4946d57ed467515ed10e4f157289cd52fb" + integrity sha512-vlmniQ0WNPwXqA0BnmwV3Ng7HxiGlh6r5U6JcTMNx8OilcAGqVJBHJcPjqOMaczU9fRuRK5Px2BdVyPRnKMMVQ== dependencies: - define-data-property "^1.1.4" - define-properties "^1.2.1" - es-abstract "^1.23.2" + array-buffer-byte-length "^1.0.1" + arraybuffer.prototype.slice "^1.0.3" + available-typed-arrays "^1.0.7" + call-bind "^1.0.7" + data-view-buffer "^1.0.1" + data-view-byte-length "^1.0.1" + data-view-byte-offset "^1.0.0" + es-define-property "^1.0.0" es-errors "^1.3.0" - function-bind "^1.1.2" - globalthis "^1.0.3" + es-object-atoms "^1.0.0" + es-set-tostringtag "^2.0.3" + es-to-primitive "^1.2.1" + function.prototype.name "^1.1.6" + get-intrinsic "^1.2.4" + get-symbol-description "^1.0.2" + globalthis "^1.0.4" + gopd "^1.0.1" has-property-descriptors "^1.0.2" - set-function-name "^2.0.2" + has-proto "^1.0.3" + has-symbols "^1.0.3" + hasown "^2.0.2" + internal-slot "^1.0.7" + is-array-buffer "^3.0.4" + is-callable "^1.2.7" + is-data-view "^1.0.1" + is-negative-zero "^2.0.3" + is-regex "^1.1.4" + is-shared-array-buffer "^1.0.3" + is-string "^1.0.7" + is-typed-array "^1.1.13" + is-weakref "^1.0.2" + object-inspect "^1.13.3" + object-keys "^1.1.1" + object.assign "^4.1.5" + regexp.prototype.flags "^1.5.3" + safe-array-concat "^1.1.2" + safe-regex-test "^1.0.3" + string.prototype.trim "^1.2.9" + string.prototype.trimend "^1.0.8" + string.prototype.trimstart "^1.0.8" + typed-array-buffer "^1.0.2" + typed-array-byte-length "^1.0.1" + typed-array-byte-offset "^1.0.2" + typed-array-length "^1.0.6" + unbox-primitive "^1.0.2" + which-typed-array "^1.1.15" es-define-property@^1.0.0: version "1.0.0" @@ -10754,13 +11116,18 @@ fast-url-parser@^1.1.3: dependencies: punycode "^1.3.2" -fast-xml-parser@4.2.5, fast-xml-parser@4.4.1, fast-xml-parser@^4.2.2, fast-xml-parser@^4.2.5: +fast-xml-parser@4.4.1, fast-xml-parser@^4.2.5, fast-xml-parser@^4.4.1: version "4.4.1" resolved "https://registry.yarnpkg.com/fast-xml-parser/-/fast-xml-parser-4.4.1.tgz#86dbf3f18edf8739326447bcaac31b4ae7f6514f" integrity sha512-xkjOecfnKGkSsOwtZ5Pz7Us/T6mrbPQrq0nh+aCO5V9nk5NLWmasAHumTKjiPJPWANe+kAZ84Jc8ooJkzZ88Sw== dependencies: strnum "^1.0.5" +fastest-levenshtein@^1.0.16: + version "1.0.16" + resolved "https://registry.yarnpkg.com/fastest-levenshtein/-/fastest-levenshtein-1.0.16.tgz#210e61b6ff181de91ea9b3d1b84fdedd47e034e5" + integrity sha512-eRnCtTTtGZFpQCwhJiUOuxPQWRXVKYDn0b2PeHfXL6/Zi53SLAzAHfVhVWK2AryC/WH05kGfxhFIPvTF0SXQzg== + fastq@^1.6.0: version "1.13.0" resolved "https://registry.yarnpkg.com/fastq/-/fastq-1.13.0.tgz#616760f88a7526bdfc596b7cab8c18938c36b98c" @@ -10801,13 +11168,6 @@ fengari@^0.1.4: sprintf-js "^1.1.1" tmp "^0.0.33" -fetch-cookie@0.10.1: - version "0.10.1" - resolved "https://registry.yarnpkg.com/fetch-cookie/-/fetch-cookie-0.10.1.tgz#5ea88f3d36950543c87997c27ae2aeafb4b5c4d4" - integrity sha512-beB+VEd4cNeVG1PY+ee74+PkuCQnik78pgLi5Ah/7qdUfov8IctU0vLUbBT8/10Ma5GMBeI4wtxhGrEfKNYs2g== - dependencies: - tough-cookie "^2.3.3 || ^3.0.1 || ^4.0.0" - 
fetch-cookie@0.11.0: version "0.11.0" resolved "https://registry.yarnpkg.com/fetch-cookie/-/fetch-cookie-0.11.0.tgz#e046d2abadd0ded5804ce7e2cae06d4331c15407" @@ -10815,6 +11175,14 @@ fetch-cookie@0.11.0: dependencies: tough-cookie "^2.3.3 || ^3.0.1 || ^4.0.0" +fetch-cookie@2.2.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/fetch-cookie/-/fetch-cookie-2.2.0.tgz#01086b6b5b1c3e08f15ffd8647b02ca100377365" + integrity sha512-h9AgfjURuCgA2+2ISl8GbavpUdR+WGAM2McW/ovn4tVccegp8ZqCKWSBR8uRdM8dDNlx5WdKRWxBYUwteLDCNQ== + dependencies: + set-cookie-parser "^2.4.8" + tough-cookie "^4.0.0" + fflate@^0.4.8: version "0.4.8" resolved "https://registry.yarnpkg.com/fflate/-/fflate-0.4.8.tgz#f90b82aefbd8ac174213abb338bd7ef848f0f5ae" @@ -10844,7 +11212,7 @@ file-type@^12.1.0: resolved "https://registry.yarnpkg.com/file-type/-/file-type-12.4.2.tgz#a344ea5664a1d01447ee7fb1b635f72feb6169d9" integrity sha512-UssQP5ZgIOKelfsaB5CuGAL+Y+q7EmONuiwF3N5HAH0t27rvrttgi6Ra9k/+DVaY9UF6+ybxu5pOXLUdA8N7Vg== -file-type@^16.5.4: +file-type@^16.0.0: version "16.5.4" resolved "https://registry.yarnpkg.com/file-type/-/file-type-16.5.4.tgz#474fb4f704bee427681f98dd390058a172a6c2fd" integrity sha512-/yFHK0aGjFEgDJjEKP0pWCplsPFPhwyfwevf/pVxiN0tmE4L9LmwWxWukdJSHdoCli4VgQLehjJtwQBnqmsKcw== @@ -10998,10 +11366,10 @@ fn.name@1.x.x: resolved "https://registry.yarnpkg.com/fn.name/-/fn.name-1.1.0.tgz#26cad8017967aea8731bc42961d04a3d5988accc" integrity sha512-GRnmB5gPyJpAhTQdSZTSp9uaPSvl09KoYcMQtsB9rQoOmzs9dH6ffeccH+Z+cv6P68Hu5bC6JjRh4Ah/mHSNRw== -follow-redirects@^1.15.0: - version "1.15.6" - resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.6.tgz#7f815c0cda4249c74ff09e95ef97c23b5fd0399b" - integrity sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA== +follow-redirects@^1.15.6: + version "1.15.9" + resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.9.tgz#a604fa10e443bf98ca94228d9eebcc2e8a2c8ee1" + integrity sha512-gew4GsXizNgdoRyqmyfMHyAmXsZDk6mHkSxZFCzW9gwlbtOW44CDtYavM+y+72qD/Vq2l550kMF52DT8fOLJqQ== for-each@^0.3.3: version "0.3.3" @@ -11244,6 +11612,17 @@ gaxios@^6.0.0, gaxios@^6.1.1: node-fetch "^2.6.9" uuid "^10.0.0" +gaxios@^6.0.2: + version "6.7.1" + resolved "https://registry.yarnpkg.com/gaxios/-/gaxios-6.7.1.tgz#ebd9f7093ede3ba502685e73390248bb5b7f71fb" + integrity sha512-LDODD4TMYx7XXdpwxAVRAIAuB0bzv0s+ywFonY46k126qzQHT9ygyoa9tncmOiQmmDrik65UYsEkv3lbfqQ3yQ== + dependencies: + extend "^3.0.2" + https-proxy-agent "^7.0.1" + is-stream "^2.0.0" + node-fetch "^2.6.9" + uuid "^9.0.1" + gcp-metadata@^5.3.0: version "5.3.0" resolved "https://registry.yarnpkg.com/gcp-metadata/-/gcp-metadata-5.3.0.tgz#6f45eb473d0cb47d15001476b48b663744d25408" @@ -11651,6 +12030,14 @@ globalthis@^1.0.1, globalthis@^1.0.3: dependencies: define-properties "^1.1.3" +globalthis@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/globalthis/-/globalthis-1.0.4.tgz#7430ed3a975d97bfb59bcce41f5cabbafa651236" + integrity sha512-DpLKbNU4WylpxJykQujfCcwYWiV/Jhm50Goo0wrVILAv5jOr9d+H+UR3PhSCD2rCCEIg0uc+G+muBTwD54JhDQ== + dependencies: + define-properties "^1.2.1" + gopd "^1.0.1" + globalyzer@0.1.0: version "0.1.0" resolved "https://registry.yarnpkg.com/globalyzer/-/globalyzer-0.1.0.tgz#cb76da79555669a1519d5a8edf093afaa0bf1465" @@ -11707,6 +12094,18 @@ google-auth-library@^9.3.0: gtoken "^7.0.0" jws "^4.0.0" +google-auth-library@^9.6.3: + version "9.15.0" + resolved 
"https://registry.yarnpkg.com/google-auth-library/-/google-auth-library-9.15.0.tgz#1b009c08557929c881d72f953f17e839e91b009b" + integrity sha512-7ccSEJFDFO7exFbO6NRyC+xH8/mZ1GZGG2xxx9iHxZWcjUjJpjWxIMw3cofAKcueZ6DATiukmmprD7yavQHOyQ== + dependencies: + base64-js "^1.3.0" + ecdsa-sig-formatter "^1.0.11" + gaxios "^6.1.1" + gcp-metadata "^6.1.0" + gtoken "^7.0.0" + jws "^4.0.0" + google-gax@^4.3.3: version "4.3.7" resolved "https://registry.yarnpkg.com/google-gax/-/google-gax-4.3.7.tgz#f1870902d09c54c5d1735ef1ee7903d4458d6a49" @@ -12044,6 +12443,11 @@ html-encoding-sniffer@^3.0.0: dependencies: whatwg-encoding "^2.0.0" +html-entities@^2.5.2: + version "2.5.2" + resolved "https://registry.yarnpkg.com/html-entities/-/html-entities-2.5.2.tgz#201a3cf95d3a15be7099521620d19dfb4f65359f" + integrity sha512-K//PSRMQk4FZ78Kyau+mZurHn3FH0Vwr+H36eE0rPbeYkRRi9YxceYPhuN60UwWorxyKHhqoAJl2OFKa4BVtaA== + html-escaper@^2.0.0: version "2.0.2" resolved "https://registry.yarnpkg.com/html-escaper/-/html-escaper-2.0.2.tgz#dfd60027da36a36dfcbe236262c00a5822681453" @@ -12092,7 +12496,7 @@ http-cookie-agent@^4.0.2: dependencies: agent-base "^6.0.2" -http-errors@2.0.0: +http-errors@2.0.0, http-errors@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-2.0.0.tgz#b7774a1486ef73cf7667ac9ae0858c012c57b9d3" integrity sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ== @@ -12142,6 +12546,14 @@ http-proxy-agent@^5.0.0: agent-base "6" debug "4" +http-proxy-agent@^7.0.0: + version "7.0.2" + resolved "https://registry.yarnpkg.com/http-proxy-agent/-/http-proxy-agent-7.0.2.tgz#9a8b1f246866c028509486585f62b8f2c18c270e" + integrity sha512-T1gkAiYYDWYx3V5Bmyu7HcfcvL7mUrTWiM6yOfa3PIphViJ/gFPbvidQ+veqSOHci/PxBcDabeUNCzpOODJZig== + dependencies: + agent-base "^7.1.0" + debug "^4.3.4" + http-signature@~1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/http-signature/-/http-signature-1.2.0.tgz#9aecd925114772f3d95b65a60abb8f7c18fbace1" @@ -12159,7 +12571,7 @@ https-proxy-agent@^5.0.0, https-proxy-agent@^5.0.1: agent-base "6" debug "4" -https-proxy-agent@^7.0.1: +https-proxy-agent@^7.0.0, https-proxy-agent@^7.0.1, https-proxy-agent@^7.0.2: version "7.0.5" resolved "https://registry.yarnpkg.com/https-proxy-agent/-/https-proxy-agent-7.0.5.tgz#9e8b5013873299e11fab6fd548405da2d6c602b2" integrity sha512-1e4Wqeblerz+tMKPIq2EMGiiWW1dIjZOksyHWSUm1rmuvw/how9hBHZ38lAGj5ID4Ik6EdkOw7NmWPy6LAwalw== @@ -12305,13 +12717,13 @@ import-from@^3.0.0: dependencies: resolve-from "^5.0.0" -import-in-the-middle@^1.7.3: - version "1.7.3" - resolved "https://registry.yarnpkg.com/import-in-the-middle/-/import-in-the-middle-1.7.3.tgz#ffa784cdd57a47d2b68d2e7dd33070ff06baee43" - integrity sha512-R2I11NRi0lI3jD2+qjqyVlVEahsejw7LDnYEbGb47QEFjczE3bZYsmWheCTQA+LFs2DzOQxR7Pms7naHW1V4bQ== +import-in-the-middle@1.11.2, import-in-the-middle@^1.7.3: + version "1.11.2" + resolved "https://registry.yarnpkg.com/import-in-the-middle/-/import-in-the-middle-1.11.2.tgz#dd848e72b63ca6cd7c34df8b8d97fc9baee6174f" + integrity sha512-gK6Rr6EykBcc6cVWRSBR5TWf8nn6hZMYSRYqCcHa0l0d1fPK7JSYo6+Mlmck76jIX9aL/IZ71c06U2VpFwl1zA== dependencies: acorn "^8.8.2" - acorn-import-assertions "^1.9.0" + acorn-import-attributes "^1.9.5" cjs-module-lexer "^1.2.2" module-details-from-path "^1.0.3" @@ -12520,11 +12932,11 @@ ip@^2.0.0: integrity sha512-WKa+XuLG1A1R0UWhl2+1XQSi+fZWMsYKffMZTTYsiZaUD8k2yDAj5atimTUD2TZkyCkNEeYE5NhFZmupOGtjYQ== ipaddr.js@^2.1.0: - version "2.1.0" - resolved 
"https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-2.1.0.tgz#2119bc447ff8c257753b196fc5f1ce08a4cdf39f" - integrity sha512-LlbxQ7xKzfBusov6UMi4MFpEg0m+mAm9xyNGEduwXMEDuf4WfzB/RZwMVYEd7IKGvh4IUkEXYxtAVu9T3OelJQ== + version "2.2.0" + resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-2.2.0.tgz#d33fa7bac284f4de7af949638c9d68157c6b92e8" + integrity sha512-Ag3wB2o37wslZS19hZqorUnrnzSkpOVy+IiiDEiTqNubEYpYuHWIf6K4psgN2ZWKExS4xhVCrRVfb/wfW8fWJA== -is-arguments@^1.1.1: +is-arguments@^1.0.4, is-arguments@^1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/is-arguments/-/is-arguments-1.1.1.tgz#15b3f88fda01f2a97fec84ca761a560f123efa9b" integrity sha512-8Q7EARjzEnKpt/PCD7e1cgUS0a6X8u5tdSiMqXhojOdoV9TsMsiO+9VLC5vAmO8N7/GmXn7yjR8qnA6bVAEzfA== @@ -12572,6 +12984,11 @@ is-boolean-object@^1.1.0: call-bind "^1.0.2" has-tostringtag "^1.0.0" +is-buffer@^2.0.5: + version "2.0.5" + resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-2.0.5.tgz#ebc252e400d22ff8d77fa09888821a24a658c191" + integrity sha512-i2R6zNFDwgEHJyQUtJEk0XFi1i0dPFn/oqjK3/vPCcDeJvW5NQ83V8QbicfF1SupOaB0h8ntgBC2YiE7dfyctQ== + is-builtin-module@^3.2.1: version "3.2.1" resolved "https://registry.yarnpkg.com/is-builtin-module/-/is-builtin-module-3.2.1.tgz#f03271717d8654cfcaf07ab0463faa3571581169" @@ -12907,7 +13324,7 @@ is-type-of@^1.0.0: is-class-hotfix "~0.0.6" isstream "~0.1.2" -is-typed-array@^1.1.13: +is-typed-array@^1.1.13, is-typed-array@^1.1.3: version "1.1.13" resolved "https://registry.yarnpkg.com/is-typed-array/-/is-typed-array-1.1.13.tgz#d6c5ca56df62334959322d7d7dd1cca50debe229" integrity sha512-uZ25/bUAlUY5fR4OKT4rZQEBrzQWYV9ZJYGGsUmEJ6thodVJ1HX64ePQ6Z0qPWP+m+Uq6e9UugrE38jeYsDSMw== @@ -13040,14 +13457,6 @@ isolated-vm@^4.7.2: dependencies: prebuild-install "^7.1.1" -isomorphic-fetch@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/isomorphic-fetch/-/isomorphic-fetch-3.0.0.tgz#0267b005049046d2421207215d45d6a262b8b8b4" - integrity sha512-qvUtwJ3j6qwsF3jLxkZ72qCgjMysPzDfeV240JHiGZsANBYd+EEuu35v7dfrJ9Up0Ak07D7GGSkGhCHTqg/5wA== - dependencies: - node-fetch "^2.6.1" - whatwg-fetch "^3.4.1" - isstream@~0.1.2: version "0.1.2" resolved "https://registry.yarnpkg.com/isstream/-/isstream-0.1.2.tgz#47e63f7af55afa6f92e1500e690eb8b8529c099a" @@ -13546,20 +13955,43 @@ jest@29.7.0: import-local "^3.0.2" jest-cli "^29.7.0" -jimp@0.22.12: - version "0.22.12" - resolved "https://registry.yarnpkg.com/jimp/-/jimp-0.22.12.tgz#f99d1f3ec0d9d930cb7bd8f5b479859ee3a15694" - integrity sha512-R5jZaYDnfkxKJy1dwLpj/7cvyjxiclxU3F4TrI/J4j2rS0niq6YDUMoPn5hs8GDpO+OZGo7Ky057CRtWesyhfg== +jimp@1.1.4: + version "1.1.4" + resolved "https://registry.yarnpkg.com/jimp/-/jimp-1.1.4.tgz#943356f27559815690a3c2e29fa67ecfd9a92658" + integrity sha512-DL82Spu4H7B332nhddz5Cq9J0WEa5mc9d6BJQfeLHf2LOAMg79A+74KRKKzogaLgqK8APGfoWLwca7KjjvBgig== dependencies: - "@jimp/custom" "^0.22.12" - "@jimp/plugins" "^0.22.12" - "@jimp/types" "^0.22.12" - regenerator-runtime "^0.13.3" + "@jimp/core" "1.1.4" + "@jimp/diff" "1.1.4" + "@jimp/js-bmp" "1.1.4" + "@jimp/js-gif" "1.1.4" + "@jimp/js-jpeg" "1.1.4" + "@jimp/js-png" "1.1.4" + "@jimp/js-tiff" "1.1.4" + "@jimp/plugin-blit" "1.1.4" + "@jimp/plugin-blur" "1.1.4" + "@jimp/plugin-circle" "1.1.4" + "@jimp/plugin-color" "1.1.4" + "@jimp/plugin-contain" "1.1.4" + "@jimp/plugin-cover" "1.1.4" + "@jimp/plugin-crop" "1.1.4" + "@jimp/plugin-displace" "1.1.4" + "@jimp/plugin-dither" "1.1.4" + "@jimp/plugin-fisheye" "1.1.4" + "@jimp/plugin-flip" "1.1.4" + "@jimp/plugin-hash" "1.1.4" + "@jimp/plugin-mask" "1.1.4" + 
"@jimp/plugin-print" "1.1.4" + "@jimp/plugin-quantize" "1.1.4" + "@jimp/plugin-resize" "1.1.4" + "@jimp/plugin-rotate" "1.1.4" + "@jimp/plugin-threshold" "1.1.4" + "@jimp/types" "1.1.4" + "@jimp/utils" "1.1.4" -jmespath@0.15.0: - version "0.15.0" - resolved "https://registry.yarnpkg.com/jmespath/-/jmespath-0.15.0.tgz#a3f222a9aae9f966f5d27c796510e28091764217" - integrity sha512-+kHj8HXArPfpPEKGLZ+kB5ONRTCiGQXo8RQYL0hH8t6pWXUBBK5KkkQmTNOwKK4LEsd0yTsgtjJVm4UBSZea4w== +jmespath@0.16.0: + version "0.16.0" + resolved "https://registry.yarnpkg.com/jmespath/-/jmespath-0.16.0.tgz#b15b0a85dfd4d930d43e69ed605943c802785076" + integrity sha512-9FzQjJ7MATs1tSpnco1K6ayiYE3figslrXA72G2HQ/n76RzvYlofyi5QM+iX4YRs/pu3yzxlVQSST23+dMDknw== joi@17.6.0: version "17.6.0" @@ -13617,11 +14049,6 @@ js-yaml@^3.10.0, js-yaml@^3.13.1, js-yaml@^3.14.1: argparse "^1.0.7" esprima "^4.0.0" -jsbi@^4.3.0: - version "4.3.0" - resolved "https://registry.yarnpkg.com/jsbi/-/jsbi-4.3.0.tgz#b54ee074fb6fcbc00619559305c8f7e912b04741" - integrity sha512-SnZNcinB4RIcnEyZqFPdGPVgrg2AcnykiBy0sHVJQKHYeaLUvi3Exj+iaPpLnFVkDPZIV4U0yvgC9/R4uEAZ9g== - jsbn@~0.1.0: version "0.1.1" resolved "https://registry.yarnpkg.com/jsbn/-/jsbn-0.1.1.tgz#a5e654c2e5a2deb5f201d96cefbca80c0ef2f513" @@ -13692,6 +14119,11 @@ jsdom@^21.1.1: ws "^8.13.0" xml-name-validator "^4.0.0" +jsep@^1.3.8: + version "1.4.0" + resolved "https://registry.yarnpkg.com/jsep/-/jsep-1.4.0.tgz#19feccbfa51d8a79f72480b4b8e40ce2e17152f0" + integrity sha512-B7qPcEVE3NVkmSJbaYxvv4cHkVW7DQsZz13pUMrfS8z8Q/BuShN+gcTXrUlPiGqM2/t/EEaI030bpxMqY8gMlw== + jsesc@^2.5.1: version "2.5.2" resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-2.5.2.tgz#80564d2e483dacf6e8ef209650a67df3f0c283a4" @@ -13795,6 +14227,15 @@ jsonparse@^1.2.0, jsonparse@^1.3.1: resolved "https://registry.yarnpkg.com/jsonparse/-/jsonparse-1.3.1.tgz#3f4dae4a91fac315f71062f8521cc239f1366280" integrity sha512-POQXvpdL69+CluYsillJ7SUhKvytYjW9vG/GKpnf+xP8UWgYEM/RaMzHHofbALDiKbbP1W8UEYmgGl39WkPZsg== +jsonpath-plus@^9.0.0: + version "9.0.0" + resolved "https://registry.yarnpkg.com/jsonpath-plus/-/jsonpath-plus-9.0.0.tgz#bb8703ee481531142bca8dee9a42fe72b8358a7f" + integrity sha512-bqE77VIDStrOTV/czspZhTn+o27Xx9ZJRGVkdVShEtPoqsIx5yALv3lWVU6y+PqYvWPJNWE7ORCQheQkEe0DDA== + dependencies: + "@jsep-plugin/assignment" "^1.2.1" + "@jsep-plugin/regex" "^1.0.3" + jsep "^1.3.8" + jsonschema@1.4.0: version "1.4.0" resolved "https://registry.yarnpkg.com/jsonschema/-/jsonschema-1.4.0.tgz#1afa34c4bc22190d8e42271ec17ac8b3404f87b2" @@ -14261,6 +14702,13 @@ level-codec@9.0.2, level-codec@^9.0.0: dependencies: buffer "^5.6.0" +level-concat-iterator@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/level-concat-iterator/-/level-concat-iterator-3.1.0.tgz#5235b1f744bc34847ed65a50548aa88d22e881cf" + integrity sha512-BWRCMHBxbIqPxJ8vHOvKUsaO0v1sLYZtjN3K2iZJsRBYtp+ONsY6Jfi6hy9K3+zolgQRryhIn2NRZjZnWJ9NmQ== + dependencies: + catering "^2.1.0" + level-concat-iterator@~2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/level-concat-iterator/-/level-concat-iterator-2.0.1.tgz#1d1009cf108340252cb38c51f9727311193e6263" @@ -14363,6 +14811,11 @@ level-sublevel@^5.2.0: string-range "~1.2.1" xtend "~2.0.4" +level-supports@^2.0.1: + version "2.1.0" + resolved "https://registry.yarnpkg.com/level-supports/-/level-supports-2.1.0.tgz#9af908d853597ecd592293b2fad124375be79c5f" + integrity sha512-E486g1NCjW5cF78KGPrMDRBYzPuueMZ6VBXHT6gC7A8UYWGiM14fGgp+s/L1oFfDWSPV/+SFkYCmZ0SiESkRKA== + level-supports@~1.0.0: version "1.0.1" resolved 
"https://registry.yarnpkg.com/level-supports/-/level-supports-1.0.1.tgz#2f530a596834c7301622521988e2c36bb77d122d" @@ -14395,6 +14848,15 @@ leveldown@5.6.0, leveldown@^5.4.0: napi-macros "~2.0.0" node-gyp-build "~4.1.0" +leveldown@6.1.1: + version "6.1.1" + resolved "https://registry.yarnpkg.com/leveldown/-/leveldown-6.1.1.tgz#0f0e480fa88fd807abf94c33cb7e40966ea4b5ce" + integrity sha512-88c+E+Eizn4CkQOBHwqlCJaTNEjGpaEIikn1S+cINc5E9HEvJ77bqY4JY/HxT5u0caWqsc3P3DcFIKBI1vHt+A== + dependencies: + abstract-leveldown "^7.2.0" + napi-macros "~2.0.0" + node-gyp-build "^4.3.0" + levelup@4.4.0, levelup@^4.3.2: version "4.4.0" resolved "https://registry.yarnpkg.com/levelup/-/levelup-4.4.0.tgz#f89da3a228c38deb49c48f88a70fb71f01cafed6" @@ -14489,20 +14951,6 @@ lines-and-columns@~2.0.3: resolved "https://registry.yarnpkg.com/lines-and-columns/-/lines-and-columns-2.0.3.tgz#b2f0badedb556b747020ab8ea7f0373e22efac1b" integrity sha512-cNOjgCnLB+FnvWWtyRTzmB3POJ+cXxTA81LoW7u8JdmhfXzriropYwpjShnz1QLLWsQwY7nIxoDmcPTwphDK9w== -load-bmfont@^1.4.1: - version "1.4.1" - resolved "https://registry.yarnpkg.com/load-bmfont/-/load-bmfont-1.4.1.tgz#c0f5f4711a1e2ccff725a7b6078087ccfcddd3e9" - integrity sha512-8UyQoYmdRDy81Brz6aLAUhfZLwr5zV0L3taTQ4hju7m6biuwiWiJXjPhBJxbUQJA8PrkvJ/7Enqmwk2sM14soA== - dependencies: - buffer-equal "0.0.1" - mime "^1.3.4" - parse-bmfont-ascii "^1.0.3" - parse-bmfont-binary "^1.0.5" - parse-bmfont-xml "^1.1.4" - phin "^2.9.1" - xhr "^2.0.1" - xtend "^4.0.0" - load-json-file@6.2.0: version "6.2.0" resolved "https://registry.yarnpkg.com/load-json-file/-/load-json-file-6.2.0.tgz#5c7770b42cafa97074ca2848707c61662f4251a1" @@ -14728,12 +15176,12 @@ log-symbols@^4.1.0: chalk "^4.1.0" is-unicode-supported "^0.1.0" -logform@^2.3.2, logform@^2.4.0: - version "2.5.1" - resolved "https://registry.yarnpkg.com/logform/-/logform-2.5.1.tgz#44c77c34becd71b3a42a3970c77929e52c6ed48b" - integrity sha512-9FyqAm9o9NKKfiAKfZoYo9bGXXuwMkxQiQttkT4YjjVtQVIQtK6LmVtlxmCaFswo6N4AfEkHqZTV0taDtPotNg== +logform@^2.7.0: + version "2.7.0" + resolved "https://registry.yarnpkg.com/logform/-/logform-2.7.0.tgz#cfca97528ef290f2e125a08396805002b2d060d1" + integrity sha512-TFYA4jnP7PVbmlBIfhlSe+WKxs9dklXMTEGcBCIvLhE/Tn3H6Gk1norupVW7m5Cnd4bLcr08AytbyV/xj7f/kQ== dependencies: - "@colors/colors" "1.5.0" + "@colors/colors" "1.6.0" "@types/triple-beam" "^1.3.2" fecha "^4.2.0" ms "^2.1.1" @@ -15127,7 +15575,7 @@ mime-kind@^3.0.0: file-type "^12.1.0" mime-types "^2.1.24" -mime-types@^2.0.8, mime-types@^2.1.12, mime-types@^2.1.18, mime-types@^2.1.24, mime-types@^2.1.29, mime-types@~2.1.19, mime-types@~2.1.24, mime-types@~2.1.34: +mime-types@^2.1.12, mime-types@^2.1.18, mime-types@^2.1.24, mime-types@^2.1.29, mime-types@~2.1.19, mime-types@~2.1.24, mime-types@~2.1.34: version "2.1.35" resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.35.tgz#381a871b62a734450660ae3deee44813f70d959a" integrity sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw== @@ -15139,16 +15587,16 @@ mime@2.6.0: resolved "https://registry.yarnpkg.com/mime/-/mime-2.6.0.tgz#a2a682a95cd4d0cb1d6257e28f83da7e35800367" integrity sha512-USPkMeET31rOMiarsBNIHZKLGgvKc/LrjofAnBlOttf5ajRvqiRA8QsenbcooctK6d6Ts6aqZXBA+XbkKthiQg== +mime@3, mime@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/mime/-/mime-3.0.0.tgz#b374550dca3a0c18443b0c950a6a58f1931cf7a7" + integrity sha512-jSCU7/VB1loIWBZe14aEYHU/+1UMEHoaO7qxCOVJOw9GgH72VAWppxNcjU+x9a2k3GSIBXNKxXQFqRvvZ7vr3A== + mime@^1.3.4: version "1.6.0" resolved 
"https://registry.yarnpkg.com/mime/-/mime-1.6.0.tgz#32cd9e5c64553bd58d19a568af452acff04981b1" integrity sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg== -mime@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/mime/-/mime-3.0.0.tgz#b374550dca3a0c18443b0c950a6a58f1931cf7a7" - integrity sha512-jSCU7/VB1loIWBZe14aEYHU/+1UMEHoaO7qxCOVJOw9GgH72VAWppxNcjU+x9a2k3GSIBXNKxXQFqRvvZ7vr3A== - mimic-fn@^2.0.0, mimic-fn@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-2.1.0.tgz#7ed2c2ccccaf84d3ffcb7a69b57711fc2083401b" @@ -15400,16 +15848,16 @@ module-lookup-amd@^7.0.1: requirejs-config-file "^4.0.0" moment-timezone@^0.5.15: - version "0.5.41" - resolved "https://registry.yarnpkg.com/moment-timezone/-/moment-timezone-0.5.41.tgz#a7ad3285fd24aaf5f93b8119a9d749c8039c64c5" - integrity sha512-e0jGNZDOHfBXJGz8vR/sIMXvBIGJJcqFjmlg9lmE+5KX1U7/RZNMswfD8nKnNCnQdKTIj50IaRKwl1fvMLyyRg== + version "0.5.46" + resolved "https://registry.yarnpkg.com/moment-timezone/-/moment-timezone-0.5.46.tgz#a21aa6392b3c6b3ed916cd5e95858a28d893704a" + integrity sha512-ZXm9b36esbe7OmdABqIWJuBBiLLwAjrN7CE+7sYdCCx82Nabt1wHDj8TVseS59QIlfFPbOoiBPm6ca9BioG4hw== dependencies: moment "^2.29.4" moment@^2.29.4: - version "2.29.4" - resolved "https://registry.yarnpkg.com/moment/-/moment-2.29.4.tgz#3dbe052889fe7c1b2ed966fcb3a77328964ef108" - integrity sha512-5LC9SOxjSc2HF6vO2CyuTDNivEdoz2IvyJJGj6X8DJ0eFyfszE0QiEd+iXmBvUP3WHxSjFH/vIsA0EN00cgr8w== + version "2.30.1" + resolved "https://registry.yarnpkg.com/moment/-/moment-2.30.1.tgz#f8c91c07b7a786e30c59926df530b4eac96974ae" + integrity sha512-uEmtNhbDOrWPFS+hdjFCBfy9f2YoyzRpwcl+DqpC6taX21FzsTLQVbMV/W7PzNSX6x/bhC1zA3c2UQ5NzH6how== mongodb-connection-string-url@^3.0.0: version "3.0.0" @@ -15469,17 +15917,17 @@ msgpackr@1.10.1, msgpackr@^1.5.2: optionalDependencies: msgpackr-extract "^3.0.2" -mssql@10.0.1: - version "10.0.1" - resolved "https://registry.yarnpkg.com/mssql/-/mssql-10.0.1.tgz#96053ae91b96fdc0469b9d8ca34663d448075bdf" - integrity sha512-k0Xkav/3OppZs8Kj+FIo7k7ejbcsVNxp5/ePayxfXzuBZhxD/Y/RhIhrtfHyH6FmlJnBQPj7eDI2IN7B0BiSxQ== +mssql@11.0.1: + version "11.0.1" + resolved "https://registry.yarnpkg.com/mssql/-/mssql-11.0.1.tgz#a32ab7763bfbb3f5d970e47563df3911fc04e21d" + integrity sha512-KlGNsugoT90enKlR8/G36H0kTxPthDhmtNUCwEHvgRza5Cjpjoj+P2X6eMpFUDN7pFrJZsKadL4x990G8RBE1w== dependencies: "@tediousjs/connection-string" "^0.5.0" commander "^11.0.0" debug "^4.3.3" rfdc "^1.3.0" tarn "^3.0.2" - tedious "^16.4.0" + tedious "^18.2.1" multi-part-lite@^1.0.0: version "1.0.0" @@ -15660,7 +16108,7 @@ node-domexception@1.0.0: resolved "https://registry.yarnpkg.com/node-domexception/-/node-domexception-1.0.0.tgz#6888db46a1f71c0b76b3f7555016b63fe64766e5" integrity sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ== -node-fetch@2.6.0, node-fetch@2.6.7, node-fetch@^2.6.0, node-fetch@^2.6.1, node-fetch@^2.6.7, node-fetch@^2.6.9, node-fetch@^2.7.0: +node-fetch@2.6.7, node-fetch@2.6.9, node-fetch@^2.6.0, node-fetch@^2.6.1, node-fetch@^2.6.7, node-fetch@^2.6.9, node-fetch@^2.7.0: version "2.6.7" resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.7.tgz#24de9fba827e3b4ae44dc8b20256a379160052ad" integrity sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ== @@ -15683,9 +16131,9 @@ node-gyp-build@<4.0, node-gyp-build@^3.9.0: integrity 
sha512-zLcTg6P4AbcHPq465ZMFNXx7XpKKJh+7kkN699NiQWisR2uWYOWNWqRHAmbnmKiL4e9aLSlmy5U7rEMUXV59+A== node-gyp-build@^4.3.0, node-gyp-build@^4.5.0: - version "4.6.0" - resolved "https://registry.yarnpkg.com/node-gyp-build/-/node-gyp-build-4.6.0.tgz#0c52e4cbf54bbd28b709820ef7b6a3c2d6209055" - integrity sha512-NTZVKn9IylLwUzaKjkas1e4u2DLNcV4rdYagA4PWdPwW87Bi7z+BznyKSRwS/761tV/lzCGXplWsiaMjLqP2zQ== + version "4.8.4" + resolved "https://registry.yarnpkg.com/node-gyp-build/-/node-gyp-build-4.8.4.tgz#8a70ee85464ae52327772a90d66c6077a900cfc8" + integrity sha512-LA4ZjwlnUblHVgq0oBF3Jl/6h/Nvs5fzBLwdEF4nuxnFdsfajde4WfxtJr3CaiH+F6ewcIB/q4jQ4UzPyid+CQ== node-gyp-build@~4.1.0: version "4.1.1" @@ -15996,7 +16444,7 @@ nth-check@^2.0.1: nunjucks@^3.2.3: version "3.2.4" resolved "https://registry.yarnpkg.com/nunjucks/-/nunjucks-3.2.4.tgz#f0878eef528ce7b0aa35d67cc6898635fd74649e" - integrity "sha1-8IeO71KM57CqNdZ8xomGNf10ZJ4= sha512-26XRV6BhkgK0VOxfbU5cQI+ICFUtMLixv1noZn1tGU38kQH5A5nmmbk/O45xdyBhD1esk47nKrY0mvQpZIhRjQ==" + integrity sha512-26XRV6BhkgK0VOxfbU5cQI+ICFUtMLixv1noZn1tGU38kQH5A5nmmbk/O45xdyBhD1esk47nKrY0mvQpZIhRjQ== dependencies: a-sync-waterfall "^1.0.0" asap "^2.0.3" @@ -16106,6 +16554,11 @@ object-inspect@^1.13.1: resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.13.1.tgz#b96c6109324ccfef6b12216a956ca4dc2ff94bc2" integrity sha512-5qoj1RUiKOMsCCNLV1CBiPYE10sziTsnmNxkAI/rZhiD63CF7IqdFGC/XzjWjpSgLf0LxXX3bDFIh0E18f6UhQ== +object-inspect@^1.13.3: + version "1.13.3" + resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.13.3.tgz#f14c183de51130243d6d18ae149375ff50ea488a" + integrity sha512-kDCGIbxkDSXE3euJZZXzc6to7fCrKHNI/hSRQnRuQ+BWjFNzZwiFF8fj/6o2t2G9/jTj8PSIYTfCLelLZEeRpA== + object-is@^1.1.5: version "1.1.6" resolved "https://registry.yarnpkg.com/object-is/-/object-is-1.1.6.tgz#1a6a53aed2dd8f7e6775ff870bea58545956ab07" @@ -16192,7 +16645,7 @@ octal@^1.0.0: resolved "https://registry.yarnpkg.com/octal/-/octal-1.0.0.tgz#63e7162a68efbeb9e213588d58e989d1e5c4530b" integrity sha512-nnda7W8d+A3vEIY+UrDQzzboPf1vhs4JYVhff5CDkq9QNoZY7Xrxeo/htox37j9dZf7yNHevZzqtejWgy1vCqQ== -omggif@^1.0.10, omggif@^1.0.9: +omggif@^1.0.10: version "1.0.10" resolved "https://registry.yarnpkg.com/omggif/-/omggif-1.0.10.tgz#ddaaf90d4a42f532e9e7cb3a95ecdd47f17c7b19" integrity sha512-LMJTtvgc/nugXj0Vcrrs68Mn2D1r0zf630VNtqtpI1FEO7e+O9FP4gqs9AcnBaSEeoHIPm28u6qgPR0oyEpGSw== @@ -16613,23 +17066,23 @@ parse-asn1@^5.0.0, parse-asn1@^5.1.7: pbkdf2 "^3.1.2" safe-buffer "^5.2.1" -parse-bmfont-ascii@^1.0.3: +parse-bmfont-ascii@^1.0.6: version "1.0.6" resolved "https://registry.yarnpkg.com/parse-bmfont-ascii/-/parse-bmfont-ascii-1.0.6.tgz#11ac3c3ff58f7c2020ab22769079108d4dfa0285" integrity sha512-U4RrVsUFCleIOBsIGYOMKjn9PavsGOXxbvYGtMOEfnId0SVNsgehXh1DxUdVPLoxd5mvcEtvmKs2Mmf0Mpa1ZA== -parse-bmfont-binary@^1.0.5: +parse-bmfont-binary@^1.0.6: version "1.0.6" resolved "https://registry.yarnpkg.com/parse-bmfont-binary/-/parse-bmfont-binary-1.0.6.tgz#d038b476d3e9dd9db1e11a0b0e53a22792b69006" integrity sha512-GxmsRea0wdGdYthjuUeWTMWPqm2+FAd4GI8vCvhgJsFnoGhTrLhXDDupwTo7rXVAgaLIGoVHDZS9p/5XbSqeWA== -parse-bmfont-xml@^1.1.4: - version "1.1.4" - resolved "https://registry.yarnpkg.com/parse-bmfont-xml/-/parse-bmfont-xml-1.1.4.tgz#015319797e3e12f9e739c4d513872cd2fa35f389" - integrity sha512-bjnliEOmGv3y1aMEfREMBJ9tfL3WR0i0CKPj61DnSLaoxWR3nLrsQrEbCId/8rF4NyRF0cCqisSVXyQYWM+mCQ== +parse-bmfont-xml@^1.1.6: + version "1.1.6" + resolved 
"https://registry.yarnpkg.com/parse-bmfont-xml/-/parse-bmfont-xml-1.1.6.tgz#016b655da7aebe6da38c906aca16bf0415773767" + integrity sha512-0cEliVMZEhrFDwMh4SxIyVJpqYoOWDJ9P895tFuS+XuNzI5UBmBk5U5O4KuJdTnZpSBI4LFA2+ZiJaiwfSwlMA== dependencies: xml-parse-from-string "^1.0.0" - xml2js "^0.4.5" + xml2js "^0.5.0" parse-headers@^2.0.0: version "2.0.5" @@ -16815,21 +17268,21 @@ path-scurry@^1.11.1, path-scurry@^1.6.1: minipass "^5.0.0 || ^6.0.2 || ^7.0.0" path-to-regexp@1.x: - version "1.8.0" - resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-1.8.0.tgz#887b3ba9d84393e87a0a0b9f4cb756198b53548a" - integrity sha512-n43JRhlUKUAlibEJhPeir1ncUID16QnEjNpwzNdO3Lm4ywrBpBZ5oLD0I6br9evr1Y9JTqwRtAh7JLoOzAQdVA== + version "1.9.0" + resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-1.9.0.tgz#5dc0753acbf8521ca2e0f137b4578b917b10cf24" + integrity sha512-xIp7/apCFJuUHdDLWe8O1HIkb0kQrOMb/0u6FXQjemHn/ii5LrIzU6bdECnsiTF/GjZkMEKg1xdiZwNqDYlZ6g== dependencies: isarray "0.0.1" -path-to-regexp@^0.1.2: - version "0.1.7" - resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-0.1.7.tgz#df604178005f522f15eb4490e7247a1bfaa67f8c" - integrity sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ== +path-to-regexp@^0.1.10, path-to-regexp@^0.1.2: + version "0.1.11" + resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-0.1.11.tgz#a527e662c89efc4646dbfa8100bf3e847e495761" + integrity sha512-c0t+KCuUkO/YDLPG4WWzEwx3J5F/GHXsD1h/SNZfySqAIKe/BaP95x8fWtOfRJokpS5yYHRJjMtYlXD8jxnpbw== -path-to-regexp@^6.1.0: - version "6.2.1" - resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-6.2.1.tgz#d54934d6798eb9e5ef14e7af7962c945906918e5" - integrity sha512-JLyh7xT1kizaEvcaXOQwOc2/Yhw6KZOvPf1S8401UyLk86CU79LN3vl7ztXGm/pZ+YjoyAJ4rxmHwbkBXJX+yw== +path-to-regexp@^6.1.0, path-to-regexp@^6.3.0: + version "6.3.0" + resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-6.3.0.tgz#2b6a26a337737a8e1416f9272ed0766b1c0389f4" + integrity sha512-Yhpw4T9C6hPpgPeA28us07OJeqZ5EzQTkbfwuhsUg0c237RomFoETJgmp2sa3F/41gfLE6G5cqcYwznmeEeOlQ== path-type@^3.0.0: version "3.0.0" @@ -16949,11 +17402,6 @@ pgpass@1.x: dependencies: split2 "^4.1.0" -phin@^2.9.1: - version "2.9.3" - resolved "https://registry.yarnpkg.com/phin/-/phin-2.9.3.tgz#f9b6ac10a035636fb65dfc576aaaa17b8743125c" - integrity sha512-CzFr90qM24ju5f88quFC/6qohjC144rehe5n6DH900lgXmUe86+xCKc10ev56gRKC4/BkHUoG4uSiQgBiIXwDA== - picocolors@^1.0.0, picocolors@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-1.0.1.tgz#a8ad579b571952f0e5d25892de5445bcfe25aaa1" @@ -17111,12 +17559,12 @@ pirates@^4.0.4: resolved "https://registry.yarnpkg.com/pirates/-/pirates-4.0.5.tgz#feec352ea5c3268fb23a37c702ab1699f35a5f3b" integrity sha512-8V9+HQPupnaXMA23c5hvl69zXvTwTzyAYasnkb0Tts4XvO4CliqONMOnvlq26rkhLC3nWDFBJf73LU1e1VZLaQ== -pixelmatch@^4.0.2: - version "4.0.2" - resolved "https://registry.yarnpkg.com/pixelmatch/-/pixelmatch-4.0.2.tgz#8f47dcec5011b477b67db03c243bc1f3085e8854" - integrity sha512-J8B6xqiO37sU/gkcMglv6h5Jbd9xNER7aHzpfRdNmV4IbQBzBpe4l9XmbG+xPF/znacgu2jfEw+wHffaq/YkXA== +pixelmatch@^5.3.0: + version "5.3.0" + resolved "https://registry.yarnpkg.com/pixelmatch/-/pixelmatch-5.3.0.tgz#5e5321a7abedfb7962d60dbf345deda87cb9560a" + integrity sha512-o8mkY4E/+LNUf6LzX96ht6k6CEDi65k9G2rjMtBe9Oo+VPKSvl+0GKHuH/AlG+GA5LPG/i5hrekkxUc3s2HU+Q== dependencies: - pngjs "^3.0.0" + pngjs "^6.0.0" pkg-dir@^4.2.0: version "4.2.0" @@ -17146,16 +17594,16 
@@ pluralize@^8.0.0: resolved "https://registry.yarnpkg.com/pluralize/-/pluralize-8.0.0.tgz#1a6fa16a38d12a1901e0320fa017051c539ce3b1" integrity sha512-Nc3IT5yHzflTfbjgqWcCPpo7DaKy4FnpB0l/zCAW0Tc7jxAiuqSxHasntB3D7887LSrA93kDJ9IXovxJYxyLCA== -pngjs@^3.0.0: - version "3.4.0" - resolved "https://registry.yarnpkg.com/pngjs/-/pngjs-3.4.0.tgz#99ca7d725965fb655814eaf65f38f12bbdbf555f" - integrity sha512-NCrCHhWmnQklfH4MtJMRjZ2a8c80qXeMlQMv2uVp9ISJMTt562SbGd6n2oq0PaPgKm7Z6pL9E2UlLIhC+SHL3w== - pngjs@^6.0.0: version "6.0.0" resolved "https://registry.yarnpkg.com/pngjs/-/pngjs-6.0.0.tgz#ca9e5d2aa48db0228a52c419c3308e87720da821" integrity sha512-TRzzuFRRmEoSW/p1KVAmiOgPco2Irlah+bGFCeNfJXxxYGwSw7YwAOAcd7X28K/m5bjBWKsC29KyoMfHbypayg== +pngjs@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/pngjs/-/pngjs-7.0.0.tgz#a8b7446020ebbc6ac739db6c5415a65d17090e26" + integrity sha512-LKWqWJRhstyYo9pGvgor/ivk2w94eSjE3RGVuzLGlr3NmD8bf7RcYGze1mNdEHRP6TRP6rMuDHk5t44hnTRyow== + possible-typed-array-names@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/possible-typed-array-names/-/possible-typed-array-names-1.0.0.tgz#89bb63c6fada2c3e90adc4a647beeeb39cc7bf8f" @@ -17514,19 +17962,18 @@ pouch-stream@^0.4.0: inherits "^2.0.1" readable-stream "^1.0.27-1" -pouchdb-abstract-mapreduce@7.2.2: - version "7.2.2" - resolved "https://registry.yarnpkg.com/pouchdb-abstract-mapreduce/-/pouchdb-abstract-mapreduce-7.2.2.tgz#dd1b10a83f8d24361dce9aaaab054614b39f766f" - integrity sha512-7HWN/2yV2JkwMnGnlp84lGvFtnm0Q55NiBUdbBcaT810+clCGKvhssBCrXnmwShD1SXTwT83aszsgiSfW+SnBA== +pouchdb-abstract-mapreduce@9.0.0: + version "9.0.0" + resolved "https://registry.yarnpkg.com/pouchdb-abstract-mapreduce/-/pouchdb-abstract-mapreduce-9.0.0.tgz#d5f189a6f8980931835c41ea1f0b692368ce4686" + integrity sha512-SnTtqwAEiAa3uxKbc1J7LfiBViwEkKe2xkK92zxyTXPqWBvMnh4UU3GXxx7GrXTM4L9llsQ3lSjpbH4CNqG1Mw== dependencies: - pouchdb-binary-utils "7.2.2" - pouchdb-collate "7.2.2" - pouchdb-collections "7.2.2" - pouchdb-errors "7.2.2" - pouchdb-fetch "7.2.2" - pouchdb-mapreduce-utils "7.2.2" - pouchdb-md5 "7.2.2" - pouchdb-utils "7.2.2" + pouchdb-binary-utils "9.0.0" + pouchdb-collate "9.0.0" + pouchdb-errors "9.0.0" + pouchdb-fetch "9.0.0" + pouchdb-mapreduce-utils "9.0.0" + pouchdb-md5 "9.0.0" + pouchdb-utils "9.0.0" pouchdb-adapter-leveldb-core@7.2.2: version "7.2.2" @@ -17587,10 +18034,15 @@ pouchdb-binary-utils@7.2.2: dependencies: buffer-from "1.1.1" -pouchdb-collate@7.2.2: - version "7.2.2" - resolved "https://registry.yarnpkg.com/pouchdb-collate/-/pouchdb-collate-7.2.2.tgz#fc261f5ef837c437e3445fb0abc3f125d982c37c" - integrity sha512-/SMY9GGasslknivWlCVwXMRMnQ8myKHs4WryQ5535nq1Wj/ehpqWloMwxEQGvZE1Sda3LOm7/5HwLTcB8Our+w== +pouchdb-binary-utils@9.0.0: + version "9.0.0" + resolved "https://registry.yarnpkg.com/pouchdb-binary-utils/-/pouchdb-binary-utils-9.0.0.tgz#eafed32c21e92ef4b253456f9e53c4cf2cfd99fd" + integrity sha512-2OMtgDZi82vqs+zNDE0YiYjOaWkYCUcZJZKK3WkRr+XYRu+2B7umJrnygJFhUwoGedBbHSrlQBLhdNV3F1AX1A== + +pouchdb-collate@9.0.0: + version "9.0.0" + resolved "https://registry.yarnpkg.com/pouchdb-collate/-/pouchdb-collate-9.0.0.tgz#654f6766927ada60603ba25b6b2ae533564fa302" + integrity sha512-TrnEDNZEmIIl+W3xKUO8h+geqVLQ90oZe5ujPkl8myUzpREULWXWQBnV5EzPXVEKDBpJlb8T3I6oy/zdWGQpdA== pouchdb-collections@7.2.2: version "7.2.2" @@ -17604,27 +18056,31 @@ pouchdb-errors@7.2.2: dependencies: inherits "2.0.4" -pouchdb-fetch@7.2.2: - version "7.2.2" - resolved 
"https://registry.yarnpkg.com/pouchdb-fetch/-/pouchdb-fetch-7.2.2.tgz#492791236d60c899d7e9973f9aca0d7b9cc02230" - integrity sha512-lUHmaG6U3zjdMkh8Vob9GvEiRGwJfXKE02aZfjiVQgew+9SLkuOxNw3y2q4d1B6mBd273y1k2Lm0IAziRNxQnA== - dependencies: - abort-controller "3.0.0" - fetch-cookie "0.10.1" - node-fetch "2.6.0" +pouchdb-errors@9.0.0: + version "9.0.0" + resolved "https://registry.yarnpkg.com/pouchdb-errors/-/pouchdb-errors-9.0.0.tgz#f84269ce3327abef9455c0a90a51c26d7dca20c6" + integrity sha512-961PSMLhW0UqqdJ566g+CdLZ5pkBJRd6l4WWpCDdD0USvE4xYfYGzv43w7nZZBw1k3Xdy092yqPge7yX/tfnyw== -pouchdb-find@7.2.2: - version "7.2.2" - resolved "https://registry.yarnpkg.com/pouchdb-find/-/pouchdb-find-7.2.2.tgz#1227afdd761812d508fe0794b3e904518a721089" - integrity sha512-BmFeFVQ0kHmDehvJxNZl9OmIztCjPlZlVSdpijuFbk/Fi1EFPU1BAv3kLC+6DhZuOqU/BCoaUBY9sn66pPY2ag== +pouchdb-fetch@9.0.0: + version "9.0.0" + resolved "https://registry.yarnpkg.com/pouchdb-fetch/-/pouchdb-fetch-9.0.0.tgz#a2cf407c75c9fc68a1924b08c9b574d28e1be7dd" + integrity sha512-TbE3cUcAJQrwb9kr44tDP0X+NAbcqgjsTvcL30L4xzBNJeCPTIRjukYX80s154SHJUXBxcWRiPsMmNqpXsjfCA== dependencies: - pouchdb-abstract-mapreduce "7.2.2" - pouchdb-collate "7.2.2" - pouchdb-errors "7.2.2" - pouchdb-fetch "7.2.2" - pouchdb-md5 "7.2.2" - pouchdb-selector-core "7.2.2" - pouchdb-utils "7.2.2" + fetch-cookie "2.2.0" + node-fetch "2.6.9" + +pouchdb-find@9.0.0: + version "9.0.0" + resolved "https://registry.yarnpkg.com/pouchdb-find/-/pouchdb-find-9.0.0.tgz#3d1b80d2adc9f9fd86c2ad559cd0144e406cb539" + integrity sha512-vvVhq4eEOmSkwSRwf2NBYtdhURB7ryJ7sUI4WDN00GuLUj2g8jAXBJuZIryVgdYt/5S5cfn70iRL6Eow+LFhpA== + dependencies: + pouchdb-abstract-mapreduce "9.0.0" + pouchdb-collate "9.0.0" + pouchdb-errors "9.0.0" + pouchdb-fetch "9.0.0" + pouchdb-md5 "9.0.0" + pouchdb-selector-core "9.0.0" + pouchdb-utils "9.0.0" pouchdb-json@7.2.2: version "7.2.2" @@ -17633,15 +18089,12 @@ pouchdb-json@7.2.2: dependencies: vuvuzela "1.0.3" -pouchdb-mapreduce-utils@7.2.2: - version "7.2.2" - resolved "https://registry.yarnpkg.com/pouchdb-mapreduce-utils/-/pouchdb-mapreduce-utils-7.2.2.tgz#13a46a3cc2a3f3b8e24861da26966904f2963146" - integrity sha512-rAllb73hIkU8rU2LJNbzlcj91KuulpwQu804/F6xF3fhZKC/4JQMClahk+N/+VATkpmLxp1zWmvmgdlwVU4HtQ== +pouchdb-mapreduce-utils@9.0.0: + version "9.0.0" + resolved "https://registry.yarnpkg.com/pouchdb-mapreduce-utils/-/pouchdb-mapreduce-utils-9.0.0.tgz#8a2edf30ca0fa24d095eabcfbe8ebb8f3f1160e3" + integrity sha512-Bjh8W6QXqp1j7MKmHhYYp5cYlcQsm5drD8Jd/F+ZlfNt18uiD2SQXWzGM5797+tiW/LszFGb8ttw0uHWjxufCQ== dependencies: - argsarray "0.0.1" - inherits "2.0.4" - pouchdb-collections "7.2.2" - pouchdb-utils "7.2.2" + pouchdb-utils "9.0.0" pouchdb-md5@7.2.2: version "7.2.2" @@ -17651,6 +18104,14 @@ pouchdb-md5@7.2.2: pouchdb-binary-utils "7.2.2" spark-md5 "3.0.1" +pouchdb-md5@9.0.0: + version "9.0.0" + resolved "https://registry.yarnpkg.com/pouchdb-md5/-/pouchdb-md5-9.0.0.tgz#f67a2ba627309e65f8d1ce4d4baf6a5f29164617" + integrity sha512-58xUYBvW3/s+aH0j4uOhhN8yCk0LQ254cxBzI/gbKA9PrfwHpe4zrr0L/ia5ml3A30oH1f8aTnuVMwWDkFcuww== + dependencies: + pouchdb-binary-utils "9.0.0" + spark-md5 "3.0.2" + pouchdb-merge@7.2.2: version "7.2.2" resolved "https://registry.yarnpkg.com/pouchdb-merge/-/pouchdb-merge-7.2.2.tgz#940d85a2b532d6a93a6cab4b250f5648511bcc16" @@ -17663,13 +18124,13 @@ pouchdb-promise@6.4.3, pouchdb-promise@^6.0.4: dependencies: lie "3.1.1" -pouchdb-selector-core@7.2.2: - version "7.2.2" - resolved 
"https://registry.yarnpkg.com/pouchdb-selector-core/-/pouchdb-selector-core-7.2.2.tgz#264d7436a8c8ac3801f39960e79875ef7f3879a0" - integrity sha512-XYKCNv9oiNmSXV5+CgR9pkEkTFqxQGWplnVhO3W9P154H08lU0ZoNH02+uf+NjZ2kjse7Q1fxV4r401LEcGMMg== +pouchdb-selector-core@9.0.0: + version "9.0.0" + resolved "https://registry.yarnpkg.com/pouchdb-selector-core/-/pouchdb-selector-core-9.0.0.tgz#6fee1df82cd5ecdbd0a034b38e6c604557d2e22a" + integrity sha512-ZYHYsdoedwm8j5tYofz+3+uUSK8i+7tRCBb01T0OuqDQb17+w5mzjHF8Ppi160xdPUPaWCo1Un+nLWGJzkmA3g== dependencies: - pouchdb-collate "7.2.2" - pouchdb-utils "7.2.2" + pouchdb-collate "9.0.0" + pouchdb-utils "9.0.0" pouchdb-utils@7.2.2: version "7.2.2" @@ -17685,6 +18146,15 @@ pouchdb-utils@7.2.2: pouchdb-md5 "7.2.2" uuid "8.1.0" +pouchdb-utils@9.0.0: + version "9.0.0" + resolved "https://registry.yarnpkg.com/pouchdb-utils/-/pouchdb-utils-9.0.0.tgz#b68f3259add50163998201d1a6d16e6a35d5d57f" + integrity sha512-xWZE5c+nAslgmLC8JBZbky8AYgdz7pKtv7KTSi6CD2tuQD0WyNKib0YnhZndeE84dksTeZlqlg56RQHsHoB2LQ== + dependencies: + pouchdb-errors "9.0.0" + pouchdb-md5 "9.0.0" + uuid "8.3.2" + pouchdb@7.3.0: version "7.3.0" resolved "https://registry.yarnpkg.com/pouchdb/-/pouchdb-7.3.0.tgz#440fbef12dfd8f9002320802528665e883a3b7f8" @@ -17711,10 +18181,30 @@ pouchdb@7.3.0: uuid "8.3.2" vuvuzela "1.0.3" -pprof-format@^2.0.7: - version "2.0.7" - resolved "https://registry.yarnpkg.com/pprof-format/-/pprof-format-2.0.7.tgz#526e4361f8b37d16b2ec4bb0696b5292de5046a4" - integrity sha512-1qWaGAzwMpaXJP9opRa23nPnt2Egi7RMNoNBptEE/XwHbcn4fC2b/4U4bKc5arkGkIh2ZabpF2bEb+c5GNHEKA== +pouchdb@9.0.0: + version "9.0.0" + resolved "https://registry.yarnpkg.com/pouchdb/-/pouchdb-9.0.0.tgz#569ee3941f7b03dd34b4b4e53132a9772981a35e" + integrity sha512-6wjFc/PzwaWz86rmMXoqdBlR/fBSkNoWO1mEJO7RZNS6n3xf+fhhXWAWtws741KpLKx84IkmmJ48tp+fhFzj4A== + dependencies: + double-ended-queue "2.1.0-0" + fetch-cookie "2.2.0" + level "6.0.1" + level-codec "9.0.2" + level-write-stream "1.0.0" + leveldown "6.1.1" + levelup "4.4.0" + ltgt "2.2.1" + node-fetch "2.6.9" + readable-stream "1.1.14" + spark-md5 "3.0.2" + through2 "3.0.2" + uuid "8.3.2" + vuvuzela "1.0.3" + +pprof-format@^2.0.7, pprof-format@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/pprof-format/-/pprof-format-2.1.0.tgz#acc8d7773bcf4faf0a3d3df11bceefba7ac06664" + integrity sha512-0+G5bHH0RNr8E5hoZo/zJYsL92MhkZjwrHp3O2IxmY8RJL9ooKeuZ8Tm0ZNBw5sGZ9TiM71sthTjWoR2Vf5/xw== preact@^10.19.3: version "10.20.1" @@ -18104,7 +18594,7 @@ querystringify@^2.1.1: resolved "https://registry.yarnpkg.com/querystringify/-/querystringify-2.2.0.tgz#3345941b4153cb9d082d8eee4cda2016a9aef7f6" integrity sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ== -queue-microtask@^1.2.2: +queue-microtask@^1.2.2, queue-microtask@^1.2.3: version "1.2.3" resolved "https://registry.yarnpkg.com/queue-microtask/-/queue-microtask-1.2.3.tgz#4929228bbc724dfac43e0efb058caf7b6cfb6243" integrity sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A== @@ -18284,6 +18774,15 @@ readable-stream@^2.0.0, readable-stream@^2.0.5, readable-stream@^2.2.2, readable string_decoder "~1.1.1" util-deprecate "~1.0.1" +readable-stream@^3.6.2: + version "3.6.2" + resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.2.tgz#56a9b36ea965c00c5a93ef31eb111a0f11056967" + integrity sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA== + dependencies: + inherits "^2.0.3" + 
string_decoder "^1.1.1" + util-deprecate "^1.0.1" + readable-stream@^4.0.0, readable-stream@^4.2.0: version "4.5.1" resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-4.5.1.tgz#3f2e4e66eab45606ac8f31597b9edb80c13b12ab" @@ -18426,11 +18925,6 @@ regenerate@^1.4.2: resolved "https://registry.yarnpkg.com/regenerate/-/regenerate-1.4.2.tgz#b9346d8827e8f5a32f7ba29637d398b69014848a" integrity sha512-zrceR/XhGYU/d/opr2EKO7aRHUeiBI8qjtfHqADTwZd6Szfy16la6kqD0MIUs5z5hx6AaKa+PixpPrR289+I0A== -regenerator-runtime@^0.13.3: - version "0.13.11" - resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.13.11.tgz#f6dca3e7ceec20590d07ada785636a90cdca17f9" - integrity sha512-kY1AZVr2Ra+t+piVaJ4gxaFaReZVH40AKNo7UCX6W+dEwBo/2oZJzqfuN1qLq1oL45o56cPaTXELwrTh8Fpggg== - regenerator-runtime@^0.14.0: version "0.14.0" resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.14.0.tgz#5e19d68eb12d486f797e15a3c6a918f7cec5eb45" @@ -18453,6 +18947,16 @@ regexp.prototype.flags@^1.5.1, regexp.prototype.flags@^1.5.2: es-errors "^1.3.0" set-function-name "^2.0.1" +regexp.prototype.flags@^1.5.3: + version "1.5.3" + resolved "https://registry.yarnpkg.com/regexp.prototype.flags/-/regexp.prototype.flags-1.5.3.tgz#b3ae40b1d2499b8350ab2c3fe6ef3845d3a96f42" + integrity sha512-vqlC04+RQoFalODCbCumG2xIOvapzVMHwsyIGM/SIE8fRhFFsXeH8/QQ+s0T0kDAhKc4k30s73/0ydkHQz6HlQ== + dependencies: + call-bind "^1.0.7" + define-properties "^1.2.1" + es-errors "^1.3.0" + set-function-name "^2.0.2" + regexparam@2.0.2: version "2.0.2" resolved "https://registry.yarnpkg.com/regexparam/-/regexparam-2.0.2.tgz#a0f6aa057c67b1c9c09508c45823c0755b1f6e58" @@ -18639,14 +19143,6 @@ restore-cursor@^3.1.0: onetime "^5.1.0" signal-exit "^3.0.2" -retry-request@^5.0.0: - version "5.0.2" - resolved "https://registry.yarnpkg.com/retry-request/-/retry-request-5.0.2.tgz#143d85f90c755af407fcc46b7166a4ba520e44da" - integrity sha512-wfI3pk7EE80lCIXprqh7ym48IHYdwmAAzESdbU8Q9l7pnRCk9LEhpbOTNKjz6FARLm/Bl5m+4F0ABxOkYUujSQ== - dependencies: - debug "^4.1.1" - extend "^3.0.2" - retry-request@^7.0.0: version "7.0.2" resolved "https://registry.yarnpkg.com/retry-request/-/retry-request-7.0.2.tgz#60bf48cfb424ec01b03fca6665dee91d06dd95f3" @@ -18671,10 +19167,10 @@ reusify@^1.0.4: resolved "https://registry.yarnpkg.com/reusify/-/reusify-1.0.4.tgz#90da382b1e126efc02146e90845a88db12925d76" integrity sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw== -rfdc@^1.3.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/rfdc/-/rfdc-1.3.0.tgz#d0b7c441ab2720d05dc4cf26e01c89631d9da08b" - integrity sha512-V2hovdzFbOi77/WajaSMXk2OLm+xNIeQdMMuB7icj7bk6zi2F8GGAxigcnDFpJHbNyNcgyJDiP+8nOrY5cZGrA== +rfdc@^1.3.0, rfdc@^1.3.1: + version "1.4.1" + resolved "https://registry.yarnpkg.com/rfdc/-/rfdc-1.4.1.tgz#778f76c4fb731d93414e8f925fbecf64cce7f6ca" + integrity sha512-q1b3N5QkRUWUl7iyylaaj3kOpIT0N2i9MqIEQXP73GVsN9cw3fdx8X63cEmWhJGi2PPCF23Ijp7ktmd39rawIA== rimraf@3.0.2, rimraf@^3.0.2: version "3.0.2" @@ -19106,6 +19602,11 @@ set-blocking@^2.0.0: resolved "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7" integrity sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw== +set-cookie-parser@^2.4.8: + version "2.7.1" + resolved "https://registry.yarnpkg.com/set-cookie-parser/-/set-cookie-parser-2.7.1.tgz#3016f150072202dfbe90fadee053573cc89d2943" + integrity 
sha512-IOc8uWeOZgnb3ptbCURJWNjWUPcO3ZnTTdzsurqERrP6nPyv+paC55vJM0LpOlT2ne+Ix+9+CRG1MNLlyZ4GjQ== + set-function-length@^1.2.1: version "1.2.2" resolved "https://registry.yarnpkg.com/set-function-length/-/set-function-length-1.2.2.tgz#aac72314198eaed975cf77b2c3b6b880695e5449" @@ -19182,6 +19683,11 @@ shell-exec@1.0.2: resolved "https://registry.yarnpkg.com/shell-exec/-/shell-exec-1.0.2.tgz#2e9361b0fde1d73f476c4b6671fa17785f696756" integrity sha512-jyVd+kU2X+mWKMmGhx4fpWbPsjvD53k9ivqetutVW/BQ+WIZoDoP4d8vUMGezV6saZsiNoW2f9GIhg9Dondohg== +shell-quote@^1.8.1: + version "1.8.1" + resolved "https://registry.yarnpkg.com/shell-quote/-/shell-quote-1.8.1.tgz#6dbf4db75515ad5bac63b4f1894c3a154c766680" + integrity sha512-6j1W9l1iAs/4xYBI1SYOVZyFcCis9b4KCLQ8fgAGG07QvzaRLVVRQvAy85yNmmZSjYjg4MWh4gNvlPujU/5LpA== + shortid@2.2.15: version "2.2.15" resolved "https://registry.yarnpkg.com/shortid/-/shortid-2.2.15.tgz#2b902eaa93a69b11120373cd42a1f1fe4437c122" @@ -19295,38 +19801,30 @@ smob@^1.0.0: resolved "https://registry.yarnpkg.com/smob/-/smob-1.5.0.tgz#85d79a1403abf128d24d3ebc1cdc5e1a9548d3ab" integrity sha512-g6T+p7QO8npa+/hNx9ohv1E5pVCmWrVCUzUXJyLdMmftX6ER0oiWY/w9knEonLpnOp6b6FenKnMfR8gqwWdwig== -snowflake-promise@^4.5.0: - version "4.5.0" - resolved "https://registry.yarnpkg.com/snowflake-promise/-/snowflake-promise-4.5.0.tgz#ceba611d27b3792966bc752c545760e0ce168c1c" - integrity sha512-IFY7Y1alCTY1WRFPIEcgCbjy7wCajwLNnJsvw2L7xdePir7y5ohh+S00PnF9zFRGbfVVlRh/VYqOYHEfERK2lg== - dependencies: - snowflake-sdk "^1.6.0" - -snowflake-sdk@^1.6.0: - version "1.9.0" - resolved "https://registry.yarnpkg.com/snowflake-sdk/-/snowflake-sdk-1.9.0.tgz#3bd089427549efc8efa4829c2d08deeffe4aded3" - integrity "sha1-O9CJQnVJ78jvpIKcLQje7/5K3tM= sha512-RtFRV2KC+ebQk/kOUg8WV42LnAu9puoan2wMXykgrAj1u4sGP/GgQyQhsAfLGwXWzn+J9JAwij07h3+6HYBmFw==" +snowflake-sdk@^1.15.0: + version "1.15.0" + resolved "https://registry.yarnpkg.com/snowflake-sdk/-/snowflake-sdk-1.15.0.tgz#cc32fa0f2869d9e5a026e293b50d387ddbd67aca" + integrity sha512-u7eNIT2JWkA8USJF6gTOCcReNrdh8V9LCazJi3F0XnX5ZJkgPz2gNSn67drT4ywqNaXdXfFM0i/yNSa58fi2Rg== dependencies: "@aws-sdk/client-s3" "^3.388.0" - "@azure/storage-blob" "^12.11.0" - "@google-cloud/storage" "^6.9.3" - "@techteamer/ocsp" "1.0.0" - agent-base "^6.0.2" + "@aws-sdk/node-http-handler" "^3.374.0" + "@azure/storage-blob" "12.18.x" + "@google-cloud/storage" "^7.7.0" + "@techteamer/ocsp" "1.0.1" asn1.js-rfc2560 "^5.0.0" asn1.js-rfc5280 "^3.0.0" - axios "^1.5.0" + axios "^1.6.8" big-integer "^1.6.43" - bignumber.js "^2.4.0" + bignumber.js "^9.1.2" binascii "0.0.2" bn.js "^5.2.1" browser-request "^0.3.3" - debug "^3.2.6" expand-tilde "^2.0.2" - extend "^3.0.2" fast-xml-parser "^4.2.5" + fastest-levenshtein "^1.0.16" generic-pool "^3.8.2" - glob "^7.1.6" - https-proxy-agent "^5.0.1" + glob "^10.0.0" + https-proxy-agent "^7.0.2" jsonwebtoken "^9.0.0" mime-types "^2.1.29" mkdirp "^1.0.3" @@ -19335,8 +19833,7 @@ snowflake-sdk@^1.6.0: open "^7.3.1" python-struct "^1.1.3" simple-lru-cache "^0.0.2" - string-similarity "^4.0.4" - tmp "^0.2.1" + toml "^3.0.0" uuid "^8.3.2" winston "^3.1.0" @@ -19366,16 +19863,16 @@ socket.io-parser@~4.2.4: "@socket.io/component-emitter" "~3.1.0" debug "~4.3.1" -socket.io@4.7.5: - version "4.7.5" - resolved "https://registry.yarnpkg.com/socket.io/-/socket.io-4.7.5.tgz#56eb2d976aef9d1445f373a62d781a41c7add8f8" - integrity sha512-DmeAkF6cwM9jSfmp6Dr/5/mfMwb5Z5qRrSXLpo3Fq5SqyU8CMF15jIN4ZhfSwu35ksM1qmHZDQ/DK5XTccSTvA== +socket.io@4.8.1: + version "4.8.1" + resolved 
"https://registry.yarnpkg.com/socket.io/-/socket.io-4.8.1.tgz#fa0eaff965cc97fdf4245e8d4794618459f7558a" + integrity sha512-oZ7iUCxph8WYRHHcjBEc9unw3adt5CmSNlppj/5Q4k2RIrhl8Z5yY2Xr4j9zj0+wzVZ0bxmYoGSzKJnRl6A4yg== dependencies: accepts "~1.3.4" base64id "~2.0.0" cors "~2.8.5" debug "~4.3.2" - engine.io "~6.5.2" + engine.io "~6.6.0" socket.io-adapter "~2.5.2" socket.io-parser "~4.2.4" @@ -19568,6 +20065,11 @@ sprintf-js@^1.1.1, sprintf-js@^1.1.2: resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.1.2.tgz#da1765262bf8c0f571749f2ad6c26300207ae673" integrity sha512-VE0SOVEHCk7Qc8ulkWw3ntAzXuqf7S2lvwQaDLRnUeIEaKNQJzV6BwmLKhOqT61aGhfUMrXeaBk+oDGCzvhcug== +sprintf-js@^1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.1.3.tgz#4914b903a2f8b685d17fdf78a70e917e872e444a" + integrity sha512-Oo+0REFV59/rz3gfJNKQiBlwfHaSESl1pcGyABQsnnIfWOFt6JNj5gCog2U6MLZ//IGYD+nA8nI+mTShREReaA== + sprintf-js@~1.0.2: version "1.0.3" resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" @@ -19598,9 +20100,9 @@ ssh2@^1.11.0, ssh2@^1.4.0: nan "^2.18.0" sshpk@^1.7.0: - version "1.17.0" - resolved "https://registry.yarnpkg.com/sshpk/-/sshpk-1.17.0.tgz#578082d92d4fe612b13007496e543fa0fbcbe4c5" - integrity sha512-/9HIEs1ZXGhSPE8X6Ccm7Nam1z8KcoCqPdI7ecm1N33EzAetWahvQWVqLZtaZQ+IDKX4IyA2o0gBzqIMkAagHQ== + version "1.18.0" + resolved "https://registry.yarnpkg.com/sshpk/-/sshpk-1.18.0.tgz#1663e55cddf4d688b86a46b77f0d5fe363aba028" + integrity sha512-2p2KJZTSqQ/I3+HX42EpYOa2l3f8Erv8MWKsy2I9uf4wA7yFIkXRffYdsx86y6z4vHtV8u7g+pPlr8/4ouAxsQ== dependencies: asn1 "~0.2.3" assert-plus "^1.0.0" @@ -19697,6 +20199,11 @@ stream-shift@^1.0.0: resolved "https://registry.yarnpkg.com/stream-shift/-/stream-shift-1.0.1.tgz#d7088281559ab2778424279b0877da3c392d5a3d" integrity sha512-AiisoFqQ0vbGcZgQPY1cdP2I76glaVA/RauYR4G4thNFgkTqr90yXTo4LYX60Jl+sIlPNHHdGSwo01AvbKUSVQ== +stream-shift@^1.0.2: + version "1.0.3" + resolved "https://registry.yarnpkg.com/stream-shift/-/stream-shift-1.0.3.tgz#85b8fab4d71010fc3ba8772e8046cc49b8a3864b" + integrity sha512-76ORR0DO1o1hlKwTbi/DM3EXWGf3ZJYO8cXX5RJwnul2DEg2oyoZyjLNoQM8WsvZiFKCRfC1O0J7iCvie3RZmQ== + stream-to-array@^2.3.0: version "2.3.0" resolved "https://registry.yarnpkg.com/stream-to-array/-/stream-to-array-2.3.0.tgz#bbf6b39f5f43ec30bc71babcb37557acecf34353" @@ -19742,11 +20249,6 @@ string-range@~1.2, string-range@~1.2.1: resolved "https://registry.yarnpkg.com/string-range/-/string-range-1.2.2.tgz#a893ed347e72299bc83befbbf2a692a8d239d5dd" integrity sha512-tYft6IFi8SjplJpxCUxyqisD3b+R2CSkomrtJYCkvuf1KuCAWgz7YXt4O0jip7efpfCemwHEzTEAO8EuOYgh3w== -string-similarity@^4.0.4: - version "4.0.4" - resolved "https://registry.yarnpkg.com/string-similarity/-/string-similarity-4.0.4.tgz#42d01ab0b34660ea8a018da8f56a3309bb8b2a5b" - integrity sha512-/q/8Q4Bl4ZKAPjj8WerIBJWALKkaPRfrvhfF8k/B23i4nzrlRj2/go1m90In7nG/3XDSbOo0+pu6RvCTM9RGMQ== - "string-width-cjs@npm:string-width@^4.2.0": version "4.2.3" resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" @@ -19784,12 +20286,13 @@ string-width@^5.0.0, string-width@^5.0.1, string-width@^5.1.2: strip-ansi "^7.0.1" string.prototype.startswith@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/string.prototype.startswith/-/string.prototype.startswith-1.0.0.tgz#92a361fb1ac172033d53eb1db3d659b0cfab6280" - integrity 
sha512-VHhsDkuf8gsw4JNRK9cIZjYe6r7PsVUutVohaBhqYAoPaRADoQH+mMgUg7Cs/TgQeDGEvI+PzPEMOdvdsCMvpg== + version "1.0.1" + resolved "https://registry.yarnpkg.com/string.prototype.startswith/-/string.prototype.startswith-1.0.1.tgz#623e2d013d93d3d2bbfbc9eed9e1010ba3f50ce8" + integrity sha512-7FoHkxvUevSBxSBXqsJgQy+IwuSPVl1jF31FEagFxkKnNKnmRLcHY6cJgxy074qrFq9T0OE36OU5aPw+z1v0yw== dependencies: - define-properties "^1.1.3" - es-abstract "^1.17.5" + call-bind "^1.0.7" + define-properties "^1.2.1" + es-abstract "^1.23.3" string.prototype.trim@^1.2.9: version "1.2.9" @@ -20326,33 +20829,21 @@ tarn@^3.0.1, tarn@^3.0.2: resolved "https://registry.yarnpkg.com/tarn/-/tarn-3.0.2.tgz#73b6140fbb881b71559c4f8bfde3d9a4b3d27693" integrity sha512-51LAVKUSZSVfI05vjPESNc5vwqqZpbXCsU+/+wxlOrUjk2SnFTt97v9ZgQrD4YmxYW1Px6w2KjaDitCfkvgxMQ== -tedious@^16.4.0: - version "16.7.1" - resolved "https://registry.yarnpkg.com/tedious/-/tedious-16.7.1.tgz#1190f30fd99a413f1dc9250dee4835cf0788b650" - integrity sha512-NmedZS0NJiTv3CoYnf1FtjxIDUgVYzEmavrc8q2WHRb+lP4deI9BpQfmNnBZZaWusDbP5FVFZCcvzb3xOlNVlQ== +tedious@^18.2.1: + version "18.6.1" + resolved "https://registry.yarnpkg.com/tedious/-/tedious-18.6.1.tgz#1c4a3f06c891be67a032117e2e25193286d44496" + integrity sha512-9AvErXXQTd6l7TDd5EmM+nxbOGyhnmdbp/8c3pw+tjaiSXW9usME90ET/CRG1LN1Y9tPMtz/p83z4Q97B4DDpw== dependencies: - "@azure/identity" "^3.4.1" + "@azure/core-auth" "^1.7.2" + "@azure/identity" "^4.2.1" "@azure/keyvault-keys" "^4.4.0" - "@js-joda/core" "^5.5.3" - bl "^6.0.3" - es-aggregate-error "^1.0.9" + "@js-joda/core" "^5.6.1" + "@types/node" ">=18" + bl "^6.0.11" iconv-lite "^0.6.3" js-md4 "^0.3.2" - jsbi "^4.3.0" native-duplexpair "^1.0.0" - node-abort-controller "^3.1.1" - sprintf-js "^1.1.2" - -teeny-request@^8.0.0: - version "8.0.3" - resolved "https://registry.yarnpkg.com/teeny-request/-/teeny-request-8.0.3.tgz#5cb9c471ef5e59f2fca8280dc3c5909595e6ca24" - integrity "sha1-XLnEce9eWfL8qCgNw8WQlZXmyiQ= sha512-jJZpA5He2y52yUhA7pyAGZlgQpcB+xLjcN0eUFxr9c8hP/H7uOXbBNVo/O0C/xVfJLJs680jvkFgVJEEvk9+ww==" - dependencies: - http-proxy-agent "^5.0.0" - https-proxy-agent "^5.0.0" - node-fetch "^2.6.1" - stream-events "^1.0.5" - uuid "^9.0.0" + sprintf-js "^1.1.3" teeny-request@^9.0.0: version "9.0.0" @@ -20494,11 +20985,6 @@ timekeeper@^2.2.0: resolved "https://registry.yarnpkg.com/timekeeper/-/timekeeper-2.3.1.tgz#2deb6e0b95d93625fda84c18d47f84a99e4eba01" integrity sha512-LeQRS7/4JcC0PgdSFnfUiStQEdiuySlCj/5SJ18D+T1n9BoY7PxKFfCwLulpHXoLUFr67HxBddQdEX47lDGx1g== -timm@^1.6.1: - version "1.7.1" - resolved "https://registry.yarnpkg.com/timm/-/timm-1.7.1.tgz#96bab60c7d45b5a10a8a4d0f0117c6b7e5aff76f" - integrity sha512-IjZc9KIotudix8bMaBW6QvMuq64BrJWFs1+4V0lXwWGQZwH+LnX87doAYhem4caOEusRP9/g6jVDQmZ8XOk1nw== - tiny-glob@^0.2.9: version "0.2.9" resolved "https://registry.yarnpkg.com/tiny-glob/-/tiny-glob-0.2.9.tgz#2212d441ac17928033b110f8b3640683129d31e2" @@ -20606,6 +21092,11 @@ token-types@^4.1.1: "@tokenizer/token" "^0.3.0" ieee754 "^1.2.1" +toml@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/toml/-/toml-3.0.0.tgz#342160f1af1904ec9d204d03a5d61222d762c5ee" + integrity sha512-y/mWCZinnvxjTKYhJ+pYxwD0mRLVvOtdS2Awbgxln6iEnt4rk0yBxeSBHkGJcPucRiG0e55mwWp+g/05rsrd6w== + toposort@^2.0.2: version "2.0.2" resolved "https://registry.yarnpkg.com/toposort/-/toposort-2.0.2.tgz#ae21768175d1559d48bef35420b2f4962f09c330" @@ -20660,9 +21151,9 @@ trim-repeated@^1.0.0: escape-string-regexp "^1.0.2" triple-beam@^1.3.0: - version "1.3.0" - resolved 
"https://registry.yarnpkg.com/triple-beam/-/triple-beam-1.3.0.tgz#a595214c7298db8339eeeee083e4d10bd8cb8dd9" - integrity sha512-XrHUvV5HpdLmIj4uVMxHggLbFSZYIn7HEWsqePZcI50pco+MPqJ50wMGY794X7AOOhxOBAjbkqfAbEe/QMp2Lw== + version "1.4.1" + resolved "https://registry.yarnpkg.com/triple-beam/-/triple-beam-1.4.1.tgz#6fde70271dc6e5d73ca0c3b24e2d92afb7441984" + integrity sha512-aZbgViZrg1QNcG+LULa7nhZpJTZSLm/mXnHXnbAbjmN5aSa0y7V+wvv6+4WaBtpISJzThKy+PIPxc1Nq1EJ9mg== ts-api-utils@^1.0.1, ts-api-utils@^1.3.0: version "1.3.0" @@ -20735,16 +21226,21 @@ tsconfig-paths@^4.1.2, tsconfig-paths@^4.2.0: minimist "^1.2.6" strip-bom "^3.0.0" -tslib@^1.10.0, tslib@^1.11.1, tslib@^1.8.1, tslib@^1.9.0: +tslib@^1.10.0, tslib@^1.8.1, tslib@^1.9.0: version "1.14.1" resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00" integrity sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg== -tslib@^2.1.0, tslib@^2.2.0, tslib@^2.3.0, tslib@^2.3.1, tslib@^2.4.0, tslib@^2.4.1, tslib@^2.5.0, tslib@^2.6.2: +tslib@^2.1.0, tslib@^2.2.0, tslib@^2.3.0, tslib@^2.4.0, tslib@^2.4.1, tslib@^2.6.2: version "2.6.3" resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.6.3.tgz#0438f810ad7a9edcde7a241c3d80db693c8cbfe0" integrity sha512-xNvxJEOUiWPGhUuUdQgAJPKOOJfGnIyKySOc09XkKsgdUV/3E2zvwZYdejjmRgPCgcym1juLH3226yA7sEFJKQ== +tslib@^2.5.0: + version "2.8.1" + resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.8.1.tgz#612efe4ed235d567e8aba5f2a5fab70280ade83f" + integrity sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w== + tsscmp@1.0.6: version "1.0.6" resolved "https://registry.yarnpkg.com/tsscmp/-/tsscmp-1.0.6.tgz#85b99583ac3589ec4bfef825b5000aa911d605eb" @@ -21196,7 +21692,7 @@ utf-8-validate@^5.0.2: dependencies: node-gyp-build "^4.3.0" -utif2@^4.0.1: +utif2@^4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/utif2/-/utif2-4.1.0.tgz#e768d37bd619b995d56d9780b5d2b4611a3d932b" integrity sha512-+oknB9FHrJ7oW7A2WZYajOcv4FcDR4CfoGB0dPNfxbi4GO05RRnFmt5oa23+9w32EanrYcSJWspUiJkLMs+37w== @@ -21208,6 +21704,17 @@ util-deprecate@^1.0.1, util-deprecate@^1.0.2, util-deprecate@~1.0.1: resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" integrity sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw== +util@^0.12.4: + version "0.12.5" + resolved "https://registry.yarnpkg.com/util/-/util-0.12.5.tgz#5f17a6059b73db61a875668781a1c2b136bd6fbc" + integrity sha512-kZf/K6hEIrWHI6XqOFUiiMa+79wE/D8Q+NCNAWclkyg3b4d2k7s0QGepNjiABc+aR3N1PAyHL7p6UcLY6LmrnA== + dependencies: + inherits "^2.0.3" + is-arguments "^1.0.4" + is-generator-function "^1.0.7" + is-typed-array "^1.1.3" + which-typed-array "^1.1.2" + utils-merge@1.x.x, utils-merge@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/utils-merge/-/utils-merge-1.0.1.tgz#9f95710f50a267947b2ccc124741c1028427e713" @@ -21218,10 +21725,10 @@ uuid-random@^1.3.2: resolved "https://registry.yarnpkg.com/uuid-random/-/uuid-random-1.3.2.tgz#96715edbaef4e84b1dcf5024b00d16f30220e2d0" integrity sha512-UOzej0Le/UgkbWEO8flm+0y+G+ljUon1QWTEZOq1rnMAsxo2+SckbiZdKzAHHlVh6gJqI1TjC/xwgR50MuCrBQ== -uuid@3.3.2: - version "3.3.2" - resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.3.2.tgz#1b4af4955eb3077c501c23872fc6513811587131" - integrity sha512-yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA== +uuid@8.0.0: + version "8.0.0" + 
resolved "https://registry.yarnpkg.com/uuid/-/uuid-8.0.0.tgz#bc6ccf91b5ff0ac07bbcdbf1c7c4e150db4dbb6c" + integrity sha512-jOXGuXZAWdsTH7eZLtyXMqUb9EcWMGZNbL9YcGBJl4MH4nrxHmZJhEHvyLFrkxo+28uLb/NYRcStH48fnD0Vzw== uuid@8.1.0: version "8.1.0" @@ -21488,11 +21995,6 @@ whatwg-encoding@^2.0.0: dependencies: iconv-lite "0.6.3" -whatwg-fetch@^3.4.1: - version "3.6.20" - resolved "https://registry.yarnpkg.com/whatwg-fetch/-/whatwg-fetch-3.6.20.tgz#580ce6d791facec91d37c72890995a0b48d31c70" - integrity sha512-EqhiFU6daOA8kpjOWTL0olhVOF3i7OrFzSYiGsEMB8GcXS+RrzauAERX65xMeNWVqxA6HXH2m69Z9LaKKdisfg== - whatwg-mimetype@^2.3.0: version "2.3.0" resolved "https://registry.yarnpkg.com/whatwg-mimetype/-/whatwg-mimetype-2.3.0.tgz#3d4b1e0312d2079879f826aff18dbeeca5960fbf" @@ -21562,7 +22064,7 @@ which-module@^2.0.0: resolved "https://registry.yarnpkg.com/which-module/-/which-module-2.0.0.tgz#d9ef07dce77b9902b8a3a8fa4b31c3e3f7e6e87a" integrity sha512-B+enWhmw6cjfVC7kS8Pj9pCrKSc5txArRyaYGe088shv/FGWH+0Rjx/xPgtsWfsUtS27FkP697E4DDhgrgoc0Q== -which-typed-array@^1.1.13, which-typed-array@^1.1.14, which-typed-array@^1.1.15: +which-typed-array@^1.1.13, which-typed-array@^1.1.14, which-typed-array@^1.1.15, which-typed-array@^1.1.2: version "1.1.15" resolved "https://registry.yarnpkg.com/which-typed-array/-/which-typed-array-1.1.15.tgz#264859e9b11a649b388bfaaf4f767df1f779b38d" integrity sha512-oV0jmFtUky6CXfkqehVvBP/LSWJ2sy4vWMioiENyJLePrBO/yKyV9OyJySfAKosh+RYkIl5zJCNZ8/4JncrpdA== @@ -21616,31 +22118,31 @@ widest-line@^3.1.0: dependencies: string-width "^4.0.0" -winston-transport@^4.5.0: - version "4.5.0" - resolved "https://registry.yarnpkg.com/winston-transport/-/winston-transport-4.5.0.tgz#6e7b0dd04d393171ed5e4e4905db265f7ab384fa" - integrity sha512-YpZzcUzBedhlTAfJg6vJDlyEai/IFMIVcaEZZyl3UXIl4gmqRpU7AE89AHLkbzLUsv0NVmw7ts+iztqKxxPW1Q== +winston-transport@^4.9.0: + version "4.9.0" + resolved "https://registry.yarnpkg.com/winston-transport/-/winston-transport-4.9.0.tgz#3bba345de10297654ea6f33519424560003b3bf9" + integrity sha512-8drMJ4rkgaPo1Me4zD/3WLfI/zPdA9o2IipKODunnGDcuqbHwjsbB79ylv04LCGGzU0xQ6vTznOMpQGaLhhm6A== dependencies: - logform "^2.3.2" - readable-stream "^3.6.0" + logform "^2.7.0" + readable-stream "^3.6.2" triple-beam "^1.3.0" winston@^3.1.0: - version "3.8.2" - resolved "https://registry.yarnpkg.com/winston/-/winston-3.8.2.tgz#56e16b34022eb4cff2638196d9646d7430fdad50" - integrity sha512-MsE1gRx1m5jdTTO9Ld/vND4krP2To+lgDoMEHGGa4HIlAUyXJtfc7CxQcGXVyz2IBpw5hbFkj2b/AtUdQwyRew== + version "3.17.0" + resolved "https://registry.yarnpkg.com/winston/-/winston-3.17.0.tgz#74b8665ce9b4ea7b29d0922cfccf852a08a11423" + integrity sha512-DLiFIXYC5fMPxaRg832S6F5mJYvePtmO5G9v9IgUFPhXm9/GkXarH/TUrBAVzhTCzAj9anE/+GjrgXp/54nOgw== dependencies: - "@colors/colors" "1.5.0" + "@colors/colors" "^1.6.0" "@dabh/diagnostics" "^2.0.2" async "^3.2.3" is-stream "^2.0.0" - logform "^2.4.0" + logform "^2.7.0" one-time "^1.0.0" readable-stream "^3.4.0" safe-stable-stringify "^2.3.1" stack-trace "0.0.x" triple-beam "^1.3.0" - winston-transport "^4.5.0" + winston-transport "^4.9.0" word-wrap@~1.2.3: version "1.2.5" @@ -21788,7 +22290,7 @@ xdg-basedir@^4.0.0: resolved "https://registry.yarnpkg.com/xdg-basedir/-/xdg-basedir-4.0.0.tgz#4bc8d9984403696225ef83a1573cbbcb4e79db13" integrity sha512-PSNhEJDejZYV7h50BohL09Er9VaIefr2LMAf3OEmpCkjOi34eYyQYAXUTjEQtZJTKcF0E2UKTh+osDLsgNim9Q== -xhr@^2.0.1, xhr@^2.4.1: +xhr@^2.4.1: version "2.6.0" resolved "https://registry.yarnpkg.com/xhr/-/xhr-2.6.0.tgz#b69d4395e792b4173d6b7df077f0fc5e4e2b249d" integrity 
sha512-/eCGLb5rxjx5e3mF1A7s+pLlR6CGyqWN91fv1JgER5mVWg1MZmlhBvy9kjcsOdRk8RrIujotWyJamfyrp+WIcA== @@ -21813,7 +22315,7 @@ xml-parse-from-string@^1.0.0: resolved "https://registry.yarnpkg.com/xml-parse-from-string/-/xml-parse-from-string-1.0.1.tgz#a9029e929d3dbcded169f3c6e28238d95a5d5a28" integrity sha512-ErcKwJTF54uRzzNMXq2X5sMIy88zJvfN2DmdoQvy7PAFJ+tPRU6ydWuOKNMyfmOjdyBQTFREi60s0Y0SyI0G0g== -xml2js@0.1.x, xml2js@0.4.19, xml2js@0.5.0, xml2js@0.6.2, xml2js@^0.4.19, xml2js@^0.4.5: +xml2js@0.1.x, xml2js@0.6.2, xml2js@^0.5.0: version "0.6.2" resolved "https://registry.yarnpkg.com/xml2js/-/xml2js-0.6.2.tgz#dd0b630083aa09c161e25a4d0901e2b2a929b499" integrity sha512-T4rieHaC1EXcES0Kxxj4JWgaUQHDk+qwHcYOCFHfiwKz7tOVPLq7Hjq9dM1WCMhylqMEfP7hMcOIChvotiZegA==