From f25dd387b13bdece0ba134ceffa125917717259f Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Mon, 18 Nov 2024 16:56:11 +0000 Subject: [PATCH 01/16] Converting one test case to try using describe directly. --- .../server/src/api/routes/tests/row.spec.ts | 6153 ++++++++--------- .../src/integrations/tests/utils/index.ts | 60 +- 2 files changed, 3097 insertions(+), 3116 deletions(-) diff --git a/packages/server/src/api/routes/tests/row.spec.ts b/packages/server/src/api/routes/tests/row.spec.ts index 5bdd341beb..2eb7c45e58 100644 --- a/packages/server/src/api/routes/tests/row.spec.ts +++ b/packages/server/src/api/routes/tests/row.spec.ts @@ -3,6 +3,7 @@ import * as setup from "./utilities" import { DatabaseName, datasourceDescribe, + datasourceProps, } from "../../../integrations/tests/utils" import tk from "timekeeper" @@ -85,310 +86,618 @@ function encodeJS(binding: string) { return `{{ js "${Buffer.from(binding).toString("base64")}"}}` } -datasourceDescribe( - { name: "/rows (%s)", exclude: [DatabaseName.MONGODB] }, - ({ config, dsProvider, isInternal, isMSSQL, isOracle }) => { - let table: Table - let datasource: Datasource | undefined - let client: Knex | undefined +const databases = datasourceDescribe({ exclude: [DatabaseName.MONGODB] }) - beforeAll(async () => { - const ds = await dsProvider() - datasource = ds.datasource - client = ds.client - }) +describe.each(databases)("/rows (%s)", dbName => { + const { config, dsProvider, isInternal, isMSSQL, isOracle } = + datasourceProps(dbName) + let table: Table + let datasource: Datasource | undefined + let client: Knex | undefined - afterAll(async () => { - setup.afterAll() - }) + beforeAll(async () => { + const ds = await dsProvider() + datasource = ds.datasource + client = ds.client + }) - function saveTableRequest( - // We omit the name field here because it's generated in the function with a - // high likelihood to be unique. Tests should not have any reason to control - // the table name they're writing to. - ...overrides: Partial>[] - ): SaveTableRequest { - const defaultSchema: TableSchema = { - id: { - type: FieldType.NUMBER, - name: "id", - autocolumn: true, - constraints: { - presence: true, - }, + afterAll(async () => { + setup.afterAll() + }) + + function saveTableRequest( + // We omit the name field here because it's generated in the function with a + // high likelihood to be unique. Tests should not have any reason to control + // the table name they're writing to. + ...overrides: Partial>[] + ): SaveTableRequest { + const defaultSchema: TableSchema = { + id: { + type: FieldType.NUMBER, + name: "id", + autocolumn: true, + constraints: { + presence: true, }, - } - - for (const override of overrides) { - if (override.primary) { - delete defaultSchema.id - } - } - - const req: SaveTableRequest = { - name: uuid.v4().substring(0, 10), - type: "table", - sourceType: datasource - ? TableSourceType.EXTERNAL - : TableSourceType.INTERNAL, - sourceId: datasource ? datasource._id! : INTERNAL_TABLE_SOURCE_ID, - primary: ["id"], - schema: defaultSchema, - } - const merged = merge(req, ...overrides) - return merged + }, } - function defaultTable( - // We omit the name field here because it's generated in the function with a - // high likelihood to be unique. Tests should not have any reason to control - // the table name they're writing to. 
- ...overrides: Partial>[] - ): SaveTableRequest { - return saveTableRequest( + for (const override of overrides) { + if (override.primary) { + delete defaultSchema.id + } + } + + const req: SaveTableRequest = { + name: uuid.v4().substring(0, 10), + type: "table", + sourceType: datasource + ? TableSourceType.EXTERNAL + : TableSourceType.INTERNAL, + sourceId: datasource ? datasource._id! : INTERNAL_TABLE_SOURCE_ID, + primary: ["id"], + schema: defaultSchema, + } + const merged = merge(req, ...overrides) + return merged + } + + function defaultTable( + // We omit the name field here because it's generated in the function with a + // high likelihood to be unique. Tests should not have any reason to control + // the table name they're writing to. + ...overrides: Partial>[] + ): SaveTableRequest { + return saveTableRequest( + { + primaryDisplay: "name", + schema: { + name: { + type: FieldType.STRING, + name: "name", + constraints: { + type: "string", + }, + }, + description: { + type: FieldType.STRING, + name: "description", + constraints: { + type: "string", + }, + }, + }, + }, + ...overrides + ) + } + + beforeEach(async () => { + mocks.licenses.useCloudFree() + }) + + const getRowUsage = async () => { + const { total } = await config.doInContext(undefined, () => + quotas.getCurrentUsageValues(QuotaUsageType.STATIC, StaticQuotaName.ROWS) + ) + return total + } + + const assertRowUsage = async (expected: number) => { + const usage = await getRowUsage() + + // Because our quota tracking is not perfect, we allow a 10% margin of + // error. This is to account for the fact that parallel writes can result + // in some quota updates getting lost. We don't have any need to solve this + // right now, so we just allow for some error. + if (expected === 0) { + expect(usage).toEqual(0) + return + } + expect(usage).toBeGreaterThan(expected * 0.9) + expect(usage).toBeLessThan(expected * 1.1) + } + + const defaultRowFields = isInternal + ? { + type: "row", + createdAt: timestamp, + updatedAt: timestamp, + } + : undefined + + beforeAll(async () => { + table = await config.api.table.save(defaultTable()) + }) + + describe("create", () => { + it("creates a new row successfully", async () => { + const rowUsage = await getRowUsage() + const row = await config.api.row.save(table._id!, { + name: "Test Contact", + }) + expect(row.name).toEqual("Test Contact") + expect(row._rev).toBeDefined() + await assertRowUsage(isInternal ? 
rowUsage + 1 : rowUsage) + }) + + it("fails to create a row for a table that does not exist", async () => { + const rowUsage = await getRowUsage() + await config.api.row.save("1234567", {}, { status: 404 }) + await assertRowUsage(rowUsage) + }) + + it("fails to create a row if required fields are missing", async () => { + const rowUsage = await getRowUsage() + const table = await config.api.table.save( + saveTableRequest({ + schema: { + required: { + type: FieldType.STRING, + name: "required", + constraints: { + type: "string", + presence: true, + }, + }, + }, + }) + ) + await config.api.row.save( + table._id!, + {}, { - primaryDisplay: "name", + status: 500, + body: { + validationErrors: { + required: ["can't be blank"], + }, + }, + } + ) + await assertRowUsage(rowUsage) + }) + + isInternal && + it("increment row autoId per create row request", async () => { + const rowUsage = await getRowUsage() + + const newTable = await config.api.table.save( + saveTableRequest({ + schema: { + "Row ID": { + name: "Row ID", + type: FieldType.NUMBER, + subtype: AutoFieldSubType.AUTO_ID, + icon: "ri-magic-line", + autocolumn: true, + constraints: { + type: "number", + presence: true, + numericality: { + greaterThanOrEqualTo: "", + lessThanOrEqualTo: "", + }, + }, + }, + }, + }) + ) + + let previousId = 0 + for (let i = 0; i < 10; i++) { + const row = await config.api.row.save(newTable._id!, {}) + expect(row["Row ID"]).toBeGreaterThan(previousId) + previousId = row["Row ID"] + } + await assertRowUsage(isInternal ? rowUsage + 10 : rowUsage) + }) + + isInternal && + it("should increment auto ID correctly when creating rows in parallel", async () => { + const table = await config.api.table.save( + saveTableRequest({ + schema: { + "Row ID": { + name: "Row ID", + type: FieldType.NUMBER, + subtype: AutoFieldSubType.AUTO_ID, + icon: "ri-magic-line", + autocolumn: true, + constraints: { + type: "number", + presence: true, + numericality: { + greaterThanOrEqualTo: "", + lessThanOrEqualTo: "", + }, + }, + }, + }, + }) + ) + + const sequence = Array(50) + .fill(0) + .map((_, i) => i + 1) + + // This block of code is simulating users creating auto ID rows at the + // same time. It's expected that this operation will sometimes return + // a document conflict error (409), but the idea is to retry in those + // situations. The code below does this a large number of times with + // small, random delays between them to try and get through the list + // as quickly as possible. + await Promise.all( + sequence.map(async () => { + const attempts = 30 + for (let attempt = 0; attempt < attempts; attempt++) { + try { + await config.api.row.save(table._id!, {}) + return + } catch (e) { + await new Promise(r => setTimeout(r, Math.random() * 50)) + } + } + throw new Error(`Failed to create row after ${attempts} attempts`) + }) + ) + + const rows = await config.api.row.fetch(table._id!) + expect(rows).toHaveLength(50) + + // The main purpose of this test is to ensure that even under pressure, + // we maintain data integrity. An auto ID column should hand out + // monotonically increasing unique integers no matter what. 
+ const ids = rows.map(r => r["Row ID"]) + expect(ids).toEqual(expect.arrayContaining(sequence)) + }) + + isInternal && + it("doesn't allow creating in user table", async () => { + const response = await config.api.row.save( + InternalTable.USER_METADATA, + { + firstName: "Joe", + lastName: "Joe", + email: "joe@joe.com", + roles: {}, + }, + { status: 400 } + ) + expect(response.message).toBe("Cannot create new user entry.") + }) + + it("should not mis-parse date string out of JSON", async () => { + const table = await config.api.table.save( + saveTableRequest({ schema: { name: { type: FieldType.STRING, name: "name", - constraints: { - type: "string", - }, - }, - description: { - type: FieldType.STRING, - name: "description", - constraints: { - type: "string", - }, }, }, - }, - ...overrides - ) - } - - beforeEach(async () => { - mocks.licenses.useCloudFree() - }) - - const getRowUsage = async () => { - const { total } = await config.doInContext(undefined, () => - quotas.getCurrentUsageValues( - QuotaUsageType.STATIC, - StaticQuotaName.ROWS - ) - ) - return total - } - - const assertRowUsage = async (expected: number) => { - const usage = await getRowUsage() - - // Because our quota tracking is not perfect, we allow a 10% margin of - // error. This is to account for the fact that parallel writes can result - // in some quota updates getting lost. We don't have any need to solve this - // right now, so we just allow for some error. - if (expected === 0) { - expect(usage).toEqual(0) - return - } - expect(usage).toBeGreaterThan(expected * 0.9) - expect(usage).toBeLessThan(expected * 1.1) - } - - const defaultRowFields = isInternal - ? { - type: "row", - createdAt: timestamp, - updatedAt: timestamp, - } - : undefined - - beforeAll(async () => { - table = await config.api.table.save(defaultTable()) - }) - - describe("create", () => { - it("creates a new row successfully", async () => { - const rowUsage = await getRowUsage() - const row = await config.api.row.save(table._id!, { - name: "Test Contact", }) - expect(row.name).toEqual("Test Contact") - expect(row._rev).toBeDefined() - await assertRowUsage(isInternal ? 
rowUsage + 1 : rowUsage) + ) + + const row = await config.api.row.save(table._id!, { + name: `{ "foo": "2023-01-26T11:48:57.000Z" }`, }) - it("fails to create a row for a table that does not exist", async () => { - const rowUsage = await getRowUsage() - await config.api.row.save("1234567", {}, { status: 404 }) - await assertRowUsage(rowUsage) - }) + expect(row.name).toEqual(`{ "foo": "2023-01-26T11:48:57.000Z" }`) + }) - it("fails to create a row if required fields are missing", async () => { - const rowUsage = await getRowUsage() - const table = await config.api.table.save( - saveTableRequest({ - schema: { - required: { - type: FieldType.STRING, - name: "required", - constraints: { - type: "string", - presence: true, - }, - }, - }, - }) - ) - await config.api.row.save( - table._id!, - {}, - { - status: 500, - body: { - validationErrors: { - required: ["can't be blank"], - }, - }, - } - ) - await assertRowUsage(rowUsage) - }) + describe("default values", () => { + let table: Table - isInternal && - it("increment row autoId per create row request", async () => { - const rowUsage = await getRowUsage() - - const newTable = await config.api.table.save( + describe("string column", () => { + beforeAll(async () => { + table = await config.api.table.save( saveTableRequest({ schema: { - "Row ID": { - name: "Row ID", - type: FieldType.NUMBER, - subtype: AutoFieldSubType.AUTO_ID, - icon: "ri-magic-line", - autocolumn: true, - constraints: { - type: "number", - presence: true, - numericality: { - greaterThanOrEqualTo: "", - lessThanOrEqualTo: "", - }, - }, + description: { + name: "description", + type: FieldType.STRING, + default: "default description", }, }, }) ) - - let previousId = 0 - for (let i = 0; i < 10; i++) { - const row = await config.api.row.save(newTable._id!, {}) - expect(row["Row ID"]).toBeGreaterThan(previousId) - previousId = row["Row ID"] - } - await assertRowUsage(isInternal ? 
rowUsage + 10 : rowUsage) }) - isInternal && - it("should increment auto ID correctly when creating rows in parallel", async () => { + it("creates a new row with a default value successfully", async () => { + const row = await config.api.row.save(table._id!, {}) + expect(row.description).toEqual("default description") + }) + + it("does not use default value if value specified", async () => { + const row = await config.api.row.save(table._id!, { + description: "specified description", + }) + expect(row.description).toEqual("specified description") + }) + + it("uses the default value if value is null", async () => { + const row = await config.api.row.save(table._id!, { + description: null, + }) + expect(row.description).toEqual("default description") + }) + + it("uses the default value if value is undefined", async () => { + const row = await config.api.row.save(table._id!, { + description: undefined, + }) + expect(row.description).toEqual("default description") + }) + }) + + describe("number column", () => { + beforeAll(async () => { + table = await config.api.table.save( + saveTableRequest({ + schema: { + age: { + name: "age", + type: FieldType.NUMBER, + default: "25", + }, + }, + }) + ) + }) + + it("creates a new row with a default value successfully", async () => { + const row = await config.api.row.save(table._id!, {}) + expect(row.age).toEqual(25) + }) + + it("does not use default value if value specified", async () => { + const row = await config.api.row.save(table._id!, { + age: 30, + }) + expect(row.age).toEqual(30) + }) + }) + + describe("date column", () => { + it("creates a row with a default value successfully", async () => { const table = await config.api.table.save( saveTableRequest({ schema: { - "Row ID": { - name: "Row ID", - type: FieldType.NUMBER, - subtype: AutoFieldSubType.AUTO_ID, - icon: "ri-magic-line", - autocolumn: true, + date: { + name: "date", + type: FieldType.DATETIME, + default: "2023-01-26T11:48:57.000Z", + }, + }, + }) + ) + const row = await config.api.row.save(table._id!, {}) + expect(row.date).toEqual("2023-01-26T11:48:57.000Z") + }) + + it("gives an error if the default value is invalid", async () => { + const table = await config.api.table.save( + saveTableRequest({ + schema: { + date: { + name: "date", + type: FieldType.DATETIME, + default: "invalid", + }, + }, + }) + ) + await config.api.row.save( + table._id!, + {}, + { + status: 400, + body: { + message: `Invalid default value for field 'date' - Invalid date value: "invalid"`, + }, + } + ) + }) + }) + + describe("options column", () => { + beforeAll(async () => { + table = await config.api.table.save( + saveTableRequest({ + schema: { + status: { + name: "status", + type: FieldType.OPTIONS, + default: "requested", constraints: { - type: "number", - presence: true, - numericality: { - greaterThanOrEqualTo: "", - lessThanOrEqualTo: "", - }, + inclusion: ["requested", "approved"], }, }, }, }) ) - - const sequence = Array(50) - .fill(0) - .map((_, i) => i + 1) - - // This block of code is simulating users creating auto ID rows at the - // same time. It's expected that this operation will sometimes return - // a document conflict error (409), but the idea is to retry in those - // situations. The code below does this a large number of times with - // small, random delays between them to try and get through the list - // as quickly as possible. 
- await Promise.all( - sequence.map(async () => { - const attempts = 30 - for (let attempt = 0; attempt < attempts; attempt++) { - try { - await config.api.row.save(table._id!, {}) - return - } catch (e) { - await new Promise(r => setTimeout(r, Math.random() * 50)) - } - } - throw new Error(`Failed to create row after ${attempts} attempts`) - }) - ) - - const rows = await config.api.row.fetch(table._id!) - expect(rows).toHaveLength(50) - - // The main purpose of this test is to ensure that even under pressure, - // we maintain data integrity. An auto ID column should hand out - // monotonically increasing unique integers no matter what. - const ids = rows.map(r => r["Row ID"]) - expect(ids).toEqual(expect.arrayContaining(sequence)) }) - isInternal && - it("doesn't allow creating in user table", async () => { - const response = await config.api.row.save( - InternalTable.USER_METADATA, - { - firstName: "Joe", - lastName: "Joe", - email: "joe@joe.com", - roles: {}, - }, - { status: 400 } - ) - expect(response.message).toBe("Cannot create new user entry.") + it("creates a new row with a default value successfully", async () => { + const row = await config.api.row.save(table._id!, {}) + expect(row.status).toEqual("requested") }) - it("should not mis-parse date string out of JSON", async () => { - const table = await config.api.table.save( - saveTableRequest({ - schema: { - name: { - type: FieldType.STRING, - name: "name", - }, - }, + it("does not use default value if value specified", async () => { + const row = await config.api.row.save(table._id!, { + status: "approved", }) - ) - - const row = await config.api.row.save(table._id!, { - name: `{ "foo": "2023-01-26T11:48:57.000Z" }`, + expect(row.status).toEqual("approved") }) - - expect(row.name).toEqual(`{ "foo": "2023-01-26T11:48:57.000Z" }`) }) - describe("default values", () => { - let table: Table + describe("array column", () => { + beforeAll(async () => { + table = await config.api.table.save( + saveTableRequest({ + schema: { + food: { + name: "food", + type: FieldType.ARRAY, + default: ["apple", "orange"], + constraints: { + type: JsonFieldSubType.ARRAY, + inclusion: ["apple", "orange", "banana"], + }, + }, + }, + }) + ) + }) + it("creates a new row with a default value successfully", async () => { + const row = await config.api.row.save(table._id!, {}) + expect(row.food).toEqual(["apple", "orange"]) + }) + + it("creates a new row with a default value when given an empty list", async () => { + const row = await config.api.row.save(table._id!, { food: [] }) + expect(row.food).toEqual(["apple", "orange"]) + }) + + it("does not use default value if value specified", async () => { + const row = await config.api.row.save(table._id!, { + food: ["orange"], + }) + expect(row.food).toEqual(["orange"]) + }) + + it("resets back to its default value when empty", async () => { + let row = await config.api.row.save(table._id!, { + food: ["orange"], + }) + row = await config.api.row.save(table._id!, { ...row, food: [] }) + expect(row.food).toEqual(["apple", "orange"]) + }) + }) + + describe("user column", () => { + beforeAll(async () => { + table = await config.api.table.save( + saveTableRequest({ + schema: { + user: { + name: "user", + type: FieldType.BB_REFERENCE_SINGLE, + subtype: BBReferenceFieldSubType.USER, + default: "{{ [Current User]._id }}", + }, + }, + }) + ) + }) + + it("creates a new row with a default value successfully", async () => { + const row = await config.api.row.save(table._id!, {}) + 
expect(row.user._id).toEqual(config.getUser()._id) + }) + + it("does not use default value if value specified", async () => { + const id = `us_${utils.newid()}` + await config.createUser({ _id: id }) + const row = await config.api.row.save(table._id!, { + user: id, + }) + expect(row.user._id).toEqual(id) + }) + }) + + describe("multi-user column", () => { + beforeAll(async () => { + table = await config.api.table.save( + saveTableRequest({ + schema: { + users: { + name: "users", + type: FieldType.BB_REFERENCE, + subtype: BBReferenceFieldSubType.USER, + default: ["{{ [Current User]._id }}"], + }, + }, + }) + ) + }) + + it("creates a new row with a default value successfully", async () => { + const row = await config.api.row.save(table._id!, {}) + expect(row.users).toHaveLength(1) + expect(row.users[0]._id).toEqual(config.getUser()._id) + }) + + it("does not use default value if value specified", async () => { + const id = `us_${utils.newid()}` + await config.createUser({ _id: id }) + const row = await config.api.row.save(table._id!, { + users: [id], + }) + expect(row.users).toHaveLength(1) + expect(row.users[0]._id).toEqual(id) + }) + }) + + describe("boolean column", () => { + beforeAll(async () => { + table = await config.api.table.save( + saveTableRequest({ + schema: { + active: { + name: "active", + type: FieldType.BOOLEAN, + default: "true", + }, + }, + }) + ) + }) + + it("creates a new row with a default value successfully", async () => { + const row = await config.api.row.save(table._id!, {}) + expect(row.active).toEqual(true) + }) + + it("does not use default value if value specified", async () => { + const row = await config.api.row.save(table._id!, { + active: false, + }) + expect(row.active).toEqual(false) + }) + }) + + describe("bigint column", () => { + beforeAll(async () => { + table = await config.api.table.save( + saveTableRequest({ + schema: { + bigNumber: { + name: "bigNumber", + type: FieldType.BIGINT, + default: "1234567890", + }, + }, + }) + ) + }) + + it("creates a new row with a default value successfully", async () => { + const row = await config.api.row.save(table._id!, {}) + expect(row.bigNumber).toEqual("1234567890") + }) + + it("does not use default value if value specified", async () => { + const row = await config.api.row.save(table._id!, { + bigNumber: "9876543210", + }) + expect(row.bigNumber).toEqual("9876543210") + }) + }) + + describe("bindings", () => { describe("string column", () => { beforeAll(async () => { table = await config.api.table.save( @@ -397,16 +706,18 @@ datasourceDescribe( description: { name: "description", type: FieldType.STRING, - default: "default description", + default: `{{ date now "YYYY-MM-DDTHH:mm:ss" }}`, }, }, }) ) }) - it("creates a new row with a default value successfully", async () => { + it("can use bindings in default values", async () => { const row = await config.api.row.save(table._id!, {}) - expect(row.description).toEqual("default description") + expect(row.description).toMatch( + /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}/ + ) }) it("does not use default value if value specified", async () => { @@ -416,18 +727,38 @@ datasourceDescribe( expect(row.description).toEqual("specified description") }) - it("uses the default value if value is null", async () => { - const row = await config.api.row.save(table._id!, { - description: null, - }) - expect(row.description).toEqual("default description") + it("can bind the current user", async () => { + const table = await config.api.table.save( + saveTableRequest({ + schema: { + user: { 
+ name: "user", + type: FieldType.STRING, + default: `{{ [Current User]._id }}`, + }, + }, + }) + ) + const row = await config.api.row.save(table._id!, {}) + expect(row.user).toEqual(config.getUser()._id) }) - it("uses the default value if value is undefined", async () => { - const row = await config.api.row.save(table._id!, { - description: undefined, - }) - expect(row.description).toEqual("default description") + it("cannot access current user password", async () => { + const table = await config.api.table.save( + saveTableRequest({ + schema: { + user: { + name: "user", + type: FieldType.STRING, + default: `{{ user.password }}`, + }, + }, + }) + ) + const row = await config.api.row.save(table._id!, {}) + // For some reason it's null for internal tables, and undefined for + // external. + expect(row.user == null).toBe(true) }) }) @@ -439,330 +770,19 @@ datasourceDescribe( age: { name: "age", type: FieldType.NUMBER, - default: "25", + default: `{{ sum 10 10 5 }}`, }, }, }) ) }) - it("creates a new row with a default value successfully", async () => { + it("can use bindings in default values", async () => { const row = await config.api.row.save(table._id!, {}) expect(row.age).toEqual(25) }) - it("does not use default value if value specified", async () => { - const row = await config.api.row.save(table._id!, { - age: 30, - }) - expect(row.age).toEqual(30) - }) - }) - - describe("date column", () => { - it("creates a row with a default value successfully", async () => { - const table = await config.api.table.save( - saveTableRequest({ - schema: { - date: { - name: "date", - type: FieldType.DATETIME, - default: "2023-01-26T11:48:57.000Z", - }, - }, - }) - ) - const row = await config.api.row.save(table._id!, {}) - expect(row.date).toEqual("2023-01-26T11:48:57.000Z") - }) - - it("gives an error if the default value is invalid", async () => { - const table = await config.api.table.save( - saveTableRequest({ - schema: { - date: { - name: "date", - type: FieldType.DATETIME, - default: "invalid", - }, - }, - }) - ) - await config.api.row.save( - table._id!, - {}, - { - status: 400, - body: { - message: `Invalid default value for field 'date' - Invalid date value: "invalid"`, - }, - } - ) - }) - }) - - describe("options column", () => { - beforeAll(async () => { - table = await config.api.table.save( - saveTableRequest({ - schema: { - status: { - name: "status", - type: FieldType.OPTIONS, - default: "requested", - constraints: { - inclusion: ["requested", "approved"], - }, - }, - }, - }) - ) - }) - - it("creates a new row with a default value successfully", async () => { - const row = await config.api.row.save(table._id!, {}) - expect(row.status).toEqual("requested") - }) - - it("does not use default value if value specified", async () => { - const row = await config.api.row.save(table._id!, { - status: "approved", - }) - expect(row.status).toEqual("approved") - }) - }) - - describe("array column", () => { - beforeAll(async () => { - table = await config.api.table.save( - saveTableRequest({ - schema: { - food: { - name: "food", - type: FieldType.ARRAY, - default: ["apple", "orange"], - constraints: { - type: JsonFieldSubType.ARRAY, - inclusion: ["apple", "orange", "banana"], - }, - }, - }, - }) - ) - }) - - it("creates a new row with a default value successfully", async () => { - const row = await config.api.row.save(table._id!, {}) - expect(row.food).toEqual(["apple", "orange"]) - }) - - it("creates a new row with a default value when given an empty list", async () => { - const row = await 
config.api.row.save(table._id!, { food: [] }) - expect(row.food).toEqual(["apple", "orange"]) - }) - - it("does not use default value if value specified", async () => { - const row = await config.api.row.save(table._id!, { - food: ["orange"], - }) - expect(row.food).toEqual(["orange"]) - }) - - it("resets back to its default value when empty", async () => { - let row = await config.api.row.save(table._id!, { - food: ["orange"], - }) - row = await config.api.row.save(table._id!, { ...row, food: [] }) - expect(row.food).toEqual(["apple", "orange"]) - }) - }) - - describe("user column", () => { - beforeAll(async () => { - table = await config.api.table.save( - saveTableRequest({ - schema: { - user: { - name: "user", - type: FieldType.BB_REFERENCE_SINGLE, - subtype: BBReferenceFieldSubType.USER, - default: "{{ [Current User]._id }}", - }, - }, - }) - ) - }) - - it("creates a new row with a default value successfully", async () => { - const row = await config.api.row.save(table._id!, {}) - expect(row.user._id).toEqual(config.getUser()._id) - }) - - it("does not use default value if value specified", async () => { - const id = `us_${utils.newid()}` - await config.createUser({ _id: id }) - const row = await config.api.row.save(table._id!, { - user: id, - }) - expect(row.user._id).toEqual(id) - }) - }) - - describe("multi-user column", () => { - beforeAll(async () => { - table = await config.api.table.save( - saveTableRequest({ - schema: { - users: { - name: "users", - type: FieldType.BB_REFERENCE, - subtype: BBReferenceFieldSubType.USER, - default: ["{{ [Current User]._id }}"], - }, - }, - }) - ) - }) - - it("creates a new row with a default value successfully", async () => { - const row = await config.api.row.save(table._id!, {}) - expect(row.users).toHaveLength(1) - expect(row.users[0]._id).toEqual(config.getUser()._id) - }) - - it("does not use default value if value specified", async () => { - const id = `us_${utils.newid()}` - await config.createUser({ _id: id }) - const row = await config.api.row.save(table._id!, { - users: [id], - }) - expect(row.users).toHaveLength(1) - expect(row.users[0]._id).toEqual(id) - }) - }) - - describe("boolean column", () => { - beforeAll(async () => { - table = await config.api.table.save( - saveTableRequest({ - schema: { - active: { - name: "active", - type: FieldType.BOOLEAN, - default: "true", - }, - }, - }) - ) - }) - - it("creates a new row with a default value successfully", async () => { - const row = await config.api.row.save(table._id!, {}) - expect(row.active).toEqual(true) - }) - - it("does not use default value if value specified", async () => { - const row = await config.api.row.save(table._id!, { - active: false, - }) - expect(row.active).toEqual(false) - }) - }) - - describe("bigint column", () => { - beforeAll(async () => { - table = await config.api.table.save( - saveTableRequest({ - schema: { - bigNumber: { - name: "bigNumber", - type: FieldType.BIGINT, - default: "1234567890", - }, - }, - }) - ) - }) - - it("creates a new row with a default value successfully", async () => { - const row = await config.api.row.save(table._id!, {}) - expect(row.bigNumber).toEqual("1234567890") - }) - - it("does not use default value if value specified", async () => { - const row = await config.api.row.save(table._id!, { - bigNumber: "9876543210", - }) - expect(row.bigNumber).toEqual("9876543210") - }) - }) - - describe("bindings", () => { - describe("string column", () => { - beforeAll(async () => { - table = await config.api.table.save( - saveTableRequest({ - 
schema: { - description: { - name: "description", - type: FieldType.STRING, - default: `{{ date now "YYYY-MM-DDTHH:mm:ss" }}`, - }, - }, - }) - ) - }) - - it("can use bindings in default values", async () => { - const row = await config.api.row.save(table._id!, {}) - expect(row.description).toMatch( - /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}/ - ) - }) - - it("does not use default value if value specified", async () => { - const row = await config.api.row.save(table._id!, { - description: "specified description", - }) - expect(row.description).toEqual("specified description") - }) - - it("can bind the current user", async () => { - const table = await config.api.table.save( - saveTableRequest({ - schema: { - user: { - name: "user", - type: FieldType.STRING, - default: `{{ [Current User]._id }}`, - }, - }, - }) - ) - const row = await config.api.row.save(table._id!, {}) - expect(row.user).toEqual(config.getUser()._id) - }) - - it("cannot access current user password", async () => { - const table = await config.api.table.save( - saveTableRequest({ - schema: { - user: { - name: "user", - type: FieldType.STRING, - default: `{{ user.password }}`, - }, - }, - }) - ) - const row = await config.api.row.save(table._id!, {}) - // For some reason it's null for internal tables, and undefined for - // external. - expect(row.user == null).toBe(true) - }) - }) - - describe("number column", () => { + describe("invalid default value", () => { beforeAll(async () => { table = await config.api.table.save( saveTableRequest({ @@ -770,885 +790,907 @@ datasourceDescribe( age: { name: "age", type: FieldType.NUMBER, - default: `{{ sum 10 10 5 }}`, + default: `{{ capitalize "invalid" }}`, }, }, }) ) }) - it("can use bindings in default values", async () => { - const row = await config.api.row.save(table._id!, {}) - expect(row.age).toEqual(25) - }) - - describe("invalid default value", () => { - beforeAll(async () => { - table = await config.api.table.save( - saveTableRequest({ - schema: { - age: { - name: "age", - type: FieldType.NUMBER, - default: `{{ capitalize "invalid" }}`, - }, - }, - }) - ) - }) - - it("throws an error when invalid default value", async () => { - await config.api.row.save( - table._id!, - {}, - { - status: 400, - body: { - message: - "Invalid default value for field 'age' - Invalid number value \"Invalid\"", - }, - } - ) - }) - }) - }) - }) - }) - - describe("relations to same table", () => { - let relatedRows: Row[] - - beforeAll(async () => { - const relatedTable = await config.api.table.save( - defaultTable({ - schema: { - name: { name: "name", type: FieldType.STRING }, - }, - }) - ) - const relatedTableId = relatedTable._id! 
- table = await config.api.table.save( - defaultTable({ - schema: { - name: { name: "name", type: FieldType.STRING }, - related1: { - type: FieldType.LINK, - name: "related1", - fieldName: "main1", - tableId: relatedTableId, - relationshipType: RelationshipType.MANY_TO_MANY, - }, - related2: { - type: FieldType.LINK, - name: "related2", - fieldName: "main2", - tableId: relatedTableId, - relationshipType: RelationshipType.MANY_TO_MANY, - }, - }, - }) - ) - relatedRows = await Promise.all([ - config.api.row.save(relatedTableId, { name: "foo" }), - config.api.row.save(relatedTableId, { name: "bar" }), - config.api.row.save(relatedTableId, { name: "baz" }), - config.api.row.save(relatedTableId, { name: "boo" }), - ]) - }) - - it("can create rows with both relationships", async () => { - const row = await config.api.row.save(table._id!, { - name: "test", - related1: [relatedRows[0]._id!], - related2: [relatedRows[1]._id!], - }) - - expect(row).toEqual( - expect.objectContaining({ - name: "test", - related1: [ + it("throws an error when invalid default value", async () => { + await config.api.row.save( + table._id!, + {}, { - _id: relatedRows[0]._id, - primaryDisplay: relatedRows[0].name, - }, - ], - related2: [ - { - _id: relatedRows[1]._id, - primaryDisplay: relatedRows[1].name, - }, - ], + status: 400, + body: { + message: + "Invalid default value for field 'age' - Invalid number value \"Invalid\"", + }, + } + ) }) - ) - }) - - it("can create rows with no relationships", async () => { - const row = await config.api.row.save(table._id!, { - name: "test", }) - - expect(row.related1).toBeUndefined() - expect(row.related2).toBeUndefined() - }) - - it("can create rows with only one relationships field", async () => { - const row = await config.api.row.save(table._id!, { - name: "test", - related1: [], - related2: [relatedRows[1]._id!], - }) - - expect(row).toEqual( - expect.objectContaining({ - name: "test", - related2: [ - { - _id: relatedRows[1]._id, - primaryDisplay: relatedRows[1].name, - }, - ], - }) - ) - expect(row.related1).toBeUndefined() }) }) }) - describe("get", () => { - it("reads an existing row successfully", async () => { - const existing = await config.api.row.save(table._id!, {}) - - const res = await config.api.row.get(table._id!, existing._id!) - - expect(res).toEqual({ - ...existing, - ...defaultRowFields, - }) - }) - - it("returns 404 when row does not exist", async () => { - const table = await config.api.table.save(defaultTable()) - await config.api.row.save(table._id!, {}) - await config.api.row.get(table._id!, "1234567", { - status: 404, - }) - }) - - isInternal && - it("can search row from user table", async () => { - const res = await config.api.row.get( - InternalTables.USER_METADATA, - config.userMetadataId! - ) - - expect(res).toEqual({ - ...config.getUser(), - _id: config.userMetadataId!, - _rev: expect.any(String), - roles: undefined, - roleId: "ADMIN", - tableId: InternalTables.USER_METADATA, - }) - }) - }) - - describe("fetch", () => { - it("fetches all rows for given tableId", async () => { - const table = await config.api.table.save(defaultTable()) - const rows = await Promise.all([ - config.api.row.save(table._id!, {}), - config.api.row.save(table._id!, {}), - ]) - - const res = await config.api.row.fetch(table._id!) 
- expect(res.map(r => r._id)).toEqual( - expect.arrayContaining(rows.map(r => r._id)) - ) - }) - - it("returns 404 when table does not exist", async () => { - await config.api.row.fetch("1234567", { status: 404 }) - }) - }) - - describe("update", () => { - it("updates an existing row successfully", async () => { - const existing = await config.api.row.save(table._id!, {}) - const rowUsage = await getRowUsage() - - const res = await config.api.row.save(table._id!, { - _id: existing._id, - _rev: existing._rev, - name: "Updated Name", - }) - - expect(res.name).toEqual("Updated Name") - await assertRowUsage(rowUsage) - }) - - !isInternal && - it("can update a row on an external table with a primary key", async () => { - const tableName = uuid.v4().substring(0, 10) - await client!.schema.createTable(tableName, table => { - table.increments("id").primary() - table.string("name") - }) - - const res = await config.api.datasource.fetchSchema({ - datasourceId: datasource!._id!, - }) - const table = res.datasource.entities![tableName] - - const row = await config.api.row.save(table._id!, { - id: 1, - name: "Row 1", - }) - - const updatedRow = await config.api.row.save(table._id!, { - _id: row._id!, - name: "Row 1 Updated", - }) - - expect(updatedRow.name).toEqual("Row 1 Updated") - - const rows = await config.api.row.fetch(table._id!) - expect(rows).toHaveLength(1) - }) - - describe("relations to same table", () => { - let relatedRows: Row[] - - beforeAll(async () => { - const relatedTable = await config.api.table.save( - defaultTable({ - schema: { - name: { name: "name", type: FieldType.STRING }, - }, - }) - ) - const relatedTableId = relatedTable._id! - table = await config.api.table.save( - defaultTable({ - schema: { - name: { name: "name", type: FieldType.STRING }, - related1: { - type: FieldType.LINK, - name: "related1", - fieldName: "main1", - tableId: relatedTableId, - relationshipType: RelationshipType.MANY_TO_MANY, - }, - related2: { - type: FieldType.LINK, - name: "related2", - fieldName: "main2", - tableId: relatedTableId, - relationshipType: RelationshipType.MANY_TO_MANY, - }, - }, - }) - ) - relatedRows = await Promise.all([ - config.api.row.save(relatedTableId, { name: "foo" }), - config.api.row.save(relatedTableId, { name: "bar" }), - config.api.row.save(relatedTableId, { name: "baz" }), - config.api.row.save(relatedTableId, { name: "boo" }), - ]) - }) - - it("can edit rows with both relationships", async () => { - let row = await config.api.row.save(table._id!, { - name: "test", - related1: [relatedRows[0]._id!], - related2: [relatedRows[1]._id!], - }) - - row = await config.api.row.save(table._id!, { - ...row, - related1: [relatedRows[0]._id!, relatedRows[1]._id!], - related2: [relatedRows[2]._id!], - }) - - expect(row).toEqual( - expect.objectContaining({ - name: "test", - related1: expect.arrayContaining([ - { - _id: relatedRows[0]._id, - primaryDisplay: relatedRows[0].name, - }, - { - _id: relatedRows[1]._id, - primaryDisplay: relatedRows[1].name, - }, - ]), - related2: [ - { - _id: relatedRows[2]._id, - primaryDisplay: relatedRows[2].name, - }, - ], - }) - ) - }) - - it("can drop existing relationship", async () => { - let row = await config.api.row.save(table._id!, { - name: "test", - related1: [relatedRows[0]._id!], - related2: [relatedRows[1]._id!], - }) - - row = await config.api.row.save(table._id!, { - ...row, - related1: [], - related2: [relatedRows[2]._id!], - }) - - expect(row).toEqual( - expect.objectContaining({ - name: "test", - related2: [ - { - _id: 
relatedRows[2]._id, - primaryDisplay: relatedRows[2].name, - }, - ], - }) - ) - expect(row.related1).toBeUndefined() - }) - - it("can drop both relationships", async () => { - let row = await config.api.row.save(table._id!, { - name: "test", - related1: [relatedRows[0]._id!], - related2: [relatedRows[1]._id!], - }) - - row = await config.api.row.save(table._id!, { - ...row, - related1: [], - related2: [], - }) - - expect(row).toEqual( - expect.objectContaining({ - name: "test", - }) - ) - expect(row.related1).toBeUndefined() - expect(row.related2).toBeUndefined() - }) - }) - }) - - describe("patch", () => { - let otherTable: Table + describe("relations to same table", () => { + let relatedRows: Row[] beforeAll(async () => { - table = await config.api.table.save(defaultTable()) - otherTable = await config.api.table.save( + const relatedTable = await config.api.table.save( defaultTable({ schema: { - relationship: { - name: "relationship", - relationshipType: RelationshipType.ONE_TO_MANY, + name: { name: "name", type: FieldType.STRING }, + }, + }) + ) + const relatedTableId = relatedTable._id! + table = await config.api.table.save( + defaultTable({ + schema: { + name: { name: "name", type: FieldType.STRING }, + related1: { type: FieldType.LINK, - tableId: table._id!, - fieldName: "relationship", + name: "related1", + fieldName: "main1", + tableId: relatedTableId, + relationshipType: RelationshipType.MANY_TO_MANY, + }, + related2: { + type: FieldType.LINK, + name: "related2", + fieldName: "main2", + tableId: relatedTableId, + relationshipType: RelationshipType.MANY_TO_MANY, }, }, }) ) + relatedRows = await Promise.all([ + config.api.row.save(relatedTableId, { name: "foo" }), + config.api.row.save(relatedTableId, { name: "bar" }), + config.api.row.save(relatedTableId, { name: "baz" }), + config.api.row.save(relatedTableId, { name: "boo" }), + ]) }) - it("should update only the fields that are supplied", async () => { - const existing = await config.api.row.save(table._id!, {}) + it("can create rows with both relationships", async () => { + const row = await config.api.row.save(table._id!, { + name: "test", + related1: [relatedRows[0]._id!], + related2: [relatedRows[1]._id!], + }) - const rowUsage = await getRowUsage() + expect(row).toEqual( + expect.objectContaining({ + name: "test", + related1: [ + { + _id: relatedRows[0]._id, + primaryDisplay: relatedRows[0].name, + }, + ], + related2: [ + { + _id: relatedRows[1]._id, + primaryDisplay: relatedRows[1].name, + }, + ], + }) + ) + }) + + it("can create rows with no relationships", async () => { + const row = await config.api.row.save(table._id!, { + name: "test", + }) + + expect(row.related1).toBeUndefined() + expect(row.related2).toBeUndefined() + }) + + it("can create rows with only one relationships field", async () => { + const row = await config.api.row.save(table._id!, { + name: "test", + related1: [], + related2: [relatedRows[1]._id!], + }) + + expect(row).toEqual( + expect.objectContaining({ + name: "test", + related2: [ + { + _id: relatedRows[1]._id, + primaryDisplay: relatedRows[1].name, + }, + ], + }) + ) + expect(row.related1).toBeUndefined() + }) + }) + }) + + describe("get", () => { + it("reads an existing row successfully", async () => { + const existing = await config.api.row.save(table._id!, {}) + + const res = await config.api.row.get(table._id!, existing._id!) 
+ + expect(res).toEqual({ + ...existing, + ...defaultRowFields, + }) + }) + + it("returns 404 when row does not exist", async () => { + const table = await config.api.table.save(defaultTable()) + await config.api.row.save(table._id!, {}) + await config.api.row.get(table._id!, "1234567", { + status: 404, + }) + }) + + isInternal && + it("can search row from user table", async () => { + const res = await config.api.row.get( + InternalTables.USER_METADATA, + config.userMetadataId! + ) + + expect(res).toEqual({ + ...config.getUser(), + _id: config.userMetadataId!, + _rev: expect.any(String), + roles: undefined, + roleId: "ADMIN", + tableId: InternalTables.USER_METADATA, + }) + }) + }) + + describe("fetch", () => { + it("fetches all rows for given tableId", async () => { + const table = await config.api.table.save(defaultTable()) + const rows = await Promise.all([ + config.api.row.save(table._id!, {}), + config.api.row.save(table._id!, {}), + ]) + + const res = await config.api.row.fetch(table._id!) + expect(res.map(r => r._id)).toEqual( + expect.arrayContaining(rows.map(r => r._id)) + ) + }) + + it("returns 404 when table does not exist", async () => { + await config.api.row.fetch("1234567", { status: 404 }) + }) + }) + + describe("update", () => { + it("updates an existing row successfully", async () => { + const existing = await config.api.row.save(table._id!, {}) + const rowUsage = await getRowUsage() + + const res = await config.api.row.save(table._id!, { + _id: existing._id, + _rev: existing._rev, + name: "Updated Name", + }) + + expect(res.name).toEqual("Updated Name") + await assertRowUsage(rowUsage) + }) + + !isInternal && + it("can update a row on an external table with a primary key", async () => { + const tableName = uuid.v4().substring(0, 10) + await client!.schema.createTable(tableName, table => { + table.increments("id").primary() + table.string("name") + }) + + const res = await config.api.datasource.fetchSchema({ + datasourceId: datasource!._id!, + }) + const table = res.datasource.entities![tableName] + + const row = await config.api.row.save(table._id!, { + id: 1, + name: "Row 1", + }) + + const updatedRow = await config.api.row.save(table._id!, { + _id: row._id!, + name: "Row 1 Updated", + }) + + expect(updatedRow.name).toEqual("Row 1 Updated") + + const rows = await config.api.row.fetch(table._id!) + expect(rows).toHaveLength(1) + }) + + describe("relations to same table", () => { + let relatedRows: Row[] + + beforeAll(async () => { + const relatedTable = await config.api.table.save( + defaultTable({ + schema: { + name: { name: "name", type: FieldType.STRING }, + }, + }) + ) + const relatedTableId = relatedTable._id! 
+ table = await config.api.table.save( + defaultTable({ + schema: { + name: { name: "name", type: FieldType.STRING }, + related1: { + type: FieldType.LINK, + name: "related1", + fieldName: "main1", + tableId: relatedTableId, + relationshipType: RelationshipType.MANY_TO_MANY, + }, + related2: { + type: FieldType.LINK, + name: "related2", + fieldName: "main2", + tableId: relatedTableId, + relationshipType: RelationshipType.MANY_TO_MANY, + }, + }, + }) + ) + relatedRows = await Promise.all([ + config.api.row.save(relatedTableId, { name: "foo" }), + config.api.row.save(relatedTableId, { name: "bar" }), + config.api.row.save(relatedTableId, { name: "baz" }), + config.api.row.save(relatedTableId, { name: "boo" }), + ]) + }) + + it("can edit rows with both relationships", async () => { + let row = await config.api.row.save(table._id!, { + name: "test", + related1: [relatedRows[0]._id!], + related2: [relatedRows[1]._id!], + }) + + row = await config.api.row.save(table._id!, { + ...row, + related1: [relatedRows[0]._id!, relatedRows[1]._id!], + related2: [relatedRows[2]._id!], + }) + + expect(row).toEqual( + expect.objectContaining({ + name: "test", + related1: expect.arrayContaining([ + { + _id: relatedRows[0]._id, + primaryDisplay: relatedRows[0].name, + }, + { + _id: relatedRows[1]._id, + primaryDisplay: relatedRows[1].name, + }, + ]), + related2: [ + { + _id: relatedRows[2]._id, + primaryDisplay: relatedRows[2].name, + }, + ], + }) + ) + }) + + it("can drop existing relationship", async () => { + let row = await config.api.row.save(table._id!, { + name: "test", + related1: [relatedRows[0]._id!], + related2: [relatedRows[1]._id!], + }) + + row = await config.api.row.save(table._id!, { + ...row, + related1: [], + related2: [relatedRows[2]._id!], + }) + + expect(row).toEqual( + expect.objectContaining({ + name: "test", + related2: [ + { + _id: relatedRows[2]._id, + primaryDisplay: relatedRows[2].name, + }, + ], + }) + ) + expect(row.related1).toBeUndefined() + }) + + it("can drop both relationships", async () => { + let row = await config.api.row.save(table._id!, { + name: "test", + related1: [relatedRows[0]._id!], + related2: [relatedRows[1]._id!], + }) + + row = await config.api.row.save(table._id!, { + ...row, + related1: [], + related2: [], + }) + + expect(row).toEqual( + expect.objectContaining({ + name: "test", + }) + ) + expect(row.related1).toBeUndefined() + expect(row.related2).toBeUndefined() + }) + }) + }) + + describe("patch", () => { + let otherTable: Table + + beforeAll(async () => { + table = await config.api.table.save(defaultTable()) + otherTable = await config.api.table.save( + defaultTable({ + schema: { + relationship: { + name: "relationship", + relationshipType: RelationshipType.ONE_TO_MANY, + type: FieldType.LINK, + tableId: table._id!, + fieldName: "relationship", + }, + }, + }) + ) + }) + + it("should update only the fields that are supplied", async () => { + const existing = await config.api.row.save(table._id!, {}) + + const rowUsage = await getRowUsage() + + const row = await config.api.row.patch(table._id!, { + _id: existing._id!, + _rev: existing._rev!, + tableId: table._id!, + name: "Updated Name", + }) + + expect(row.name).toEqual("Updated Name") + expect(row.description).toEqual(existing.description) + + const savedRow = await config.api.row.get(table._id!, row._id!) 
+ + expect(savedRow.description).toEqual(existing.description) + expect(savedRow.name).toEqual("Updated Name") + await assertRowUsage(rowUsage) + }) + + it("should update only the fields that are supplied and emit the correct oldRow", async () => { + let beforeRow = await config.api.row.save(table._id!, { + name: "test", + description: "test", + }) + const opts = { + name: "row:update", + matchFn: (event: UpdatedRowEventEmitter) => + event.row._id === beforeRow._id, + } + const event = await waitForEvent(opts, async () => { + await config.api.row.patch(table._id!, { + _id: beforeRow._id!, + _rev: beforeRow._rev!, + tableId: table._id!, + name: "Updated Name", + }) + }) + + expect(event.oldRow).toBeDefined() + expect(event.oldRow.name).toEqual("test") + expect(event.row.name).toEqual("Updated Name") + expect(event.oldRow.description).toEqual(beforeRow.description) + expect(event.row.description).toEqual(beforeRow.description) + }) + + it("should throw an error when given improper types", async () => { + const existing = await config.api.row.save(table._id!, {}) + const rowUsage = await getRowUsage() + + await config.api.row.patch( + table._id!, + { + _id: existing._id!, + _rev: existing._rev!, + tableId: table._id!, + name: 1, + }, + { status: 400 } + ) + + await assertRowUsage(rowUsage) + }) + + it("should not overwrite links if those links are not set", async () => { + let linkField: FieldSchema = { + type: FieldType.LINK, + name: "", + fieldName: "", + constraints: { + type: "array", + presence: false, + }, + relationshipType: RelationshipType.ONE_TO_MANY, + tableId: InternalTable.USER_METADATA, + } + + let table = await config.api.table.save({ + name: "TestTable", + type: "table", + sourceType: TableSourceType.INTERNAL, + sourceId: INTERNAL_TABLE_SOURCE_ID, + schema: { + user1: { ...linkField, name: "user1", fieldName: "user1" }, + user2: { ...linkField, name: "user2", fieldName: "user2" }, + }, + }) + + let user1 = await config.createUser() + let user2 = await config.createUser() + + let row = await config.api.row.save(table._id!, { + user1: [{ _id: user1._id }], + user2: [{ _id: user2._id }], + }) + + let getResp = await config.api.row.get(table._id!, row._id!) + expect(getResp.user1[0]._id).toEqual(user1._id) + expect(getResp.user2[0]._id).toEqual(user2._id) + + let patchResp = await config.api.row.patch(table._id!, { + _id: row._id!, + _rev: row._rev!, + tableId: table._id!, + user1: [{ _id: user2._id }], + }) + expect(patchResp.user1[0]._id).toEqual(user2._id) + expect(patchResp.user2[0]._id).toEqual(user2._id) + + getResp = await config.api.row.get(table._id!, row._id!) 
+ expect(getResp.user1[0]._id).toEqual(user2._id) + expect(getResp.user2[0]._id).toEqual(user2._id) + }) + + it("should be able to remove a relationship from many side", async () => { + const row = await config.api.row.save(otherTable._id!, { + name: "test", + description: "test", + }) + const row2 = await config.api.row.save(otherTable._id!, { + name: "test", + description: "test", + }) + const { _id } = await config.api.row.save(table._id!, { + relationship: [{ _id: row._id }, { _id: row2._id }], + }) + const relatedRow = await config.api.row.get(table._id!, _id!, { + status: 200, + }) + expect(relatedRow.relationship.length).toEqual(2) + await config.api.row.save(table._id!, { + ...relatedRow, + relationship: [{ _id: row._id }], + }) + const afterRelatedRow = await config.api.row.get(table._id!, _id!, { + status: 200, + }) + expect(afterRelatedRow.relationship.length).toEqual(1) + expect(afterRelatedRow.relationship[0]._id).toEqual(row._id) + }) + + it("should be able to update relationships when both columns are same name", async () => { + let row = await config.api.row.save(table._id!, { + name: "test", + description: "test", + }) + let row2 = await config.api.row.save(otherTable._id!, { + name: "test", + description: "test", + relationship: [row._id], + }) + row = await config.api.row.get(table._id!, row._id!) + expect(row.relationship.length).toBe(1) + const resp = await config.api.row.patch(table._id!, { + _id: row._id!, + _rev: row._rev!, + tableId: row.tableId!, + name: "test2", + relationship: [row2._id], + }) + expect(resp.relationship.length).toBe(1) + }) + + !isInternal && + // MSSQL needs a setting called IDENTITY_INSERT to be set to ON to allow writing + // to identity columns. This is not something Budibase does currently. + !isMSSQL && + it("should support updating fields that are part of a composite key", async () => { + const tableRequest = saveTableRequest({ + primary: ["number", "string"], + schema: { + string: { + type: FieldType.STRING, + name: "string", + }, + number: { + type: FieldType.NUMBER, + name: "number", + }, + }, + }) + + delete tableRequest.schema.id + + const table = await config.api.table.save(tableRequest) + + const stringValue = generator.word() + + // MySQL and MariaDB auto-increment fields have a minimum value of 1. If + // you try to save a row with a value of 0 it will use 1 instead. + const naturalValue = generator.integer({ min: 1, max: 1000 }) + + const existing = await config.api.row.save(table._id!, { + string: stringValue, + number: naturalValue, + }) + + expect(existing._id).toEqual(`%5B${naturalValue}%2C'${stringValue}'%5D`) const row = await config.api.row.patch(table._id!, { _id: existing._id!, _rev: existing._rev!, tableId: table._id!, - name: "Updated Name", + string: stringValue, + number: 1500, }) - expect(row.name).toEqual("Updated Name") - expect(row.description).toEqual(existing.description) + expect(row._id).toEqual(`%5B${"1500"}%2C'${stringValue}'%5D`) + }) + }) - const savedRow = await config.api.row.get(table._id!, row._id!) 
+ describe("destroy", () => { + beforeAll(async () => { + table = await config.api.table.save(defaultTable()) + }) - expect(savedRow.description).toEqual(existing.description) - expect(savedRow.name).toEqual("Updated Name") - await assertRowUsage(rowUsage) + it("should be able to delete a row", async () => { + const createdRow = await config.api.row.save(table._id!, {}) + const rowUsage = await getRowUsage() + + const res = await config.api.row.bulkDelete(table._id!, { + rows: [createdRow], + }) + expect(res[0]._id).toEqual(createdRow._id) + await assertRowUsage(isInternal ? rowUsage - 1 : rowUsage) + }) + + it("should be able to delete a row with ID only", async () => { + const createdRow = await config.api.row.save(table._id!, {}) + const rowUsage = await getRowUsage() + + const res = await config.api.row.bulkDelete(table._id!, { + rows: [createdRow._id!], + }) + expect(res[0]._id).toEqual(createdRow._id) + expect(res[0].tableId).toEqual(table._id!) + await assertRowUsage(isInternal ? rowUsage - 1 : rowUsage) + }) + + it("should be able to bulk delete rows, including a row that doesn't exist", async () => { + const createdRow = await config.api.row.save(table._id!, {}) + const createdRow2 = await config.api.row.save(table._id!, {}) + + const res = await config.api.row.bulkDelete(table._id!, { + rows: [createdRow, createdRow2, { _id: "9999999" }], }) - it("should update only the fields that are supplied and emit the correct oldRow", async () => { - let beforeRow = await config.api.row.save(table._id!, { - name: "test", - description: "test", - }) - const opts = { - name: "row:update", - matchFn: (event: UpdatedRowEventEmitter) => - event.row._id === beforeRow._id, - } - const event = await waitForEvent(opts, async () => { - await config.api.row.patch(table._id!, { - _id: beforeRow._id!, - _rev: beforeRow._rev!, - tableId: table._id!, - name: "Updated Name", - }) - }) + expect(res.map(r => r._id)).toEqual( + expect.arrayContaining([createdRow._id, createdRow2._id]) + ) + expect(res.length).toEqual(2) + }) - expect(event.oldRow).toBeDefined() - expect(event.oldRow.name).toEqual("test") - expect(event.row.name).toEqual("Updated Name") - expect(event.oldRow.description).toEqual(beforeRow.description) - expect(event.row.description).toEqual(beforeRow.description) - }) + describe("relations to same table", () => { + let relatedRows: Row[] - it("should throw an error when given improper types", async () => { - const existing = await config.api.row.save(table._id!, {}) - const rowUsage = await getRowUsage() - - await config.api.row.patch( - table._id!, - { - _id: existing._id!, - _rev: existing._rev!, - tableId: table._id!, - name: 1, - }, - { status: 400 } - ) - - await assertRowUsage(rowUsage) - }) - - it("should not overwrite links if those links are not set", async () => { - let linkField: FieldSchema = { - type: FieldType.LINK, - name: "", - fieldName: "", - constraints: { - type: "array", - presence: false, - }, - relationshipType: RelationshipType.ONE_TO_MANY, - tableId: InternalTable.USER_METADATA, - } - - let table = await config.api.table.save({ - name: "TestTable", - type: "table", - sourceType: TableSourceType.INTERNAL, - sourceId: INTERNAL_TABLE_SOURCE_ID, - schema: { - user1: { ...linkField, name: "user1", fieldName: "user1" }, - user2: { ...linkField, name: "user2", fieldName: "user2" }, - }, - }) - - let user1 = await config.createUser() - let user2 = await config.createUser() - - let row = await config.api.row.save(table._id!, { - user1: [{ _id: user1._id }], - user2: [{ _id: 
user2._id }], - }) - - let getResp = await config.api.row.get(table._id!, row._id!) - expect(getResp.user1[0]._id).toEqual(user1._id) - expect(getResp.user2[0]._id).toEqual(user2._id) - - let patchResp = await config.api.row.patch(table._id!, { - _id: row._id!, - _rev: row._rev!, - tableId: table._id!, - user1: [{ _id: user2._id }], - }) - expect(patchResp.user1[0]._id).toEqual(user2._id) - expect(patchResp.user2[0]._id).toEqual(user2._id) - - getResp = await config.api.row.get(table._id!, row._id!) - expect(getResp.user1[0]._id).toEqual(user2._id) - expect(getResp.user2[0]._id).toEqual(user2._id) - }) - - it("should be able to remove a relationship from many side", async () => { - const row = await config.api.row.save(otherTable._id!, { - name: "test", - description: "test", - }) - const row2 = await config.api.row.save(otherTable._id!, { - name: "test", - description: "test", - }) - const { _id } = await config.api.row.save(table._id!, { - relationship: [{ _id: row._id }, { _id: row2._id }], - }) - const relatedRow = await config.api.row.get(table._id!, _id!, { - status: 200, - }) - expect(relatedRow.relationship.length).toEqual(2) - await config.api.row.save(table._id!, { - ...relatedRow, - relationship: [{ _id: row._id }], - }) - const afterRelatedRow = await config.api.row.get(table._id!, _id!, { - status: 200, - }) - expect(afterRelatedRow.relationship.length).toEqual(1) - expect(afterRelatedRow.relationship[0]._id).toEqual(row._id) - }) - - it("should be able to update relationships when both columns are same name", async () => { - let row = await config.api.row.save(table._id!, { - name: "test", - description: "test", - }) - let row2 = await config.api.row.save(otherTable._id!, { - name: "test", - description: "test", - relationship: [row._id], - }) - row = await config.api.row.get(table._id!, row._id!) - expect(row.relationship.length).toBe(1) - const resp = await config.api.row.patch(table._id!, { - _id: row._id!, - _rev: row._rev!, - tableId: row.tableId!, - name: "test2", - relationship: [row2._id], - }) - expect(resp.relationship.length).toBe(1) - }) - - !isInternal && - // MSSQL needs a setting called IDENTITY_INSERT to be set to ON to allow writing - // to identity columns. This is not something Budibase does currently. - !isMSSQL && - it("should support updating fields that are part of a composite key", async () => { - const tableRequest = saveTableRequest({ - primary: ["number", "string"], + beforeAll(async () => { + const relatedTable = await config.api.table.save( + defaultTable({ schema: { - string: { - type: FieldType.STRING, - name: "string", + name: { name: "name", type: FieldType.STRING }, + }, + }) + ) + const relatedTableId = relatedTable._id! + table = await config.api.table.save( + defaultTable({ + schema: { + name: { name: "name", type: FieldType.STRING }, + related1: { + type: FieldType.LINK, + name: "related1", + fieldName: "main1", + tableId: relatedTableId, + relationshipType: RelationshipType.MANY_TO_MANY, }, - number: { - type: FieldType.NUMBER, - name: "number", + related2: { + type: FieldType.LINK, + name: "related2", + fieldName: "main2", + tableId: relatedTableId, + relationshipType: RelationshipType.MANY_TO_MANY, }, }, }) - - delete tableRequest.schema.id - - const table = await config.api.table.save(tableRequest) - - const stringValue = generator.word() - - // MySQL and MariaDB auto-increment fields have a minimum value of 1. If - // you try to save a row with a value of 0 it will use 1 instead. 
- const naturalValue = generator.integer({ min: 1, max: 1000 }) - - const existing = await config.api.row.save(table._id!, { - string: stringValue, - number: naturalValue, - }) - - expect(existing._id).toEqual( - `%5B${naturalValue}%2C'${stringValue}'%5D` - ) - - const row = await config.api.row.patch(table._id!, { - _id: existing._id!, - _rev: existing._rev!, - tableId: table._id!, - string: stringValue, - number: 1500, - }) - - expect(row._id).toEqual(`%5B${"1500"}%2C'${stringValue}'%5D`) - }) - }) - - describe("destroy", () => { - beforeAll(async () => { - table = await config.api.table.save(defaultTable()) - }) - - it("should be able to delete a row", async () => { - const createdRow = await config.api.row.save(table._id!, {}) - const rowUsage = await getRowUsage() - - const res = await config.api.row.bulkDelete(table._id!, { - rows: [createdRow], - }) - expect(res[0]._id).toEqual(createdRow._id) - await assertRowUsage(isInternal ? rowUsage - 1 : rowUsage) - }) - - it("should be able to delete a row with ID only", async () => { - const createdRow = await config.api.row.save(table._id!, {}) - const rowUsage = await getRowUsage() - - const res = await config.api.row.bulkDelete(table._id!, { - rows: [createdRow._id!], - }) - expect(res[0]._id).toEqual(createdRow._id) - expect(res[0].tableId).toEqual(table._id!) - await assertRowUsage(isInternal ? rowUsage - 1 : rowUsage) - }) - - it("should be able to bulk delete rows, including a row that doesn't exist", async () => { - const createdRow = await config.api.row.save(table._id!, {}) - const createdRow2 = await config.api.row.save(table._id!, {}) - - const res = await config.api.row.bulkDelete(table._id!, { - rows: [createdRow, createdRow2, { _id: "9999999" }], - }) - - expect(res.map(r => r._id)).toEqual( - expect.arrayContaining([createdRow._id, createdRow2._id]) ) - expect(res.length).toEqual(2) + relatedRows = await Promise.all([ + config.api.row.save(relatedTableId, { name: "foo" }), + config.api.row.save(relatedTableId, { name: "bar" }), + config.api.row.save(relatedTableId, { name: "baz" }), + config.api.row.save(relatedTableId, { name: "boo" }), + ]) }) - describe("relations to same table", () => { - let relatedRows: Row[] - - beforeAll(async () => { - const relatedTable = await config.api.table.save( - defaultTable({ - schema: { - name: { name: "name", type: FieldType.STRING }, - }, - }) - ) - const relatedTableId = relatedTable._id! 
- table = await config.api.table.save( - defaultTable({ - schema: { - name: { name: "name", type: FieldType.STRING }, - related1: { - type: FieldType.LINK, - name: "related1", - fieldName: "main1", - tableId: relatedTableId, - relationshipType: RelationshipType.MANY_TO_MANY, - }, - related2: { - type: FieldType.LINK, - name: "related2", - fieldName: "main2", - tableId: relatedTableId, - relationshipType: RelationshipType.MANY_TO_MANY, - }, - }, - }) - ) - relatedRows = await Promise.all([ - config.api.row.save(relatedTableId, { name: "foo" }), - config.api.row.save(relatedTableId, { name: "bar" }), - config.api.row.save(relatedTableId, { name: "baz" }), - config.api.row.save(relatedTableId, { name: "boo" }), - ]) + it("can delete rows with both relationships", async () => { + const row = await config.api.row.save(table._id!, { + name: "test", + related1: [relatedRows[0]._id!], + related2: [relatedRows[1]._id!], }) - it("can delete rows with both relationships", async () => { - const row = await config.api.row.save(table._id!, { - name: "test", - related1: [relatedRows[0]._id!], - related2: [relatedRows[1]._id!], - }) + await config.api.row.delete(table._id!, { _id: row._id! }) - await config.api.row.delete(table._id!, { _id: row._id! }) + await config.api.row.get(table._id!, row._id!, { status: 404 }) + }) - await config.api.row.get(table._id!, row._id!, { status: 404 }) + it("can delete rows with empty relationships", async () => { + const row = await config.api.row.save(table._id!, { + name: "test", + related1: [], + related2: [], }) - it("can delete rows with empty relationships", async () => { - const row = await config.api.row.save(table._id!, { - name: "test", - related1: [], - related2: [], - }) + await config.api.row.delete(table._id!, { _id: row._id! }) - await config.api.row.delete(table._id!, { _id: row._id! 
}) - - await config.api.row.get(table._id!, row._id!, { status: 404 }) - }) + await config.api.row.get(table._id!, row._id!, { status: 404 }) }) }) + }) - describe("validate", () => { - beforeAll(async () => { - table = await config.api.table.save(defaultTable()) - }) + describe("validate", () => { + beforeAll(async () => { + table = await config.api.table.save(defaultTable()) + }) - it("should return no errors on valid row", async () => { - const rowUsage = await getRowUsage() + it("should return no errors on valid row", async () => { + const rowUsage = await getRowUsage() - const res = await config.api.row.validate(table._id!, { name: "ivan" }) + const res = await config.api.row.validate(table._id!, { name: "ivan" }) + expect(res.valid).toBe(true) + expect(Object.keys(res.errors)).toEqual([]) + await assertRowUsage(rowUsage) + }) + + it("should errors on invalid row", async () => { + const rowUsage = await getRowUsage() + + const res = await config.api.row.validate(table._id!, { name: 1 }) + + if (isInternal) { + expect(res.valid).toBe(false) + expect(Object.keys(res.errors)).toEqual(["name"]) + } else { + // Validation for external is not implemented, so it will always return valid expect(res.valid).toBe(true) expect(Object.keys(res.errors)).toEqual([]) - await assertRowUsage(rowUsage) - }) + } + await assertRowUsage(rowUsage) + }) + }) - it("should errors on invalid row", async () => { - const rowUsage = await getRowUsage() - - const res = await config.api.row.validate(table._id!, { name: 1 }) - - if (isInternal) { - expect(res.valid).toBe(false) - expect(Object.keys(res.errors)).toEqual(["name"]) - } else { - // Validation for external is not implemented, so it will always return valid - expect(res.valid).toBe(true) - expect(Object.keys(res.errors)).toEqual([]) - } - await assertRowUsage(rowUsage) - }) + describe("bulkDelete", () => { + beforeAll(async () => { + table = await config.api.table.save(defaultTable()) }) - describe("bulkDelete", () => { - beforeAll(async () => { - table = await config.api.table.save(defaultTable()) + it("should be able to delete a bulk set of rows", async () => { + const row1 = await config.api.row.save(table._id!, {}) + const row2 = await config.api.row.save(table._id!, {}) + const rowUsage = await getRowUsage() + + const res = await config.api.row.bulkDelete(table._id!, { + rows: [row1, row2], }) - it("should be able to delete a bulk set of rows", async () => { - const row1 = await config.api.row.save(table._id!, {}) - const row2 = await config.api.row.save(table._id!, {}) + expect(res.length).toEqual(2) + await config.api.row.get(table._id!, row1._id!, { status: 404 }) + await assertRowUsage(isInternal ? rowUsage - 2 : rowUsage) + }) + + it("should be able to delete a variety of row set types", async () => { + const [row1, row2, row3] = await Promise.all([ + config.api.row.save(table._id!, {}), + config.api.row.save(table._id!, {}), + config.api.row.save(table._id!, {}), + ]) + const rowUsage = await getRowUsage() + + const res = await config.api.row.bulkDelete(table._id!, { + rows: [row1, row2._id!, { _id: row3._id }], + }) + + expect(res.length).toEqual(3) + await config.api.row.get(table._id!, row1._id!, { status: 404 }) + await assertRowUsage(isInternal ? 
rowUsage - 3 : rowUsage) + }) + + it("should accept a valid row object and delete the row", async () => { + const row1 = await config.api.row.save(table._id!, {}) + const rowUsage = await getRowUsage() + + const res = await config.api.row.delete(table._id!, row1 as DeleteRow) + + expect(res.id).toEqual(row1._id) + await config.api.row.get(table._id!, row1._id!, { status: 404 }) + await assertRowUsage(isInternal ? rowUsage - 1 : rowUsage) + }) + + it.each([{ not: "valid" }, { rows: 123 }, "invalid"])( + "should ignore malformed/invalid delete request: %s", + async (request: any) => { const rowUsage = await getRowUsage() - const res = await config.api.row.bulkDelete(table._id!, { - rows: [row1, row2], + await config.api.row.delete(table._id!, request, { + status: 400, + body: { + message: "Invalid delete rows request", + }, }) - expect(res.length).toEqual(2) - await config.api.row.get(table._id!, row1._id!, { status: 404 }) - await assertRowUsage(isInternal ? rowUsage - 2 : rowUsage) - }) + await assertRowUsage(rowUsage) + } + ) + }) - it("should be able to delete a variety of row set types", async () => { - const [row1, row2, row3] = await Promise.all([ - config.api.row.save(table._id!, {}), - config.api.row.save(table._id!, {}), - config.api.row.save(table._id!, {}), - ]) - const rowUsage = await getRowUsage() + describe("bulkImport", () => { + isInternal && + it("should update Auto ID field after bulk import", async () => { + const table = await config.api.table.save( + saveTableRequest({ + primary: ["autoId"], + schema: { + autoId: { + name: "autoId", + type: FieldType.NUMBER, + subtype: AutoFieldSubType.AUTO_ID, + autocolumn: true, + constraints: { + type: "number", + presence: false, + }, + }, + }, + }) + ) - const res = await config.api.row.bulkDelete(table._id!, { - rows: [row1, row2._id!, { _id: row3._id }], + let row = await config.api.row.save(table._id!, {}) + expect(row.autoId).toEqual(1) + + await config.api.row.bulkImport(table._id!, { + rows: [{ autoId: 2 }], }) - expect(res.length).toEqual(3) - await config.api.row.get(table._id!, row1._id!, { status: 404 }) - await assertRowUsage(isInternal ? rowUsage - 3 : rowUsage) + row = await config.api.row.save(table._id!, {}) + expect(row.autoId).toEqual(3) }) - it("should accept a valid row object and delete the row", async () => { - const row1 = await config.api.row.save(table._id!, {}) - const rowUsage = await getRowUsage() + isInternal && + it("should reject bulkImporting relationship fields", async () => { + const table1 = await config.api.table.save(saveTableRequest()) + const table2 = await config.api.table.save( + saveTableRequest({ + schema: { + relationship: { + name: "relationship", + type: FieldType.LINK, + tableId: table1._id!, + relationshipType: RelationshipType.ONE_TO_MANY, + fieldName: "relationship", + }, + }, + }) + ) - const res = await config.api.row.delete(table._id!, row1 as DeleteRow) - - expect(res.id).toEqual(row1._id) - await config.api.row.get(table._id!, row1._id!, { status: 404 }) - await assertRowUsage(isInternal ? rowUsage - 1 : rowUsage) - }) - - it.each([{ not: "valid" }, { rows: 123 }, "invalid"])( - "should ignore malformed/invalid delete request: %s", - async (request: any) => { - const rowUsage = await getRowUsage() - - await config.api.row.delete(table._id!, request, { + const table1Row1 = await config.api.row.save(table1._id!, {}) + await config.api.row.bulkImport( + table2._id!, + { + rows: [{ relationship: [table1Row1._id!] 
}], + }, + { status: 400, body: { - message: "Invalid delete rows request", + message: + 'Can\'t bulk import relationship fields for internal databases, found value in field "relationship"', }, - }) + } + ) + }) - await assertRowUsage(rowUsage) - } + it("should be able to bulkImport rows", async () => { + const table = await config.api.table.save( + saveTableRequest({ + schema: { + name: { + type: FieldType.STRING, + name: "name", + }, + description: { + type: FieldType.STRING, + name: "description", + }, + }, + }) ) + + const rowUsage = await getRowUsage() + + await config.api.row.bulkImport(table._id!, { + rows: [ + { + name: "Row 1", + description: "Row 1 description", + }, + { + name: "Row 2", + description: "Row 2 description", + }, + ], + }) + + const rows = await config.api.row.fetch(table._id!) + expect(rows.length).toEqual(2) + + rows.sort((a, b) => a.name.localeCompare(b.name)) + expect(rows[0].name).toEqual("Row 1") + expect(rows[0].description).toEqual("Row 1 description") + expect(rows[1].name).toEqual("Row 2") + expect(rows[1].description).toEqual("Row 2 description") + + await assertRowUsage(isInternal ? rowUsage + 2 : rowUsage) }) - describe("bulkImport", () => { - isInternal && - it("should update Auto ID field after bulk import", async () => { - const table = await config.api.table.save( - saveTableRequest({ - primary: ["autoId"], - schema: { - autoId: { - name: "autoId", - type: FieldType.NUMBER, - subtype: AutoFieldSubType.AUTO_ID, - autocolumn: true, - constraints: { - type: "number", - presence: false, - }, - }, - }, - }) - ) - - let row = await config.api.row.save(table._id!, {}) - expect(row.autoId).toEqual(1) - - await config.api.row.bulkImport(table._id!, { - rows: [{ autoId: 2 }], - }) - - row = await config.api.row.save(table._id!, {}) - expect(row.autoId).toEqual(3) - }) - - isInternal && - it("should reject bulkImporting relationship fields", async () => { - const table1 = await config.api.table.save(saveTableRequest()) - const table2 = await config.api.table.save( - saveTableRequest({ - schema: { - relationship: { - name: "relationship", - type: FieldType.LINK, - tableId: table1._id!, - relationshipType: RelationshipType.ONE_TO_MANY, - fieldName: "relationship", - }, - }, - }) - ) - - const table1Row1 = await config.api.row.save(table1._id!, {}) - await config.api.row.bulkImport( - table2._id!, - { - rows: [{ relationship: [table1Row1._id!] }], - }, - { - status: 400, - body: { - message: - 'Can\'t bulk import relationship fields for internal databases, found value in field "relationship"', - }, - } - ) - }) - - it("should be able to bulkImport rows", async () => { + isInternal && + it("should be able to update existing rows on bulkImport", async () => { const table = await config.api.table.save( saveTableRequest({ schema: { @@ -1664,6 +1706,11 @@ datasourceDescribe( }) ) + const existingRow = await config.api.row.save(table._id!, { + name: "Existing row", + description: "Existing description", + }) + const rowUsage = await getRowUsage() await config.api.row.bulkImport(table._id!, { @@ -1672,6 +1719,60 @@ datasourceDescribe( name: "Row 1", description: "Row 1 description", }, + { ...existingRow, name: "Updated existing row" }, + { + name: "Row 2", + description: "Row 2 description", + }, + ], + identifierFields: ["_id"], + }) + + const rows = await config.api.row.fetch(table._id!) 
+ expect(rows.length).toEqual(3) + + rows.sort((a, b) => a.name.localeCompare(b.name)) + expect(rows[0].name).toEqual("Row 1") + expect(rows[0].description).toEqual("Row 1 description") + expect(rows[1].name).toEqual("Row 2") + expect(rows[1].description).toEqual("Row 2 description") + expect(rows[2].name).toEqual("Updated existing row") + expect(rows[2].description).toEqual("Existing description") + + await assertRowUsage(rowUsage + 2) + }) + + isInternal && + it("should create new rows if not identifierFields are provided", async () => { + const table = await config.api.table.save( + saveTableRequest({ + schema: { + name: { + type: FieldType.STRING, + name: "name", + }, + description: { + type: FieldType.STRING, + name: "description", + }, + }, + }) + ) + + const existingRow = await config.api.row.save(table._id!, { + name: "Existing row", + description: "Existing description", + }) + + const rowUsage = await getRowUsage() + + await config.api.row.bulkImport(table._id!, { + rows: [ + { + name: "Row 1", + description: "Row 1 description", + }, + { ...existingRow, name: "Updated existing row" }, { name: "Row 2", description: "Row 2 description", @@ -1680,1658 +1781,1588 @@ datasourceDescribe( }) const rows = await config.api.row.fetch(table._id!) - expect(rows.length).toEqual(2) + expect(rows.length).toEqual(4) rows.sort((a, b) => a.name.localeCompare(b.name)) - expect(rows[0].name).toEqual("Row 1") - expect(rows[0].description).toEqual("Row 1 description") - expect(rows[1].name).toEqual("Row 2") - expect(rows[1].description).toEqual("Row 2 description") + expect(rows[0].name).toEqual("Existing row") + expect(rows[0].description).toEqual("Existing description") + expect(rows[1].name).toEqual("Row 1") + expect(rows[1].description).toEqual("Row 1 description") + expect(rows[2].name).toEqual("Row 2") + expect(rows[2].description).toEqual("Row 2 description") + expect(rows[3].name).toEqual("Updated existing row") + expect(rows[3].description).toEqual("Existing description") - await assertRowUsage(isInternal ? rowUsage + 2 : rowUsage) + await assertRowUsage(rowUsage + 3) }) - isInternal && - it("should be able to update existing rows on bulkImport", async () => { - const table = await config.api.table.save( - saveTableRequest({ - schema: { - name: { - type: FieldType.STRING, - name: "name", - }, - description: { - type: FieldType.STRING, - name: "description", - }, - }, - }) - ) - - const existingRow = await config.api.row.save(table._id!, { - name: "Existing row", - description: "Existing description", - }) - - const rowUsage = await getRowUsage() - - await config.api.row.bulkImport(table._id!, { - rows: [ - { - name: "Row 1", - description: "Row 1 description", - }, - { ...existingRow, name: "Updated existing row" }, - { - name: "Row 2", - description: "Row 2 description", - }, - ], - identifierFields: ["_id"], - }) - - const rows = await config.api.row.fetch(table._id!) 
- expect(rows.length).toEqual(3) - - rows.sort((a, b) => a.name.localeCompare(b.name)) - expect(rows[0].name).toEqual("Row 1") - expect(rows[0].description).toEqual("Row 1 description") - expect(rows[1].name).toEqual("Row 2") - expect(rows[1].description).toEqual("Row 2 description") - expect(rows[2].name).toEqual("Updated existing row") - expect(rows[2].description).toEqual("Existing description") - - await assertRowUsage(rowUsage + 2) - }) - - isInternal && - it("should create new rows if not identifierFields are provided", async () => { - const table = await config.api.table.save( - saveTableRequest({ - schema: { - name: { - type: FieldType.STRING, - name: "name", - }, - description: { - type: FieldType.STRING, - name: "description", - }, - }, - }) - ) - - const existingRow = await config.api.row.save(table._id!, { - name: "Existing row", - description: "Existing description", - }) - - const rowUsage = await getRowUsage() - - await config.api.row.bulkImport(table._id!, { - rows: [ - { - name: "Row 1", - description: "Row 1 description", - }, - { ...existingRow, name: "Updated existing row" }, - { - name: "Row 2", - description: "Row 2 description", - }, - ], - }) - - const rows = await config.api.row.fetch(table._id!) - expect(rows.length).toEqual(4) - - rows.sort((a, b) => a.name.localeCompare(b.name)) - expect(rows[0].name).toEqual("Existing row") - expect(rows[0].description).toEqual("Existing description") - expect(rows[1].name).toEqual("Row 1") - expect(rows[1].description).toEqual("Row 1 description") - expect(rows[2].name).toEqual("Row 2") - expect(rows[2].description).toEqual("Row 2 description") - expect(rows[3].name).toEqual("Updated existing row") - expect(rows[3].description).toEqual("Existing description") - - await assertRowUsage(rowUsage + 3) - }) - - // Upserting isn't yet supported in MSSQL / Oracle, see: - // https://github.com/knex/knex/pull/6050 - !isMSSQL && - !isOracle && - it("should be able to update existing rows with bulkImport", async () => { - const table = await config.api.table.save( - saveTableRequest({ - primary: ["userId"], - schema: { - userId: { - type: FieldType.NUMBER, - name: "userId", - constraints: { - presence: true, - }, - }, - name: { - type: FieldType.STRING, - name: "name", - }, - description: { - type: FieldType.STRING, - name: "description", - }, - }, - }) - ) - - const row1 = await config.api.row.save(table._id!, { - userId: 1, - name: "Row 1", - description: "Row 1 description", - }) - - const row2 = await config.api.row.save(table._id!, { - userId: 2, - name: "Row 2", - description: "Row 2 description", - }) - - await config.api.row.bulkImport(table._id!, { - identifierFields: ["userId"], - rows: [ - { - userId: row1.userId, - name: "Row 1 updated", - description: "Row 1 description updated", - }, - { - userId: row2.userId, - name: "Row 2 updated", - description: "Row 2 description updated", - }, - { - userId: 3, - name: "Row 3", - description: "Row 3 description", - }, - ], - }) - - const rows = await config.api.row.fetch(table._id!) 
- expect(rows.length).toEqual(3) - - rows.sort((a, b) => a.name.localeCompare(b.name)) - expect(rows[0].name).toEqual("Row 1 updated") - expect(rows[0].description).toEqual("Row 1 description updated") - expect(rows[1].name).toEqual("Row 2 updated") - expect(rows[1].description).toEqual("Row 2 description updated") - expect(rows[2].name).toEqual("Row 3") - expect(rows[2].description).toEqual("Row 3 description") - }) - - // Upserting isn't yet supported in MSSQL or Oracle, see: - // https://github.com/knex/knex/pull/6050 - !isMSSQL && - !isOracle && - !isInternal && - it("should be able to update existing rows with composite primary keys with bulkImport", async () => { - const tableName = uuid.v4() - await client?.schema.createTable(tableName, table => { - table.integer("companyId") - table.integer("userId") - table.string("name") - table.string("description") - table.primary(["companyId", "userId"]) - }) - - const resp = await config.api.datasource.fetchSchema({ - datasourceId: datasource!._id!, - }) - const table = resp.datasource.entities![tableName] - - const row1 = await config.api.row.save(table._id!, { - companyId: 1, - userId: 1, - name: "Row 1", - description: "Row 1 description", - }) - - const row2 = await config.api.row.save(table._id!, { - companyId: 1, - userId: 2, - name: "Row 2", - description: "Row 2 description", - }) - - await config.api.row.bulkImport(table._id!, { - identifierFields: ["companyId", "userId"], - rows: [ - { - companyId: 1, - userId: row1.userId, - name: "Row 1 updated", - description: "Row 1 description updated", - }, - { - companyId: 1, - userId: row2.userId, - name: "Row 2 updated", - description: "Row 2 description updated", - }, - { - companyId: 1, - userId: 3, - name: "Row 3", - description: "Row 3 description", - }, - ], - }) - - const rows = await config.api.row.fetch(table._id!) - expect(rows.length).toEqual(3) - - rows.sort((a, b) => a.name.localeCompare(b.name)) - expect(rows[0].name).toEqual("Row 1 updated") - expect(rows[0].description).toEqual("Row 1 description updated") - expect(rows[1].name).toEqual("Row 2 updated") - expect(rows[1].description).toEqual("Row 2 description updated") - expect(rows[2].name).toEqual("Row 3") - expect(rows[2].description).toEqual("Row 3 description") - }) - - // Upserting isn't yet supported in MSSQL/Oracle, see: - // https://github.com/knex/knex/pull/6050 - !isMSSQL && - !isOracle && - !isInternal && - it("should be able to update existing rows an autoID primary key", async () => { - const tableName = uuid.v4() - await client!.schema.createTable(tableName, table => { - table.increments("userId").primary() - table.string("name") - }) - - const resp = await config.api.datasource.fetchSchema({ - datasourceId: datasource!._id!, - }) - const table = resp.datasource.entities![tableName] - - const row1 = await config.api.row.save(table._id!, { - name: "Clare", - }) - - const row2 = await config.api.row.save(table._id!, { - name: "Jeff", - }) - - await config.api.row.bulkImport(table._id!, { - identifierFields: ["userId"], - rows: [ - { - userId: row1.userId, - name: "Clare updated", - }, - { - userId: row2.userId, - name: "Jeff updated", - }, - ], - }) - - const rows = await config.api.row.fetch(table._id!) 
- expect(rows.length).toEqual(2) - - rows.sort((a, b) => a.name.localeCompare(b.name)) - expect(rows[0].name).toEqual("Clare updated") - expect(rows[1].name).toEqual("Jeff updated") - }) - }) - - describe("enrich", () => { - beforeAll(async () => { - table = await config.api.table.save(defaultTable()) - }) - - it("should allow enriching some linked rows", async () => { - const { linkedTable, firstRow, secondRow } = await tenancy.doInTenant( - config.getTenantId(), - async () => { - const linkedTable = await config.api.table.save( - defaultTable({ - schema: { - link: { - name: "link", - fieldName: "link", - type: FieldType.LINK, - relationshipType: RelationshipType.ONE_TO_MANY, - tableId: table._id!, - }, - }, - }) - ) - const firstRow = await config.api.row.save(table._id!, { - name: "Test Contact", - description: "original description", - }) - const secondRow = await config.api.row.save(linkedTable._id!, { - name: "Test 2", - description: "og desc", - link: [{ _id: firstRow._id }], - }) - return { linkedTable, firstRow, secondRow } - } - ) - const rowUsage = await getRowUsage() - - // test basic enrichment - const resBasic = await config.api.row.get( - linkedTable._id!, - secondRow._id! - ) - expect(resBasic.link.length).toBe(1) - expect(resBasic.link[0]).toEqual({ - _id: firstRow._id, - primaryDisplay: firstRow.name, - }) - - // test full enrichment - const resEnriched = await config.api.row.getEnriched( - linkedTable._id!, - secondRow._id! - ) - expect(resEnriched.link.length).toBe(1) - expect(resEnriched.link[0]._id).toBe(firstRow._id) - expect(resEnriched.link[0].name).toBe("Test Contact") - expect(resEnriched.link[0].description).toBe("original description") - await assertRowUsage(rowUsage) - }) - }) - - isInternal && - describe("attachments and signatures", () => { - const coreAttachmentEnrichment = async ( - schema: TableSchema, - field: string, - attachmentCfg: string | string[] - ) => { - const testTable = await config.api.table.save( - defaultTable({ - schema, - }) - ) - const attachmentToStoreKey = (attachmentId: string) => { - return { - key: `${config.getAppId()}/attachments/${attachmentId}`, - } - } - const draftRow = { - name: "test", - description: "test", - [field]: - typeof attachmentCfg === "string" - ? attachmentToStoreKey(attachmentCfg) - : attachmentCfg.map(attachmentToStoreKey), - tableId: testTable._id, - } - const row = await config.api.row.save(testTable._id!, draftRow) - - await withEnv({ SELF_HOSTED: "true" }, async () => { - return context.doInAppContext(config.getAppId(), async () => { - const enriched: Row[] = await outputProcessing(testTable, [row]) - const [targetRow] = enriched - const attachmentEntries = Array.isArray(targetRow[field]) - ? 
targetRow[field] - : [targetRow[field]] - - for (const entry of attachmentEntries) { - const attachmentId = entry.key.split("/").pop() - expect(entry.url.split("?")[0]).toBe( - `/files/signed/prod-budi-app-assets/${config.getProdAppId()}/attachments/${attachmentId}` - ) - } - }) - }) - } - - it("should allow enriching single attachment rows", async () => { - await coreAttachmentEnrichment( - { - attachment: { - type: FieldType.ATTACHMENT_SINGLE, - name: "attachment", - constraints: { presence: false }, - }, - }, - "attachment", - `${uuid.v4()}.csv` - ) - }) - - it("should allow enriching attachment list rows", async () => { - await coreAttachmentEnrichment( - { - attachments: { - type: FieldType.ATTACHMENTS, - name: "attachments", - constraints: { type: "array", presence: false }, - }, - }, - "attachments", - [`${uuid.v4()}.csv`] - ) - }) - - it("should allow enriching signature rows", async () => { - await coreAttachmentEnrichment( - { - signature: { - type: FieldType.SIGNATURE_SINGLE, - name: "signature", - constraints: { presence: false }, - }, - }, - "signature", - `${uuid.v4()}.png` - ) - }) - }) - - describe("exportRows", () => { - beforeEach(async () => { - table = await config.api.table.save(defaultTable()) - }) - - isInternal && - it("should not export internal couchdb fields", async () => { - const existing = await config.api.row.save(table._id!, { - name: generator.guid(), - description: generator.paragraph(), - }) - const res = await config.api.row.exportRows(table._id!, { - rows: [existing._id!], - }) - const results = JSON.parse(res) - expect(results.length).toEqual(1) - const row = results[0] - - expect(Object.keys(row)).toEqual(["_id", "name", "description"]) - }) - - !isInternal && - it("should allow exporting all columns", async () => { - const existing = await config.api.row.save(table._id!, {}) - const res = await config.api.row.exportRows(table._id!, { - rows: [existing._id!], - }) - const results = JSON.parse(res) - expect(results.length).toEqual(1) - const row = results[0] - - // Ensure all original columns were exported - expect(Object.keys(row).length).toBe(Object.keys(existing).length) - Object.keys(existing).forEach(key => { - expect(row[key]).toEqual(existing[key]) - }) - }) - - it("should allow exporting without filtering", async () => { - const existing = await config.api.row.save(table._id!, {}) - const res = await config.api.row.exportRows(table._id!) 
- const results = JSON.parse(res) - expect(results.length).toEqual(1) - const row = results[0] - - expect(row._id).toEqual(existing._id) - }) - - it("should allow exporting only certain columns", async () => { - const existing = await config.api.row.save(table._id!, {}) - const res = await config.api.row.exportRows(table._id!, { - rows: [existing._id!], - columns: ["_id"], - }) - const results = JSON.parse(res) - expect(results.length).toEqual(1) - const row = results[0] - - // Ensure only the _id column was exported - expect(Object.keys(row).length).toEqual(1) - expect(row._id).toEqual(existing._id) - }) - - it("should handle single quotes in row filtering", async () => { - const existing = await config.api.row.save(table._id!, {}) - const res = await config.api.row.exportRows(table._id!, { - rows: [`['${existing._id!}']`], - }) - const results = JSON.parse(res) - expect(results.length).toEqual(1) - const row = results[0] - expect(row._id).toEqual(existing._id) - }) - - it("should return an error if no table is found", async () => { - const existing = await config.api.row.save(table._id!, {}) - await config.api.row.exportRows( - "1234567", - { rows: [existing._id!] }, - RowExportFormat.JSON, - { status: 404 } - ) - }) - - // MSSQL needs a setting called IDENTITY_INSERT to be set to ON to allow writing - // to identity columns. This is not something Budibase does currently. - !isMSSQL && - it("should handle filtering by composite primary keys", async () => { - const tableRequest = saveTableRequest({ - primary: ["number", "string"], - schema: { - string: { - type: FieldType.STRING, - name: "string", - }, - number: { - type: FieldType.NUMBER, - name: "number", - }, - }, - }) - delete tableRequest.schema.id - - const table = await config.api.table.save(tableRequest) - const toCreate = generator - .unique(() => generator.integer({ min: 0, max: 10000 }), 10) - .map(number => ({ number, string: generator.word({ length: 30 }) })) - - const rows = await Promise.all( - toCreate.map(d => config.api.row.save(table._id!, d)) - ) - - const res = await config.api.row.exportRows(table._id!, { - rows: _.sampleSize(rows, 3).map(r => r._id!), - }) - const results = JSON.parse(res) - expect(results.length).toEqual(3) - }) - - describe("should allow exporting all column types", () => { - let tableId: string - let expectedRowData: Row - - beforeAll(async () => { - const fullSchema = setup.structures.fullSchemaWithoutLinks({ - allRequired: true, - }) - - const table = await config.api.table.save( - saveTableRequest({ - ...setup.structures.basicTable(), - schema: fullSchema, - primary: ["string"], - }) - ) - tableId = table._id! 
- - const rowValues: Record = { - [FieldType.STRING]: generator.guid(), - [FieldType.LONGFORM]: generator.paragraph(), - [FieldType.OPTIONS]: "option 2", - [FieldType.ARRAY]: ["options 2", "options 4"], - [FieldType.NUMBER]: generator.natural(), - [FieldType.BOOLEAN]: generator.bool(), - [FieldType.DATETIME]: generator.date().toISOString(), - [FieldType.ATTACHMENTS]: [setup.structures.basicAttachment()], - [FieldType.ATTACHMENT_SINGLE]: setup.structures.basicAttachment(), - [FieldType.FORMULA]: undefined, // generated field - [FieldType.AUTO]: undefined, // generated field - [FieldType.AI]: "LLM Output", - [FieldType.JSON]: { name: generator.guid() }, - [FieldType.INTERNAL]: generator.guid(), - [FieldType.BARCODEQR]: generator.guid(), - [FieldType.SIGNATURE_SINGLE]: setup.structures.basicAttachment(), - [FieldType.BIGINT]: generator.integer().toString(), - [FieldType.BB_REFERENCE]: [{ _id: config.getUser()._id }], - [FieldType.BB_REFERENCE_SINGLE]: { _id: config.getUser()._id }, - } - const row = await config.api.row.save(table._id!, rowValues) - expectedRowData = { - _id: row._id, - [FieldType.STRING]: rowValues[FieldType.STRING], - [FieldType.LONGFORM]: rowValues[FieldType.LONGFORM], - [FieldType.OPTIONS]: rowValues[FieldType.OPTIONS], - [FieldType.ARRAY]: rowValues[FieldType.ARRAY], - [FieldType.NUMBER]: rowValues[FieldType.NUMBER], - [FieldType.BOOLEAN]: rowValues[FieldType.BOOLEAN], - [FieldType.DATETIME]: rowValues[FieldType.DATETIME], - [FieldType.ATTACHMENTS]: rowValues[FieldType.ATTACHMENTS].map( - (a: any) => - expect.objectContaining({ - ...a, - url: expect.any(String), - }) - ), - [FieldType.ATTACHMENT_SINGLE]: expect.objectContaining({ - ...rowValues[FieldType.ATTACHMENT_SINGLE], - url: expect.any(String), - }), - [FieldType.FORMULA]: fullSchema[FieldType.FORMULA].formula, - [FieldType.AUTO]: expect.any(Number), - [FieldType.AI]: expect.any(String), - [FieldType.JSON]: rowValues[FieldType.JSON], - [FieldType.INTERNAL]: rowValues[FieldType.INTERNAL], - [FieldType.BARCODEQR]: rowValues[FieldType.BARCODEQR], - [FieldType.SIGNATURE_SINGLE]: expect.objectContaining({ - ...rowValues[FieldType.SIGNATURE_SINGLE], - url: expect.any(String), - }), - [FieldType.BIGINT]: rowValues[FieldType.BIGINT], - [FieldType.BB_REFERENCE]: rowValues[FieldType.BB_REFERENCE].map( - expect.objectContaining - ), - [FieldType.BB_REFERENCE_SINGLE]: expect.objectContaining( - rowValues[FieldType.BB_REFERENCE_SINGLE] - ), - } - }) - - it("as csv", async () => { - const exportedValue = await config.api.row.exportRows( - tableId, - { query: {} }, - RowExportFormat.CSV - ) - - const jsonResult = await config.api.table.csvToJson({ - csvString: exportedValue, - }) - - const stringified = (value: string) => - JSON.stringify(value).replace(/"/g, "'") - - const matchingObject = ( - key: string, - value: any, - isArray: boolean - ) => { - const objectMatcher = `{'${key}':'${value[key]}'.*?}` - if (isArray) { - return expect.stringMatching( - new RegExp(`^\\[${objectMatcher}\\]$`) - ) - } - return expect.stringMatching(new RegExp(`^${objectMatcher}$`)) - } - - expect(jsonResult).toEqual([ - { - ...expectedRowData, - auto: expect.any(String), - array: stringified(expectedRowData["array"]), - attachment: matchingObject( - "key", - expectedRowData["attachment"][0].sample, - true - ), - attachment_single: matchingObject( - "key", - expectedRowData["attachment_single"].sample, - false - ), - boolean: stringified(expectedRowData["boolean"]), - json: stringified(expectedRowData["json"]), - number: 
stringified(expectedRowData["number"]), - signature_single: matchingObject( - "key", - expectedRowData["signature_single"].sample, - false - ), - bb_reference: matchingObject( - "_id", - expectedRowData["bb_reference"][0].sample, - true - ), - bb_reference_single: matchingObject( - "_id", - expectedRowData["bb_reference_single"].sample, - false - ), - ai: "LLM Output", - }, - ]) - }) - - it("as json", async () => { - const exportedValue = await config.api.row.exportRows( - tableId, - { query: {} }, - RowExportFormat.JSON - ) - - const json = JSON.parse(exportedValue) - expect(json).toEqual([expectedRowData]) - }) - - it("as json with schema", async () => { - const exportedValue = await config.api.row.exportRows( - tableId, - { query: {} }, - RowExportFormat.JSON_WITH_SCHEMA - ) - - const json = JSON.parse(exportedValue) - expect(json).toEqual({ - schema: expect.any(Object), - rows: [expectedRowData], - }) - }) - - it("can handle csv-special characters in strings", async () => { - const badString = 'test":, wow", "test": "wow"' - const table = await config.api.table.save( - saveTableRequest({ - schema: { - string: { - type: FieldType.STRING, - name: "string", - }, - }, - }) - ) - - await config.api.row.save(table._id!, { string: badString }) - - const exportedValue = await config.api.row.exportRows( - table._id!, - { query: {} }, - RowExportFormat.CSV - ) - - const json = await config.api.table.csvToJson( - { - csvString: exportedValue, - }, - { - status: 200, - } - ) - - expect(json).toHaveLength(1) - expect(json[0].string).toEqual(badString) - }) - - it("exported data can be re-imported", async () => { - // export all - const exportedValue = await config.api.row.exportRows( - tableId, - { query: {} }, - RowExportFormat.CSV - ) - - // import all twice - const rows = await config.api.table.csvToJson({ - csvString: exportedValue, - }) - await config.api.row.bulkImport(tableId, { - rows, - }) - await config.api.row.bulkImport(tableId, { - rows, - }) - - const { rows: allRows } = await config.api.row.search(tableId) - - const expectedRow = { - ...expectedRowData, - _id: expect.any(String), - _rev: expect.any(String), - type: "row", - tableId: tableId, - createdAt: new Date().toISOString(), - updatedAt: new Date().toISOString(), - } - expect(allRows).toEqual([expectedRow, expectedRow, expectedRow]) - }) - }) - }) - - let o2mTable: Table - let m2mTable: Table - beforeAll(async () => { - o2mTable = await config.api.table.save(defaultTable()) - m2mTable = await config.api.table.save(defaultTable()) - }) - - describe.each([ - [ - "relationship fields", - (): Record => ({ - user: { - name: "user", - relationshipType: RelationshipType.ONE_TO_MANY, - type: FieldType.LINK, - tableId: o2mTable._id!, - fieldName: "fk_o2m", - }, - users: { - name: "users", - relationshipType: RelationshipType.MANY_TO_MANY, - type: FieldType.LINK, - tableId: m2mTable._id!, - fieldName: "fk_m2m", - }, - }), - (tableId: string) => - config.api.row.save(tableId, { - name: uuid.v4(), - description: generator.paragraph(), - tableId, - }), - (row: Row) => ({ - _id: row._id, - primaryDisplay: row.name, - }), - ], - [ - "bb reference fields", - (): Record => ({ - user: { - name: "user", - type: FieldType.BB_REFERENCE, - subtype: BBReferenceFieldSubType.USER, - }, - users: { - name: "users", - type: FieldType.BB_REFERENCE, - subtype: BBReferenceFieldSubType.USERS, - }, - }), - () => config.createUser(), - (row: Row) => ({ - _id: row._id, - primaryDisplay: row.email, - email: row.email, - firstName: row.firstName, - lastName: 
row.lastName, - }), - ], - ])("links - %s", (__, relSchema, dataGenerator, resultMapper) => { - let tableId: string - let o2mData: Row[] - let m2mData: Row[] - - beforeAll(async () => { + // Upserting isn't yet supported in MSSQL / Oracle, see: + // https://github.com/knex/knex/pull/6050 + !isMSSQL && + !isOracle && + it("should be able to update existing rows with bulkImport", async () => { const table = await config.api.table.save( - defaultTable({ schema: relSchema() }) + saveTableRequest({ + primary: ["userId"], + schema: { + userId: { + type: FieldType.NUMBER, + name: "userId", + constraints: { + presence: true, + }, + }, + name: { + type: FieldType.STRING, + name: "name", + }, + description: { + type: FieldType.STRING, + name: "description", + }, + }, + }) ) - tableId = table._id! - o2mData = [ - await dataGenerator(o2mTable._id!), - await dataGenerator(o2mTable._id!), - await dataGenerator(o2mTable._id!), - await dataGenerator(o2mTable._id!), - ] + const row1 = await config.api.row.save(table._id!, { + userId: 1, + name: "Row 1", + description: "Row 1 description", + }) - m2mData = [ - await dataGenerator(m2mTable._id!), - await dataGenerator(m2mTable._id!), - await dataGenerator(m2mTable._id!), - await dataGenerator(m2mTable._id!), - ] + const row2 = await config.api.row.save(table._id!, { + userId: 2, + name: "Row 2", + description: "Row 2 description", + }) + + await config.api.row.bulkImport(table._id!, { + identifierFields: ["userId"], + rows: [ + { + userId: row1.userId, + name: "Row 1 updated", + description: "Row 1 description updated", + }, + { + userId: row2.userId, + name: "Row 2 updated", + description: "Row 2 description updated", + }, + { + userId: 3, + name: "Row 3", + description: "Row 3 description", + }, + ], + }) + + const rows = await config.api.row.fetch(table._id!) + expect(rows.length).toEqual(3) + + rows.sort((a, b) => a.name.localeCompare(b.name)) + expect(rows[0].name).toEqual("Row 1 updated") + expect(rows[0].description).toEqual("Row 1 description updated") + expect(rows[1].name).toEqual("Row 2 updated") + expect(rows[1].description).toEqual("Row 2 description updated") + expect(rows[2].name).toEqual("Row 3") + expect(rows[2].description).toEqual("Row 3 description") }) - it("can save a row when relationship fields are empty", async () => { - const row = await config.api.row.save(tableId, { - name: "foo", - description: "bar", - }) - - expect(row).toEqual({ - _id: expect.any(String), - _rev: expect.any(String), - id: isInternal ? undefined : expect.any(Number), - type: isInternal ? "row" : undefined, - name: "foo", - description: "bar", - tableId, - }) - }) - - it("can save a row with a single relationship field", async () => { - const user = _.sample(o2mData)! - const row = await config.api.row.save(tableId, { - name: "foo", - description: "bar", - user: [user], - }) - - expect(row).toEqual({ - name: "foo", - description: "bar", - tableId, - user: [user].map(u => resultMapper(u)), - _id: expect.any(String), - _rev: expect.any(String), - id: isInternal ? undefined : expect.any(Number), - type: isInternal ? "row" : undefined, - [`fk_${o2mTable.name}_fk_o2m`]: isInternal ? 
undefined : user.id, - }) - }) - - it("can save a row with a multiple relationship field", async () => { - const selectedUsers = _.sampleSize(m2mData, 2) - const row = await config.api.row.save(tableId, { - name: "foo", - description: "bar", - users: selectedUsers, - }) - - expect(row).toEqual({ - name: "foo", - description: "bar", - tableId, - users: expect.arrayContaining( - selectedUsers.map(u => resultMapper(u)) - ), - _id: expect.any(String), - _rev: expect.any(String), - id: isInternal ? undefined : expect.any(Number), - type: isInternal ? "row" : undefined, - }) - }) - - it("can retrieve rows with no populated relationships", async () => { - const row = await config.api.row.save(tableId, { - name: "foo", - description: "bar", - }) - - const retrieved = await config.api.row.get(tableId, row._id!) - expect(retrieved).toEqual({ - name: "foo", - description: "bar", - tableId, - user: undefined, - users: undefined, - _id: row._id, - _rev: expect.any(String), - id: isInternal ? undefined : expect.any(Number), - ...defaultRowFields, - }) - }) - - it("can retrieve rows with populated relationships", async () => { - const user1 = _.sample(o2mData)! - const [user2, user3] = _.sampleSize(m2mData, 2) - - const row = await config.api.row.save(tableId, { - name: "foo", - description: "bar", - users: [user2, user3], - user: [user1], - }) - - const retrieved = await config.api.row.get(tableId, row._id!) - expect(retrieved).toEqual({ - name: "foo", - description: "bar", - tableId, - user: expect.arrayContaining([user1].map(u => resultMapper(u))), - users: expect.arrayContaining( - [user2, user3].map(u => resultMapper(u)) - ), - _id: row._id, - _rev: expect.any(String), - id: isInternal ? undefined : expect.any(Number), - [`fk_${o2mTable.name}_fk_o2m`]: isInternal ? undefined : user1.id, - ...defaultRowFields, - }) - }) - - it("can update an existing populated row", async () => { - const user = _.sample(o2mData)! - const [users1, users2, users3] = _.sampleSize(m2mData, 3) - - const row = await config.api.row.save(tableId, { - name: "foo", - description: "bar", - users: [users1, users2], - }) - - const updatedRow = await config.api.row.save(tableId, { - ...row, - user: [user], - users: [users3, users1], - }) - expect(updatedRow).toEqual({ - name: "foo", - description: "bar", - tableId, - user: expect.arrayContaining([user].map(u => resultMapper(u))), - users: expect.arrayContaining( - [users3, users1].map(u => resultMapper(u)) - ), - _id: row._id, - _rev: expect.any(String), - id: isInternal ? undefined : expect.any(Number), - type: isInternal ? "row" : undefined, - [`fk_${o2mTable.name}_fk_o2m`]: isInternal ? undefined : user.id, - }) - }) - - it("can wipe an existing populated relationships in row", async () => { - const [user1, user2] = _.sampleSize(m2mData, 2) - const row = await config.api.row.save(tableId, { - name: "foo", - description: "bar", - users: [user1, user2], - }) - - const updatedRow = await config.api.row.save(tableId, { - ...row, - user: null, - users: null, - }) - expect(updatedRow).toEqual({ - name: "foo", - description: "bar", - tableId, - _id: row._id, - _rev: expect.any(String), - id: isInternal ? undefined : expect.any(Number), - type: isInternal ? 
"row" : undefined, - }) - }) - - it("fetch all will populate the relationships", async () => { - const [user1] = _.sampleSize(o2mData, 1) - const [users1, users2, users3] = _.sampleSize(m2mData, 3) - - const rows = [ - { - name: generator.name(), - description: generator.name(), - users: [users1, users2], - }, - { - name: generator.name(), - description: generator.name(), - user: [user1], - users: [users1, users3], - }, - { - name: generator.name(), - description: generator.name(), - users: [users3], - }, - ] - - await config.api.row.save(tableId, rows[0]) - await config.api.row.save(tableId, rows[1]) - await config.api.row.save(tableId, rows[2]) - - const res = await config.api.row.fetch(tableId) - - expect(res).toEqual( - expect.arrayContaining( - rows.map(r => ({ - name: r.name, - description: r.description, - tableId, - user: r.user?.map(u => resultMapper(u)), - users: r.users?.length - ? expect.arrayContaining(r.users?.map(u => resultMapper(u))) - : undefined, - _id: expect.any(String), - _rev: expect.any(String), - id: isInternal ? undefined : expect.any(Number), - [`fk_${o2mTable.name}_fk_o2m`]: - isInternal || !r.user?.length ? undefined : r.user[0].id, - ...defaultRowFields, - })) - ) - ) - }) - - it("search all will populate the relationships", async () => { - const [user1] = _.sampleSize(o2mData, 1) - const [users1, users2, users3] = _.sampleSize(m2mData, 3) - - const rows = [ - { - name: generator.name(), - description: generator.name(), - users: [users1, users2], - }, - { - name: generator.name(), - description: generator.name(), - user: [user1], - users: [users1, users3], - }, - { - name: generator.name(), - description: generator.name(), - users: [users3], - }, - ] - - await config.api.row.save(tableId, rows[0]) - await config.api.row.save(tableId, rows[1]) - await config.api.row.save(tableId, rows[2]) - - const res = await config.api.row.search(tableId) - - expect(res).toEqual({ - rows: expect.arrayContaining( - rows.map(r => ({ - name: r.name, - description: r.description, - tableId, - user: r.user?.map(u => resultMapper(u)), - users: r.users?.length - ? expect.arrayContaining(r.users?.map(u => resultMapper(u))) - : undefined, - _id: expect.any(String), - _rev: expect.any(String), - id: isInternal ? undefined : expect.any(Number), - [`fk_${o2mTable.name}_fk_o2m`]: - isInternal || !r.user?.length ? undefined : r.user[0].id, - ...defaultRowFields, - })) - ), - ...(isInternal - ? 
{} - : { - hasNextPage: false, - }), - }) - }) - }) - // Upserting isn't yet supported in MSSQL or Oracle, see: // https://github.com/knex/knex/pull/6050 !isMSSQL && !isOracle && - describe("relationships", () => { - let tableId: string - let viewId: string + !isInternal && + it("should be able to update existing rows with composite primary keys with bulkImport", async () => { + const tableName = uuid.v4() + await client?.schema.createTable(tableName, table => { + table.integer("companyId") + table.integer("userId") + table.string("name") + table.string("description") + table.primary(["companyId", "userId"]) + }) - let auxData: Row[] = [] + const resp = await config.api.datasource.fetchSchema({ + datasourceId: datasource!._id!, + }) + const table = resp.datasource.entities![tableName] - beforeAll(async () => { - const aux2Table = await config.api.table.save(saveTableRequest()) - const aux2Data = await config.api.row.save(aux2Table._id!, {}) + const row1 = await config.api.row.save(table._id!, { + companyId: 1, + userId: 1, + name: "Row 1", + description: "Row 1 description", + }) - const auxTable = await config.api.table.save( - saveTableRequest({ - primaryDisplay: "name", + const row2 = await config.api.row.save(table._id!, { + companyId: 1, + userId: 2, + name: "Row 2", + description: "Row 2 description", + }) + + await config.api.row.bulkImport(table._id!, { + identifierFields: ["companyId", "userId"], + rows: [ + { + companyId: 1, + userId: row1.userId, + name: "Row 1 updated", + description: "Row 1 description updated", + }, + { + companyId: 1, + userId: row2.userId, + name: "Row 2 updated", + description: "Row 2 description updated", + }, + { + companyId: 1, + userId: 3, + name: "Row 3", + description: "Row 3 description", + }, + ], + }) + + const rows = await config.api.row.fetch(table._id!) + expect(rows.length).toEqual(3) + + rows.sort((a, b) => a.name.localeCompare(b.name)) + expect(rows[0].name).toEqual("Row 1 updated") + expect(rows[0].description).toEqual("Row 1 description updated") + expect(rows[1].name).toEqual("Row 2 updated") + expect(rows[1].description).toEqual("Row 2 description updated") + expect(rows[2].name).toEqual("Row 3") + expect(rows[2].description).toEqual("Row 3 description") + }) + + // Upserting isn't yet supported in MSSQL/Oracle, see: + // https://github.com/knex/knex/pull/6050 + !isMSSQL && + !isOracle && + !isInternal && + it("should be able to update existing rows an autoID primary key", async () => { + const tableName = uuid.v4() + await client!.schema.createTable(tableName, table => { + table.increments("userId").primary() + table.string("name") + }) + + const resp = await config.api.datasource.fetchSchema({ + datasourceId: datasource!._id!, + }) + const table = resp.datasource.entities![tableName] + + const row1 = await config.api.row.save(table._id!, { + name: "Clare", + }) + + const row2 = await config.api.row.save(table._id!, { + name: "Jeff", + }) + + await config.api.row.bulkImport(table._id!, { + identifierFields: ["userId"], + rows: [ + { + userId: row1.userId, + name: "Clare updated", + }, + { + userId: row2.userId, + name: "Jeff updated", + }, + ], + }) + + const rows = await config.api.row.fetch(table._id!) 
+ expect(rows.length).toEqual(2) + + rows.sort((a, b) => a.name.localeCompare(b.name)) + expect(rows[0].name).toEqual("Clare updated") + expect(rows[1].name).toEqual("Jeff updated") + }) + }) + + describe("enrich", () => { + beforeAll(async () => { + table = await config.api.table.save(defaultTable()) + }) + + it("should allow enriching some linked rows", async () => { + const { linkedTable, firstRow, secondRow } = await tenancy.doInTenant( + config.getTenantId(), + async () => { + const linkedTable = await config.api.table.save( + defaultTable({ schema: { - name: { - name: "name", - type: FieldType.STRING, - constraints: { presence: true }, - }, - age: { - name: "age", - type: FieldType.NUMBER, - constraints: { presence: true }, - }, - address: { - name: "address", - type: FieldType.STRING, - constraints: { presence: true }, - visible: false, - }, link: { name: "link", + fieldName: "link", type: FieldType.LINK, - tableId: aux2Table._id!, - relationshipType: RelationshipType.MANY_TO_MANY, - fieldName: "fk_aux", - constraints: { presence: true }, - }, - formula: { - name: "formula", - type: FieldType.FORMULA, - formula: "{{ any }}", - constraints: { presence: true }, + relationshipType: RelationshipType.ONE_TO_MANY, + tableId: table._id!, }, }, }) ) - const auxTableId = auxTable._id! - - for (const name of generator.unique(() => generator.name(), 10)) { - auxData.push( - await config.api.row.save(auxTableId, { - name, - age: generator.age(), - address: generator.address(), - link: [aux2Data], - }) - ) - } - - const table = await config.api.table.save( - saveTableRequest({ - schema: { - title: { - name: "title", - type: FieldType.STRING, - constraints: { presence: true }, - }, - relWithNoSchema: { - name: "relWithNoSchema", - relationshipType: RelationshipType.ONE_TO_MANY, - type: FieldType.LINK, - tableId: auxTableId, - fieldName: "fk_relWithNoSchema", - constraints: { presence: true }, - }, - relWithEmptySchema: { - name: "relWithEmptySchema", - relationshipType: RelationshipType.ONE_TO_MANY, - type: FieldType.LINK, - tableId: auxTableId, - fieldName: "fk_relWithEmptySchema", - constraints: { presence: true }, - }, - relWithFullSchema: { - name: "relWithFullSchema", - relationshipType: RelationshipType.ONE_TO_MANY, - type: FieldType.LINK, - tableId: auxTableId, - fieldName: "fk_relWithFullSchema", - constraints: { presence: true }, - }, - relWithHalfSchema: { - name: "relWithHalfSchema", - relationshipType: RelationshipType.ONE_TO_MANY, - type: FieldType.LINK, - tableId: auxTableId, - fieldName: "fk_relWithHalfSchema", - constraints: { presence: true }, - }, - relWithIllegalSchema: { - name: "relWithIllegalSchema", - relationshipType: RelationshipType.ONE_TO_MANY, - type: FieldType.LINK, - tableId: auxTableId, - fieldName: "fk_relWithIllegalSchema", - constraints: { presence: true }, - }, - }, - }) - ) - tableId = table._id! 
- const view = await config.api.viewV2.create({ - name: generator.guid(), - tableId, - schema: { - title: { - visible: true, - }, - relWithNoSchema: { - visible: true, - }, - relWithEmptySchema: { - visible: true, - columns: {}, - }, - relWithFullSchema: { - visible: true, - columns: Object.keys(auxTable.schema).reduce< - Record - >((acc, c) => ({ ...acc, [c]: { visible: true } }), {}), - }, - relWithHalfSchema: { - visible: true, - columns: { - name: { visible: true }, - age: { visible: false, readonly: true }, - }, - }, - relWithIllegalSchema: { - visible: true, - columns: { - name: { visible: true }, - address: { visible: true }, - unexisting: { visible: true }, - }, - }, - }, + const firstRow = await config.api.row.save(table._id!, { + name: "Test Contact", + description: "original description", }) + const secondRow = await config.api.row.save(linkedTable._id!, { + name: "Test 2", + description: "og desc", + link: [{ _id: firstRow._id }], + }) + return { linkedTable, firstRow, secondRow } + } + ) + const rowUsage = await getRowUsage() - viewId = view.id - }) + // test basic enrichment + const resBasic = await config.api.row.get( + linkedTable._id!, + secondRow._id! + ) + expect(resBasic.link.length).toBe(1) + expect(resBasic.link[0]).toEqual({ + _id: firstRow._id, + primaryDisplay: firstRow.name, + }) - const testScenarios: [string, (row: Row) => Promise | Row][] = [ - ["get row", (row: Row) => config.api.row.get(viewId, row._id!)], - [ - "from view search", - async (row: Row) => { - const { rows } = await config.api.viewV2.search(viewId) - return rows.find(r => r._id === row._id!) - }, - ], - ["from original saved row", (row: Row) => row], - ["from updated row", (row: Row) => config.api.row.save(viewId, row)], - ] + // test full enrichment + const resEnriched = await config.api.row.getEnriched( + linkedTable._id!, + secondRow._id! 
+ ) + expect(resEnriched.link.length).toBe(1) + expect(resEnriched.link[0]._id).toBe(firstRow._id) + expect(resEnriched.link[0].name).toBe("Test Contact") + expect(resEnriched.link[0].description).toBe("original description") + await assertRowUsage(rowUsage) + }) + }) - it.each(testScenarios)( - "can retrieve rows with populated relationships (via %s)", - async (__, retrieveDelegate) => { - const otherRows = _.sampleSize(auxData, 5) - - const row = await config.api.row.save(viewId, { - title: generator.word(), - relWithNoSchema: [otherRows[0]], - relWithEmptySchema: [otherRows[1]], - relWithFullSchema: [otherRows[2]], - relWithHalfSchema: [otherRows[3]], - relWithIllegalSchema: [otherRows[4]], - }) - - const retrieved = await retrieveDelegate(row) - - expect(retrieved).toEqual( - expect.objectContaining({ - title: row.title, - relWithNoSchema: [ - { - _id: otherRows[0]._id, - primaryDisplay: otherRows[0].name, - }, - ], - relWithEmptySchema: [ - { - _id: otherRows[1]._id, - primaryDisplay: otherRows[1].name, - }, - ], - relWithFullSchema: [ - { - _id: otherRows[2]._id, - primaryDisplay: otherRows[2].name, - name: otherRows[2].name, - age: otherRows[2].age, - id: otherRows[2].id, - }, - ], - relWithHalfSchema: [ - { - _id: otherRows[3]._id, - primaryDisplay: otherRows[3].name, - name: otherRows[3].name, - }, - ], - relWithIllegalSchema: [ - { - _id: otherRows[4]._id, - primaryDisplay: otherRows[4].name, - name: otherRows[4].name, - }, - ], - }) - ) - } + isInternal && + describe("attachments and signatures", () => { + const coreAttachmentEnrichment = async ( + schema: TableSchema, + field: string, + attachmentCfg: string | string[] + ) => { + const testTable = await config.api.table.save( + defaultTable({ + schema, + }) ) - - it.each([ - [ - "from table fetch", - async (row: Row) => { - const rows = await config.api.row.fetch(tableId) - return rows.find(r => r._id === row._id!) - }, - ], - [ - "from table search", - async (row: Row) => { - const { rows } = await config.api.row.search(tableId) - return rows.find(r => r._id === row._id!) - }, - ], - ])( - "does not enrich when fetching from the table (via %s)", - async (__, retrieveDelegate) => { - const otherRows = _.sampleSize(auxData, 5) - - const row = await config.api.row.save(viewId, { - title: generator.word(), - relWithNoSchema: [otherRows[0]], - relWithEmptySchema: [otherRows[1]], - relWithFullSchema: [otherRows[2]], - relWithHalfSchema: [otherRows[3]], - relWithIllegalSchema: [otherRows[4]], - }) - - const retrieved = await retrieveDelegate(row) - - expect(retrieved).toEqual( - expect.objectContaining({ - title: row.title, - relWithNoSchema: [ - { - _id: otherRows[0]._id, - primaryDisplay: otherRows[0].name, - }, - ], - relWithEmptySchema: [ - { - _id: otherRows[1]._id, - primaryDisplay: otherRows[1].name, - }, - ], - relWithFullSchema: [ - { - _id: otherRows[2]._id, - primaryDisplay: otherRows[2].name, - }, - ], - relWithHalfSchema: [ - { - _id: otherRows[3]._id, - primaryDisplay: otherRows[3].name, - }, - ], - relWithIllegalSchema: [ - { - _id: otherRows[4]._id, - primaryDisplay: otherRows[4].name, - }, - ], - }) - ) + const attachmentToStoreKey = (attachmentId: string) => { + return { + key: `${config.getAppId()}/attachments/${attachmentId}`, } + } + const draftRow = { + name: "test", + description: "test", + [field]: + typeof attachmentCfg === "string" + ? 
attachmentToStoreKey(attachmentCfg) + : attachmentCfg.map(attachmentToStoreKey), + tableId: testTable._id, + } + const row = await config.api.row.save(testTable._id!, draftRow) + + await withEnv({ SELF_HOSTED: "true" }, async () => { + return context.doInAppContext(config.getAppId(), async () => { + const enriched: Row[] = await outputProcessing(testTable, [row]) + const [targetRow] = enriched + const attachmentEntries = Array.isArray(targetRow[field]) + ? targetRow[field] + : [targetRow[field]] + + for (const entry of attachmentEntries) { + const attachmentId = entry.key.split("/").pop() + expect(entry.url.split("?")[0]).toBe( + `/files/signed/prod-budi-app-assets/${config.getProdAppId()}/attachments/${attachmentId}` + ) + } + }) + }) + } + + it("should allow enriching single attachment rows", async () => { + await coreAttachmentEnrichment( + { + attachment: { + type: FieldType.ATTACHMENT_SINGLE, + name: "attachment", + constraints: { presence: false }, + }, + }, + "attachment", + `${uuid.v4()}.csv` ) }) + it("should allow enriching attachment list rows", async () => { + await coreAttachmentEnrichment( + { + attachments: { + type: FieldType.ATTACHMENTS, + name: "attachments", + constraints: { type: "array", presence: false }, + }, + }, + "attachments", + [`${uuid.v4()}.csv`] + ) + }) + + it("should allow enriching signature rows", async () => { + await coreAttachmentEnrichment( + { + signature: { + type: FieldType.SIGNATURE_SINGLE, + name: "signature", + constraints: { presence: false }, + }, + }, + "signature", + `${uuid.v4()}.png` + ) + }) + }) + + describe("exportRows", () => { + beforeEach(async () => { + table = await config.api.table.save(defaultTable()) + }) + isInternal && - describe("AI fields", () => { - let table: Table - - beforeAll(async () => { - mocks.licenses.useBudibaseAI() - mocks.licenses.useAICustomConfigs() - table = await config.api.table.save( - saveTableRequest({ - schema: { - ai: { - name: "ai", - type: FieldType.AI, - operation: AIOperationEnum.PROMPT, - prompt: "Convert the following to German: '{{ product }}'", - }, - product: { - name: "product", - type: FieldType.STRING, - }, - }, - }) - ) - - await config.api.row.save(table._id!, { - product: generator.word(), - }) + it("should not export internal couchdb fields", async () => { + const existing = await config.api.row.save(table._id!, { + name: generator.guid(), + description: generator.paragraph(), }) - - afterAll(() => { - jest.unmock("@budibase/pro") + const res = await config.api.row.exportRows(table._id!, { + rows: [existing._id!], }) + const results = JSON.parse(res) + expect(results.length).toEqual(1) + const row = results[0] - it("should be able to save a row with an AI column", async () => { - const { rows } = await config.api.row.search(table._id!) - expect(rows.length).toBe(1) - expect(rows[0].ai).toEqual("Mock LLM Response") + expect(Object.keys(row)).toEqual(["_id", "name", "description"]) + }) + + !isInternal && + it("should allow exporting all columns", async () => { + const existing = await config.api.row.save(table._id!, {}) + const res = await config.api.row.exportRows(table._id!, { + rows: [existing._id!], }) + const results = JSON.parse(res) + expect(results.length).toEqual(1) + const row = results[0] - it("should be able to update a row with an AI column", async () => { - const { rows } = await config.api.row.search(table._id!) 
- expect(rows.length).toBe(1) - await config.api.row.save(table._id!, { - product: generator.word(), - ...rows[0], - }) - expect(rows.length).toBe(1) - expect(rows[0].ai).toEqual("Mock LLM Response") + // Ensure all original columns were exported + expect(Object.keys(row).length).toBe(Object.keys(existing).length) + Object.keys(existing).forEach(key => { + expect(row[key]).toEqual(existing[key]) }) }) - describe("Formula fields", () => { - let table: Table - let otherTable: Table - let relatedRow: Row, mainRow: Row + it("should allow exporting without filtering", async () => { + const existing = await config.api.row.save(table._id!, {}) + const res = await config.api.row.exportRows(table._id!) + const results = JSON.parse(res) + expect(results.length).toEqual(1) + const row = results[0] + + expect(row._id).toEqual(existing._id) + }) + + it("should allow exporting only certain columns", async () => { + const existing = await config.api.row.save(table._id!, {}) + const res = await config.api.row.exportRows(table._id!, { + rows: [existing._id!], + columns: ["_id"], + }) + const results = JSON.parse(res) + expect(results.length).toEqual(1) + const row = results[0] + + // Ensure only the _id column was exported + expect(Object.keys(row).length).toEqual(1) + expect(row._id).toEqual(existing._id) + }) + + it("should handle single quotes in row filtering", async () => { + const existing = await config.api.row.save(table._id!, {}) + const res = await config.api.row.exportRows(table._id!, { + rows: [`['${existing._id!}']`], + }) + const results = JSON.parse(res) + expect(results.length).toEqual(1) + const row = results[0] + expect(row._id).toEqual(existing._id) + }) + + it("should return an error if no table is found", async () => { + const existing = await config.api.row.save(table._id!, {}) + await config.api.row.exportRows( + "1234567", + { rows: [existing._id!] }, + RowExportFormat.JSON, + { status: 404 } + ) + }) + + // MSSQL needs a setting called IDENTITY_INSERT to be set to ON to allow writing + // to identity columns. This is not something Budibase does currently. + !isMSSQL && + it("should handle filtering by composite primary keys", async () => { + const tableRequest = saveTableRequest({ + primary: ["number", "string"], + schema: { + string: { + type: FieldType.STRING, + name: "string", + }, + number: { + type: FieldType.NUMBER, + name: "number", + }, + }, + }) + delete tableRequest.schema.id + + const table = await config.api.table.save(tableRequest) + const toCreate = generator + .unique(() => generator.integer({ min: 0, max: 10000 }), 10) + .map(number => ({ number, string: generator.word({ length: 30 }) })) + + const rows = await Promise.all( + toCreate.map(d => config.api.row.save(table._id!, d)) + ) + + const res = await config.api.row.exportRows(table._id!, { + rows: _.sampleSize(rows, 3).map(r => r._id!), + }) + const results = JSON.parse(res) + expect(results.length).toEqual(3) + }) + + describe("should allow exporting all column types", () => { + let tableId: string + let expectedRowData: Row beforeAll(async () => { - otherTable = await config.api.table.save(defaultTable()) - table = await config.api.table.save( + const fullSchema = setup.structures.fullSchemaWithoutLinks({ + allRequired: true, + }) + + const table = await config.api.table.save( + saveTableRequest({ + ...setup.structures.basicTable(), + schema: fullSchema, + primary: ["string"], + }) + ) + tableId = table._id! 
+ + const rowValues: Record = { + [FieldType.STRING]: generator.guid(), + [FieldType.LONGFORM]: generator.paragraph(), + [FieldType.OPTIONS]: "option 2", + [FieldType.ARRAY]: ["options 2", "options 4"], + [FieldType.NUMBER]: generator.natural(), + [FieldType.BOOLEAN]: generator.bool(), + [FieldType.DATETIME]: generator.date().toISOString(), + [FieldType.ATTACHMENTS]: [setup.structures.basicAttachment()], + [FieldType.ATTACHMENT_SINGLE]: setup.structures.basicAttachment(), + [FieldType.FORMULA]: undefined, // generated field + [FieldType.AUTO]: undefined, // generated field + [FieldType.AI]: "LLM Output", + [FieldType.JSON]: { name: generator.guid() }, + [FieldType.INTERNAL]: generator.guid(), + [FieldType.BARCODEQR]: generator.guid(), + [FieldType.SIGNATURE_SINGLE]: setup.structures.basicAttachment(), + [FieldType.BIGINT]: generator.integer().toString(), + [FieldType.BB_REFERENCE]: [{ _id: config.getUser()._id }], + [FieldType.BB_REFERENCE_SINGLE]: { _id: config.getUser()._id }, + } + const row = await config.api.row.save(table._id!, rowValues) + expectedRowData = { + _id: row._id, + [FieldType.STRING]: rowValues[FieldType.STRING], + [FieldType.LONGFORM]: rowValues[FieldType.LONGFORM], + [FieldType.OPTIONS]: rowValues[FieldType.OPTIONS], + [FieldType.ARRAY]: rowValues[FieldType.ARRAY], + [FieldType.NUMBER]: rowValues[FieldType.NUMBER], + [FieldType.BOOLEAN]: rowValues[FieldType.BOOLEAN], + [FieldType.DATETIME]: rowValues[FieldType.DATETIME], + [FieldType.ATTACHMENTS]: rowValues[FieldType.ATTACHMENTS].map( + (a: any) => + expect.objectContaining({ + ...a, + url: expect.any(String), + }) + ), + [FieldType.ATTACHMENT_SINGLE]: expect.objectContaining({ + ...rowValues[FieldType.ATTACHMENT_SINGLE], + url: expect.any(String), + }), + [FieldType.FORMULA]: fullSchema[FieldType.FORMULA].formula, + [FieldType.AUTO]: expect.any(Number), + [FieldType.AI]: expect.any(String), + [FieldType.JSON]: rowValues[FieldType.JSON], + [FieldType.INTERNAL]: rowValues[FieldType.INTERNAL], + [FieldType.BARCODEQR]: rowValues[FieldType.BARCODEQR], + [FieldType.SIGNATURE_SINGLE]: expect.objectContaining({ + ...rowValues[FieldType.SIGNATURE_SINGLE], + url: expect.any(String), + }), + [FieldType.BIGINT]: rowValues[FieldType.BIGINT], + [FieldType.BB_REFERENCE]: rowValues[FieldType.BB_REFERENCE].map( + expect.objectContaining + ), + [FieldType.BB_REFERENCE_SINGLE]: expect.objectContaining( + rowValues[FieldType.BB_REFERENCE_SINGLE] + ), + } + }) + + it("as csv", async () => { + const exportedValue = await config.api.row.exportRows( + tableId, + { query: {} }, + RowExportFormat.CSV + ) + + const jsonResult = await config.api.table.csvToJson({ + csvString: exportedValue, + }) + + const stringified = (value: string) => + JSON.stringify(value).replace(/"/g, "'") + + const matchingObject = (key: string, value: any, isArray: boolean) => { + const objectMatcher = `{'${key}':'${value[key]}'.*?}` + if (isArray) { + return expect.stringMatching(new RegExp(`^\\[${objectMatcher}\\]$`)) + } + return expect.stringMatching(new RegExp(`^${objectMatcher}$`)) + } + + expect(jsonResult).toEqual([ + { + ...expectedRowData, + auto: expect.any(String), + array: stringified(expectedRowData["array"]), + attachment: matchingObject( + "key", + expectedRowData["attachment"][0].sample, + true + ), + attachment_single: matchingObject( + "key", + expectedRowData["attachment_single"].sample, + false + ), + boolean: stringified(expectedRowData["boolean"]), + json: stringified(expectedRowData["json"]), + number: stringified(expectedRowData["number"]), + 
signature_single: matchingObject( + "key", + expectedRowData["signature_single"].sample, + false + ), + bb_reference: matchingObject( + "_id", + expectedRowData["bb_reference"][0].sample, + true + ), + bb_reference_single: matchingObject( + "_id", + expectedRowData["bb_reference_single"].sample, + false + ), + ai: "LLM Output", + }, + ]) + }) + + it("as json", async () => { + const exportedValue = await config.api.row.exportRows( + tableId, + { query: {} }, + RowExportFormat.JSON + ) + + const json = JSON.parse(exportedValue) + expect(json).toEqual([expectedRowData]) + }) + + it("as json with schema", async () => { + const exportedValue = await config.api.row.exportRows( + tableId, + { query: {} }, + RowExportFormat.JSON_WITH_SCHEMA + ) + + const json = JSON.parse(exportedValue) + expect(json).toEqual({ + schema: expect.any(Object), + rows: [expectedRowData], + }) + }) + + it("can handle csv-special characters in strings", async () => { + const badString = 'test":, wow", "test": "wow"' + const table = await config.api.table.save( saveTableRequest({ schema: { - links: { - name: "links", - fieldName: "links", + string: { + type: FieldType.STRING, + name: "string", + }, + }, + }) + ) + + await config.api.row.save(table._id!, { string: badString }) + + const exportedValue = await config.api.row.exportRows( + table._id!, + { query: {} }, + RowExportFormat.CSV + ) + + const json = await config.api.table.csvToJson( + { + csvString: exportedValue, + }, + { + status: 200, + } + ) + + expect(json).toHaveLength(1) + expect(json[0].string).toEqual(badString) + }) + + it("exported data can be re-imported", async () => { + // export all + const exportedValue = await config.api.row.exportRows( + tableId, + { query: {} }, + RowExportFormat.CSV + ) + + // import all twice + const rows = await config.api.table.csvToJson({ + csvString: exportedValue, + }) + await config.api.row.bulkImport(tableId, { + rows, + }) + await config.api.row.bulkImport(tableId, { + rows, + }) + + const { rows: allRows } = await config.api.row.search(tableId) + + const expectedRow = { + ...expectedRowData, + _id: expect.any(String), + _rev: expect.any(String), + type: "row", + tableId: tableId, + createdAt: new Date().toISOString(), + updatedAt: new Date().toISOString(), + } + expect(allRows).toEqual([expectedRow, expectedRow, expectedRow]) + }) + }) + }) + + let o2mTable: Table + let m2mTable: Table + beforeAll(async () => { + o2mTable = await config.api.table.save(defaultTable()) + m2mTable = await config.api.table.save(defaultTable()) + }) + + describe.each([ + [ + "relationship fields", + (): Record => ({ + user: { + name: "user", + relationshipType: RelationshipType.ONE_TO_MANY, + type: FieldType.LINK, + tableId: o2mTable._id!, + fieldName: "fk_o2m", + }, + users: { + name: "users", + relationshipType: RelationshipType.MANY_TO_MANY, + type: FieldType.LINK, + tableId: m2mTable._id!, + fieldName: "fk_m2m", + }, + }), + (tableId: string) => + config.api.row.save(tableId, { + name: uuid.v4(), + description: generator.paragraph(), + tableId, + }), + (row: Row) => ({ + _id: row._id, + primaryDisplay: row.name, + }), + ], + [ + "bb reference fields", + (): Record => ({ + user: { + name: "user", + type: FieldType.BB_REFERENCE, + subtype: BBReferenceFieldSubType.USER, + }, + users: { + name: "users", + type: FieldType.BB_REFERENCE, + subtype: BBReferenceFieldSubType.USERS, + }, + }), + () => config.createUser(), + (row: Row) => ({ + _id: row._id, + primaryDisplay: row.email, + email: row.email, + firstName: row.firstName, + lastName: 
row.lastName, + }), + ], + ])("links - %s", (__, relSchema, dataGenerator, resultMapper) => { + let tableId: string + let o2mData: Row[] + let m2mData: Row[] + + beforeAll(async () => { + const table = await config.api.table.save( + defaultTable({ schema: relSchema() }) + ) + tableId = table._id! + + o2mData = [ + await dataGenerator(o2mTable._id!), + await dataGenerator(o2mTable._id!), + await dataGenerator(o2mTable._id!), + await dataGenerator(o2mTable._id!), + ] + + m2mData = [ + await dataGenerator(m2mTable._id!), + await dataGenerator(m2mTable._id!), + await dataGenerator(m2mTable._id!), + await dataGenerator(m2mTable._id!), + ] + }) + + it("can save a row when relationship fields are empty", async () => { + const row = await config.api.row.save(tableId, { + name: "foo", + description: "bar", + }) + + expect(row).toEqual({ + _id: expect.any(String), + _rev: expect.any(String), + id: isInternal ? undefined : expect.any(Number), + type: isInternal ? "row" : undefined, + name: "foo", + description: "bar", + tableId, + }) + }) + + it("can save a row with a single relationship field", async () => { + const user = _.sample(o2mData)! + const row = await config.api.row.save(tableId, { + name: "foo", + description: "bar", + user: [user], + }) + + expect(row).toEqual({ + name: "foo", + description: "bar", + tableId, + user: [user].map(u => resultMapper(u)), + _id: expect.any(String), + _rev: expect.any(String), + id: isInternal ? undefined : expect.any(Number), + type: isInternal ? "row" : undefined, + [`fk_${o2mTable.name}_fk_o2m`]: isInternal ? undefined : user.id, + }) + }) + + it("can save a row with a multiple relationship field", async () => { + const selectedUsers = _.sampleSize(m2mData, 2) + const row = await config.api.row.save(tableId, { + name: "foo", + description: "bar", + users: selectedUsers, + }) + + expect(row).toEqual({ + name: "foo", + description: "bar", + tableId, + users: expect.arrayContaining(selectedUsers.map(u => resultMapper(u))), + _id: expect.any(String), + _rev: expect.any(String), + id: isInternal ? undefined : expect.any(Number), + type: isInternal ? "row" : undefined, + }) + }) + + it("can retrieve rows with no populated relationships", async () => { + const row = await config.api.row.save(tableId, { + name: "foo", + description: "bar", + }) + + const retrieved = await config.api.row.get(tableId, row._id!) + expect(retrieved).toEqual({ + name: "foo", + description: "bar", + tableId, + user: undefined, + users: undefined, + _id: row._id, + _rev: expect.any(String), + id: isInternal ? undefined : expect.any(Number), + ...defaultRowFields, + }) + }) + + it("can retrieve rows with populated relationships", async () => { + const user1 = _.sample(o2mData)! + const [user2, user3] = _.sampleSize(m2mData, 2) + + const row = await config.api.row.save(tableId, { + name: "foo", + description: "bar", + users: [user2, user3], + user: [user1], + }) + + const retrieved = await config.api.row.get(tableId, row._id!) + expect(retrieved).toEqual({ + name: "foo", + description: "bar", + tableId, + user: expect.arrayContaining([user1].map(u => resultMapper(u))), + users: expect.arrayContaining([user2, user3].map(u => resultMapper(u))), + _id: row._id, + _rev: expect.any(String), + id: isInternal ? undefined : expect.any(Number), + [`fk_${o2mTable.name}_fk_o2m`]: isInternal ? undefined : user1.id, + ...defaultRowFields, + }) + }) + + it("can update an existing populated row", async () => { + const user = _.sample(o2mData)! 
+ const [users1, users2, users3] = _.sampleSize(m2mData, 3) + + const row = await config.api.row.save(tableId, { + name: "foo", + description: "bar", + users: [users1, users2], + }) + + const updatedRow = await config.api.row.save(tableId, { + ...row, + user: [user], + users: [users3, users1], + }) + expect(updatedRow).toEqual({ + name: "foo", + description: "bar", + tableId, + user: expect.arrayContaining([user].map(u => resultMapper(u))), + users: expect.arrayContaining( + [users3, users1].map(u => resultMapper(u)) + ), + _id: row._id, + _rev: expect.any(String), + id: isInternal ? undefined : expect.any(Number), + type: isInternal ? "row" : undefined, + [`fk_${o2mTable.name}_fk_o2m`]: isInternal ? undefined : user.id, + }) + }) + + it("can wipe an existing populated relationships in row", async () => { + const [user1, user2] = _.sampleSize(m2mData, 2) + const row = await config.api.row.save(tableId, { + name: "foo", + description: "bar", + users: [user1, user2], + }) + + const updatedRow = await config.api.row.save(tableId, { + ...row, + user: null, + users: null, + }) + expect(updatedRow).toEqual({ + name: "foo", + description: "bar", + tableId, + _id: row._id, + _rev: expect.any(String), + id: isInternal ? undefined : expect.any(Number), + type: isInternal ? "row" : undefined, + }) + }) + + it("fetch all will populate the relationships", async () => { + const [user1] = _.sampleSize(o2mData, 1) + const [users1, users2, users3] = _.sampleSize(m2mData, 3) + + const rows = [ + { + name: generator.name(), + description: generator.name(), + users: [users1, users2], + }, + { + name: generator.name(), + description: generator.name(), + user: [user1], + users: [users1, users3], + }, + { + name: generator.name(), + description: generator.name(), + users: [users3], + }, + ] + + await config.api.row.save(tableId, rows[0]) + await config.api.row.save(tableId, rows[1]) + await config.api.row.save(tableId, rows[2]) + + const res = await config.api.row.fetch(tableId) + + expect(res).toEqual( + expect.arrayContaining( + rows.map(r => ({ + name: r.name, + description: r.description, + tableId, + user: r.user?.map(u => resultMapper(u)), + users: r.users?.length + ? expect.arrayContaining(r.users?.map(u => resultMapper(u))) + : undefined, + _id: expect.any(String), + _rev: expect.any(String), + id: isInternal ? undefined : expect.any(Number), + [`fk_${o2mTable.name}_fk_o2m`]: + isInternal || !r.user?.length ? undefined : r.user[0].id, + ...defaultRowFields, + })) + ) + ) + }) + + it("search all will populate the relationships", async () => { + const [user1] = _.sampleSize(o2mData, 1) + const [users1, users2, users3] = _.sampleSize(m2mData, 3) + + const rows = [ + { + name: generator.name(), + description: generator.name(), + users: [users1, users2], + }, + { + name: generator.name(), + description: generator.name(), + user: [user1], + users: [users1, users3], + }, + { + name: generator.name(), + description: generator.name(), + users: [users3], + }, + ] + + await config.api.row.save(tableId, rows[0]) + await config.api.row.save(tableId, rows[1]) + await config.api.row.save(tableId, rows[2]) + + const res = await config.api.row.search(tableId) + + expect(res).toEqual({ + rows: expect.arrayContaining( + rows.map(r => ({ + name: r.name, + description: r.description, + tableId, + user: r.user?.map(u => resultMapper(u)), + users: r.users?.length + ? expect.arrayContaining(r.users?.map(u => resultMapper(u))) + : undefined, + _id: expect.any(String), + _rev: expect.any(String), + id: isInternal ? 
undefined : expect.any(Number), + [`fk_${o2mTable.name}_fk_o2m`]: + isInternal || !r.user?.length ? undefined : r.user[0].id, + ...defaultRowFields, + })) + ), + ...(isInternal + ? {} + : { + hasNextPage: false, + }), + }) + }) + }) + + // Upserting isn't yet supported in MSSQL or Oracle, see: + // https://github.com/knex/knex/pull/6050 + !isMSSQL && + !isOracle && + describe("relationships", () => { + let tableId: string + let viewId: string + + let auxData: Row[] = [] + + beforeAll(async () => { + const aux2Table = await config.api.table.save(saveTableRequest()) + const aux2Data = await config.api.row.save(aux2Table._id!, {}) + + const auxTable = await config.api.table.save( + saveTableRequest({ + primaryDisplay: "name", + schema: { + name: { + name: "name", + type: FieldType.STRING, + constraints: { presence: true }, + }, + age: { + name: "age", + type: FieldType.NUMBER, + constraints: { presence: true }, + }, + address: { + name: "address", + type: FieldType.STRING, + constraints: { presence: true }, + visible: false, + }, + link: { + name: "link", type: FieldType.LINK, - tableId: otherTable._id!, - relationshipType: RelationshipType.ONE_TO_MANY, + tableId: aux2Table._id!, + relationshipType: RelationshipType.MANY_TO_MANY, + fieldName: "fk_aux", + constraints: { presence: true }, }, formula: { name: "formula", type: FieldType.FORMULA, - formula: "{{ links.0.name }}", + formula: "{{ any }}", + constraints: { presence: true }, + }, + }, + }) + ) + const auxTableId = auxTable._id! + + for (const name of generator.unique(() => generator.name(), 10)) { + auxData.push( + await config.api.row.save(auxTableId, { + name, + age: generator.age(), + address: generator.address(), + link: [aux2Data], + }) + ) + } + + const table = await config.api.table.save( + saveTableRequest({ + schema: { + title: { + name: "title", + type: FieldType.STRING, + constraints: { presence: true }, + }, + relWithNoSchema: { + name: "relWithNoSchema", + relationshipType: RelationshipType.ONE_TO_MANY, + type: FieldType.LINK, + tableId: auxTableId, + fieldName: "fk_relWithNoSchema", + constraints: { presence: true }, + }, + relWithEmptySchema: { + name: "relWithEmptySchema", + relationshipType: RelationshipType.ONE_TO_MANY, + type: FieldType.LINK, + tableId: auxTableId, + fieldName: "fk_relWithEmptySchema", + constraints: { presence: true }, + }, + relWithFullSchema: { + name: "relWithFullSchema", + relationshipType: RelationshipType.ONE_TO_MANY, + type: FieldType.LINK, + tableId: auxTableId, + fieldName: "fk_relWithFullSchema", + constraints: { presence: true }, + }, + relWithHalfSchema: { + name: "relWithHalfSchema", + relationshipType: RelationshipType.ONE_TO_MANY, + type: FieldType.LINK, + tableId: auxTableId, + fieldName: "fk_relWithHalfSchema", + constraints: { presence: true }, + }, + relWithIllegalSchema: { + name: "relWithIllegalSchema", + relationshipType: RelationshipType.ONE_TO_MANY, + type: FieldType.LINK, + tableId: auxTableId, + fieldName: "fk_relWithIllegalSchema", + constraints: { presence: true }, + }, + }, + }) + ) + tableId = table._id! 
+ const view = await config.api.viewV2.create({ + name: generator.guid(), + tableId, + schema: { + title: { + visible: true, + }, + relWithNoSchema: { + visible: true, + }, + relWithEmptySchema: { + visible: true, + columns: {}, + }, + relWithFullSchema: { + visible: true, + columns: Object.keys(auxTable.schema).reduce< + Record + >((acc, c) => ({ ...acc, [c]: { visible: true } }), {}), + }, + relWithHalfSchema: { + visible: true, + columns: { + name: { visible: true }, + age: { visible: false, readonly: true }, + }, + }, + relWithIllegalSchema: { + visible: true, + columns: { + name: { visible: true }, + address: { visible: true }, + unexisting: { visible: true }, + }, + }, + }, + }) + + viewId = view.id + }) + + const testScenarios: [string, (row: Row) => Promise | Row][] = [ + ["get row", (row: Row) => config.api.row.get(viewId, row._id!)], + [ + "from view search", + async (row: Row) => { + const { rows } = await config.api.viewV2.search(viewId) + return rows.find(r => r._id === row._id!) + }, + ], + ["from original saved row", (row: Row) => row], + ["from updated row", (row: Row) => config.api.row.save(viewId, row)], + ] + + it.each(testScenarios)( + "can retrieve rows with populated relationships (via %s)", + async (__, retrieveDelegate) => { + const otherRows = _.sampleSize(auxData, 5) + + const row = await config.api.row.save(viewId, { + title: generator.word(), + relWithNoSchema: [otherRows[0]], + relWithEmptySchema: [otherRows[1]], + relWithFullSchema: [otherRows[2]], + relWithHalfSchema: [otherRows[3]], + relWithIllegalSchema: [otherRows[4]], + }) + + const retrieved = await retrieveDelegate(row) + + expect(retrieved).toEqual( + expect.objectContaining({ + title: row.title, + relWithNoSchema: [ + { + _id: otherRows[0]._id, + primaryDisplay: otherRows[0].name, + }, + ], + relWithEmptySchema: [ + { + _id: otherRows[1]._id, + primaryDisplay: otherRows[1].name, + }, + ], + relWithFullSchema: [ + { + _id: otherRows[2]._id, + primaryDisplay: otherRows[2].name, + name: otherRows[2].name, + age: otherRows[2].age, + id: otherRows[2].id, + }, + ], + relWithHalfSchema: [ + { + _id: otherRows[3]._id, + primaryDisplay: otherRows[3].name, + name: otherRows[3].name, + }, + ], + relWithIllegalSchema: [ + { + _id: otherRows[4]._id, + primaryDisplay: otherRows[4].name, + name: otherRows[4].name, + }, + ], + }) + ) + } + ) + + it.each([ + [ + "from table fetch", + async (row: Row) => { + const rows = await config.api.row.fetch(tableId) + return rows.find(r => r._id === row._id!) + }, + ], + [ + "from table search", + async (row: Row) => { + const { rows } = await config.api.row.search(tableId) + return rows.find(r => r._id === row._id!) 
+ }, + ], + ])( + "does not enrich when fetching from the table (via %s)", + async (__, retrieveDelegate) => { + const otherRows = _.sampleSize(auxData, 5) + + const row = await config.api.row.save(viewId, { + title: generator.word(), + relWithNoSchema: [otherRows[0]], + relWithEmptySchema: [otherRows[1]], + relWithFullSchema: [otherRows[2]], + relWithHalfSchema: [otherRows[3]], + relWithIllegalSchema: [otherRows[4]], + }) + + const retrieved = await retrieveDelegate(row) + + expect(retrieved).toEqual( + expect.objectContaining({ + title: row.title, + relWithNoSchema: [ + { + _id: otherRows[0]._id, + primaryDisplay: otherRows[0].name, + }, + ], + relWithEmptySchema: [ + { + _id: otherRows[1]._id, + primaryDisplay: otherRows[1].name, + }, + ], + relWithFullSchema: [ + { + _id: otherRows[2]._id, + primaryDisplay: otherRows[2].name, + }, + ], + relWithHalfSchema: [ + { + _id: otherRows[3]._id, + primaryDisplay: otherRows[3].name, + }, + ], + relWithIllegalSchema: [ + { + _id: otherRows[4]._id, + primaryDisplay: otherRows[4].name, + }, + ], + }) + ) + } + ) + }) + + isInternal && + describe("AI fields", () => { + let table: Table + + beforeAll(async () => { + mocks.licenses.useBudibaseAI() + mocks.licenses.useAICustomConfigs() + table = await config.api.table.save( + saveTableRequest({ + schema: { + ai: { + name: "ai", + type: FieldType.AI, + operation: AIOperationEnum.PROMPT, + prompt: "Convert the following to German: '{{ product }}'", + }, + product: { + name: "product", + type: FieldType.STRING, + }, + }, + }) + ) + + await config.api.row.save(table._id!, { + product: generator.word(), + }) + }) + + afterAll(() => { + jest.unmock("@budibase/pro") + }) + + it("should be able to save a row with an AI column", async () => { + const { rows } = await config.api.row.search(table._id!) + expect(rows.length).toBe(1) + expect(rows[0].ai).toEqual("Mock LLM Response") + }) + + it("should be able to update a row with an AI column", async () => { + const { rows } = await config.api.row.search(table._id!) 
+ expect(rows.length).toBe(1) + await config.api.row.save(table._id!, { + product: generator.word(), + ...rows[0], + }) + expect(rows.length).toBe(1) + expect(rows[0].ai).toEqual("Mock LLM Response") + }) + }) + + describe("Formula fields", () => { + let table: Table + let otherTable: Table + let relatedRow: Row, mainRow: Row + + beforeAll(async () => { + otherTable = await config.api.table.save(defaultTable()) + table = await config.api.table.save( + saveTableRequest({ + schema: { + links: { + name: "links", + fieldName: "links", + type: FieldType.LINK, + tableId: otherTable._id!, + relationshipType: RelationshipType.ONE_TO_MANY, + }, + formula: { + name: "formula", + type: FieldType.FORMULA, + formula: "{{ links.0.name }}", + formulaType: FormulaType.DYNAMIC, + }, + }, + }) + ) + + relatedRow = await config.api.row.save(otherTable._id!, { + name: generator.word(), + description: generator.paragraph(), + }) + mainRow = await config.api.row.save(table._id!, { + name: generator.word(), + description: generator.paragraph(), + tableId: table._id!, + links: [relatedRow._id], + }) + }) + + async function updateFormulaColumn( + formula: string, + opts?: { responseType?: FormulaResponseType; formulaType?: FormulaType } + ) { + table = await config.api.table.save({ + ...table, + schema: { + ...table.schema, + formula: { + name: "formula", + type: FieldType.FORMULA, + formula: formula, + responseType: opts?.responseType, + formulaType: opts?.formulaType || FormulaType.DYNAMIC, + }, + }, + }) + } + + it("should be able to search for rows containing formulas", async () => { + const { rows } = await config.api.row.search(table._id!) + expect(rows.length).toBe(1) + expect(rows[0].links.length).toBe(1) + const row = rows[0] + expect(row.formula).toBe(relatedRow.name) + }) + + it("should coerce - number response type", async () => { + await updateFormulaColumn(encodeJS("return 1"), { + responseType: FieldType.NUMBER, + }) + const { rows } = await config.api.row.search(table._id!) + expect(rows[0].formula).toBe(1) + }) + + it("should coerce - boolean response type", async () => { + await updateFormulaColumn(encodeJS("return true"), { + responseType: FieldType.BOOLEAN, + }) + const { rows } = await config.api.row.search(table._id!) + expect(rows[0].formula).toBe(true) + }) + + it("should coerce - datetime response type", async () => { + await updateFormulaColumn(encodeJS("return new Date()"), { + responseType: FieldType.DATETIME, + }) + const { rows } = await config.api.row.search(table._id!) + expect(isDate(rows[0].formula)).toBe(true) + }) + + it("should coerce - datetime with invalid value", async () => { + await updateFormulaColumn(encodeJS("return 'a'"), { + responseType: FieldType.DATETIME, + }) + const { rows } = await config.api.row.search(table._id!) + expect(rows[0].formula).toBeUndefined() + }) + + it("should coerce handlebars", async () => { + await updateFormulaColumn("{{ add 1 1 }}", { + responseType: FieldType.NUMBER, + }) + const { rows } = await config.api.row.search(table._id!) + expect(rows[0].formula).toBe(2) + }) + + it("should coerce handlebars to string (default)", async () => { + await updateFormulaColumn("{{ add 1 1 }}", { + responseType: FieldType.STRING, + }) + const { rows } = await config.api.row.search(table._id!) 
+ expect(rows[0].formula).toBe("2") + }) + + isInternal && + it("should coerce a static handlebars formula", async () => { + await updateFormulaColumn(encodeJS("return 1"), { + responseType: FieldType.NUMBER, + formulaType: FormulaType.STATIC, + }) + // save the row to store the static value + await config.api.row.save(table._id!, mainRow) + const { rows } = await config.api.row.search(table._id!) + expect(rows[0].formula).toBe(1) + }) + }) + + describe("Formula JS protection", () => { + it("should time out JS execution if a single cell takes too long", async () => { + await withEnv({ JS_PER_INVOCATION_TIMEOUT_MS: 40 }, async () => { + const js = encodeJS( + ` + let i = 0; + while (true) { + i++; + } + return i; + ` + ) + + const table = await config.api.table.save( + saveTableRequest({ + schema: { + text: { + name: "text", + type: FieldType.STRING, + }, + formula: { + name: "formula", + type: FieldType.FORMULA, + formula: js, formulaType: FormulaType.DYNAMIC, }, }, }) ) - relatedRow = await config.api.row.save(otherTable._id!, { - name: generator.word(), - description: generator.paragraph(), - }) - mainRow = await config.api.row.save(table._id!, { - name: generator.word(), - description: generator.paragraph(), - tableId: table._id!, - links: [relatedRow._id], - }) - }) - - async function updateFormulaColumn( - formula: string, - opts?: { responseType?: FormulaResponseType; formulaType?: FormulaType } - ) { - table = await config.api.table.save({ - ...table, - schema: { - ...table.schema, - formula: { - name: "formula", - type: FieldType.FORMULA, - formula: formula, - responseType: opts?.responseType, - formulaType: opts?.formulaType || FormulaType.DYNAMIC, - }, - }, - }) - } - - it("should be able to search for rows containing formulas", async () => { + await config.api.row.save(table._id!, { text: "foo" }) const { rows } = await config.api.row.search(table._id!) - expect(rows.length).toBe(1) - expect(rows[0].links.length).toBe(1) + expect(rows).toHaveLength(1) const row = rows[0] - expect(row.formula).toBe(relatedRow.name) + expect(row.text).toBe("foo") + expect(row.formula).toBe("Timed out while executing JS") }) - - it("should coerce - number response type", async () => { - await updateFormulaColumn(encodeJS("return 1"), { - responseType: FieldType.NUMBER, - }) - const { rows } = await config.api.row.search(table._id!) - expect(rows[0].formula).toBe(1) - }) - - it("should coerce - boolean response type", async () => { - await updateFormulaColumn(encodeJS("return true"), { - responseType: FieldType.BOOLEAN, - }) - const { rows } = await config.api.row.search(table._id!) - expect(rows[0].formula).toBe(true) - }) - - it("should coerce - datetime response type", async () => { - await updateFormulaColumn(encodeJS("return new Date()"), { - responseType: FieldType.DATETIME, - }) - const { rows } = await config.api.row.search(table._id!) - expect(isDate(rows[0].formula)).toBe(true) - }) - - it("should coerce - datetime with invalid value", async () => { - await updateFormulaColumn(encodeJS("return 'a'"), { - responseType: FieldType.DATETIME, - }) - const { rows } = await config.api.row.search(table._id!) - expect(rows[0].formula).toBeUndefined() - }) - - it("should coerce handlebars", async () => { - await updateFormulaColumn("{{ add 1 1 }}", { - responseType: FieldType.NUMBER, - }) - const { rows } = await config.api.row.search(table._id!) 
- expect(rows[0].formula).toBe(2) - }) - - it("should coerce handlebars to string (default)", async () => { - await updateFormulaColumn("{{ add 1 1 }}", { - responseType: FieldType.STRING, - }) - const { rows } = await config.api.row.search(table._id!) - expect(rows[0].formula).toBe("2") - }) - - isInternal && - it("should coerce a static handlebars formula", async () => { - await updateFormulaColumn(encodeJS("return 1"), { - responseType: FieldType.NUMBER, - formulaType: FormulaType.STATIC, - }) - // save the row to store the static value - await config.api.row.save(table._id!, mainRow) - const { rows } = await config.api.row.search(table._id!) - expect(rows[0].formula).toBe(1) - }) }) - describe("Formula JS protection", () => { - it("should time out JS execution if a single cell takes too long", async () => { - await withEnv({ JS_PER_INVOCATION_TIMEOUT_MS: 40 }, async () => { + it("should time out JS execution if a multiple cells take too long", async () => { + await withEnv( + { + JS_PER_INVOCATION_TIMEOUT_MS: 40, + JS_PER_REQUEST_TIMEOUT_MS: 80, + }, + async () => { const js = encodeJS( ` let i = 0; @@ -3359,127 +3390,83 @@ datasourceDescribe( }) ) - await config.api.row.save(table._id!, { text: "foo" }) - const { rows } = await config.api.row.search(table._id!) - expect(rows).toHaveLength(1) - const row = rows[0] - expect(row.text).toBe("foo") - expect(row.formula).toBe("Timed out while executing JS") - }) - }) + for (let i = 0; i < 10; i++) { + await config.api.row.save(table._id!, { text: "foo" }) + } - it("should time out JS execution if a multiple cells take too long", async () => { - await withEnv( - { - JS_PER_INVOCATION_TIMEOUT_MS: 40, - JS_PER_REQUEST_TIMEOUT_MS: 80, - }, - async () => { - const js = encodeJS( - ` - let i = 0; - while (true) { - i++; + // Run this test 3 times to make sure that there's no cross-request + // pollution of the execution time tracking. + for (let reqs = 0; reqs < 3; reqs++) { + const { rows } = await config.api.row.search(table._id!) + expect(rows).toHaveLength(10) + + let i = 0 + for (; i < 10; i++) { + const row = rows[i] + if (row.formula !== JsTimeoutError.message) { + break } - return i; - ` - ) - - const table = await config.api.table.save( - saveTableRequest({ - schema: { - text: { - name: "text", - type: FieldType.STRING, - }, - formula: { - name: "formula", - type: FieldType.FORMULA, - formula: js, - formulaType: FormulaType.DYNAMIC, - }, - }, - }) - ) - - for (let i = 0; i < 10; i++) { - await config.api.row.save(table._id!, { text: "foo" }) } - // Run this test 3 times to make sure that there's no cross-request - // pollution of the execution time tracking. - for (let reqs = 0; reqs < 3; reqs++) { - const { rows } = await config.api.row.search(table._id!) - expect(rows).toHaveLength(10) + // Given the execution times are not deterministic, we can't be sure + // of the exact number of rows that were executed before the timeout + // but it should absolutely be at least 1. + expect(i).toBeGreaterThan(0) + expect(i).toBeLessThan(5) - let i = 0 - for (; i < 10; i++) { - const row = rows[i] - if (row.formula !== JsTimeoutError.message) { - break - } - } - - // Given the execution times are not deterministic, we can't be sure - // of the exact number of rows that were executed before the timeout - // but it should absolutely be at least 1. 
- expect(i).toBeGreaterThan(0) - expect(i).toBeLessThan(5) - - for (; i < 10; i++) { - const row = rows[i] - expect(row.text).toBe("foo") - expect(row.formula).toStartWith("CPU time limit exceeded ") - } + for (; i < 10; i++) { + const row = rows[i] + expect(row.text).toBe("foo") + expect(row.formula).toStartWith("CPU time limit exceeded ") } } - ) - }) - - it("should not carry over context between formulas", async () => { - const js = encodeJS(`return $("[text]");`) - const table = await config.api.table.save( - saveTableRequest({ - schema: { - text: { - name: "text", - type: FieldType.STRING, - }, - formula: { - name: "formula", - type: FieldType.FORMULA, - formula: js, - formulaType: FormulaType.DYNAMIC, - }, - }, - }) - ) - - for (let i = 0; i < 10; i++) { - await config.api.row.save(table._id!, { text: `foo${i}` }) } - - const { rows } = await config.api.row.search(table._id!) - expect(rows).toHaveLength(10) - - const formulaValues = rows.map(r => r.formula) - expect(formulaValues).toEqual( - expect.arrayContaining([ - "foo0", - "foo1", - "foo2", - "foo3", - "foo4", - "foo5", - "foo6", - "foo7", - "foo8", - "foo9", - ]) - ) - }) + ) }) - } -) + + it("should not carry over context between formulas", async () => { + const js = encodeJS(`return $("[text]");`) + const table = await config.api.table.save( + saveTableRequest({ + schema: { + text: { + name: "text", + type: FieldType.STRING, + }, + formula: { + name: "formula", + type: FieldType.FORMULA, + formula: js, + formulaType: FormulaType.DYNAMIC, + }, + }, + }) + ) + + for (let i = 0; i < 10; i++) { + await config.api.row.save(table._id!, { text: `foo${i}` }) + } + + const { rows } = await config.api.row.search(table._id!) + expect(rows).toHaveLength(10) + + const formulaValues = rows.map(r => r.formula) + expect(formulaValues).toEqual( + expect.arrayContaining([ + "foo0", + "foo1", + "foo2", + "foo3", + "foo4", + "foo5", + "foo6", + "foo7", + "foo8", + "foo9", + ]) + ) + }) + }) +}) // todo: remove me diff --git a/packages/server/src/integrations/tests/utils/index.ts b/packages/server/src/integrations/tests/utils/index.ts index 5e7316c39a..0c5c308fb2 100644 --- a/packages/server/src/integrations/tests/utils/index.ts +++ b/packages/server/src/integrations/tests/utils/index.ts @@ -35,7 +35,6 @@ const providers: Record = { } export interface DatasourceDescribeOpts { - name: string only?: DatabaseName[] exclude?: DatabaseName[] } @@ -103,15 +102,14 @@ function createDummyTest() { } export function datasourceDescribe( - opts: DatasourceDescribeOpts, - cb: (args: DatasourceDescribeReturn) => void -) { + opts: DatasourceDescribeOpts +): DatabaseName[] { if (process.env.DATASOURCE === "none") { createDummyTest() - return + process.exit(0) } - const { name, only, exclude } = opts + const { only, exclude } = opts if (only && exclude) { throw new Error("you can only supply one of 'only' or 'exclude'") @@ -130,36 +128,32 @@ export function datasourceDescribe( if (databases.length === 0) { createDummyTest() - return + process.exit(0) } - describe.each(databases)(name, name => { - const config = new TestConfiguration() + return databases +} - afterAll(() => { - config.end() - }) - - cb({ - name, - config, - dsProvider: () => createDatasources(config, name), - isInternal: name === DatabaseName.SQS, - isExternal: name !== DatabaseName.SQS, - isSql: [ - DatabaseName.MARIADB, - DatabaseName.MYSQL, - DatabaseName.POSTGRES, - DatabaseName.SQL_SERVER, - DatabaseName.ORACLE, - ].includes(name), - isMySQL: name === DatabaseName.MYSQL, - isPostgres: name 
=== DatabaseName.POSTGRES, - isMongodb: name === DatabaseName.MONGODB, - isMSSQL: name === DatabaseName.SQL_SERVER, - isOracle: name === DatabaseName.ORACLE, - }) - }) +export function datasourceProps(dbName: DatabaseName) { + const config = new TestConfiguration() + return { + config, + dsProvider: () => createDatasources(config, dbName), + isInternal: dbName === DatabaseName.SQS, + isExternal: dbName !== DatabaseName.SQS, + isSql: [ + DatabaseName.MARIADB, + DatabaseName.MYSQL, + DatabaseName.POSTGRES, + DatabaseName.SQL_SERVER, + DatabaseName.ORACLE, + ].includes(dbName), + isMySQL: dbName === DatabaseName.MYSQL, + isPostgres: dbName === DatabaseName.POSTGRES, + isMongodb: dbName === DatabaseName.MONGODB, + isMSSQL: dbName === DatabaseName.SQL_SERVER, + isOracle: dbName === DatabaseName.ORACLE, + } } function getDatasource( From c7a0489b82440c476fe7372ee8387c582ef2846c Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Mon, 18 Nov 2024 17:34:35 +0000 Subject: [PATCH 02/16] Single function. --- .../server/src/api/routes/tests/row.spec.ts | 6418 +++++++++-------- .../src/integrations/tests/utils/index.ts | 15 +- 2 files changed, 3227 insertions(+), 3206 deletions(-) diff --git a/packages/server/src/api/routes/tests/row.spec.ts b/packages/server/src/api/routes/tests/row.spec.ts index 2eb7c45e58..02995e6d0a 100644 --- a/packages/server/src/api/routes/tests/row.spec.ts +++ b/packages/server/src/api/routes/tests/row.spec.ts @@ -3,7 +3,6 @@ import * as setup from "./utilities" import { DatabaseName, datasourceDescribe, - datasourceProps, } from "../../../integrations/tests/utils" import tk from "timekeeper" @@ -86,404 +85,163 @@ function encodeJS(binding: string) { return `{{ js "${Buffer.from(binding).toString("base64")}"}}` } -const databases = datasourceDescribe({ exclude: [DatabaseName.MONGODB] }) +const descriptions = datasourceDescribe({ exclude: [DatabaseName.MONGODB] }) -describe.each(databases)("/rows (%s)", dbName => { - const { config, dsProvider, isInternal, isMSSQL, isOracle } = - datasourceProps(dbName) - let table: Table - let datasource: Datasource | undefined - let client: Knex | undefined - - beforeAll(async () => { - const ds = await dsProvider() - datasource = ds.datasource - client = ds.client - }) - - afterAll(async () => { - setup.afterAll() - }) - - function saveTableRequest( - // We omit the name field here because it's generated in the function with a - // high likelihood to be unique. Tests should not have any reason to control - // the table name they're writing to. - ...overrides: Partial>[] - ): SaveTableRequest { - const defaultSchema: TableSchema = { - id: { - type: FieldType.NUMBER, - name: "id", - autocolumn: true, - constraints: { - presence: true, - }, - }, - } - - for (const override of overrides) { - if (override.primary) { - delete defaultSchema.id - } - } - - const req: SaveTableRequest = { - name: uuid.v4().substring(0, 10), - type: "table", - sourceType: datasource - ? TableSourceType.EXTERNAL - : TableSourceType.INTERNAL, - sourceId: datasource ? datasource._id! : INTERNAL_TABLE_SOURCE_ID, - primary: ["id"], - schema: defaultSchema, - } - const merged = merge(req, ...overrides) - return merged - } - - function defaultTable( - // We omit the name field here because it's generated in the function with a - // high likelihood to be unique. Tests should not have any reason to control - // the table name they're writing to. 
- ...overrides: Partial>[] - ): SaveTableRequest { - return saveTableRequest( - { - primaryDisplay: "name", - schema: { - name: { - type: FieldType.STRING, - name: "name", - constraints: { - type: "string", - }, - }, - description: { - type: FieldType.STRING, - name: "description", - constraints: { - type: "string", - }, - }, - }, - }, - ...overrides - ) - } - - beforeEach(async () => { - mocks.licenses.useCloudFree() - }) - - const getRowUsage = async () => { - const { total } = await config.doInContext(undefined, () => - quotas.getCurrentUsageValues(QuotaUsageType.STATIC, StaticQuotaName.ROWS) - ) - return total - } - - const assertRowUsage = async (expected: number) => { - const usage = await getRowUsage() - - // Because our quota tracking is not perfect, we allow a 10% margin of - // error. This is to account for the fact that parallel writes can result - // in some quota updates getting lost. We don't have any need to solve this - // right now, so we just allow for some error. - if (expected === 0) { - expect(usage).toEqual(0) - return - } - expect(usage).toBeGreaterThan(expected * 0.9) - expect(usage).toBeLessThan(expected * 1.1) - } - - const defaultRowFields = isInternal - ? { - type: "row", - createdAt: timestamp, - updatedAt: timestamp, - } - : undefined - - beforeAll(async () => { - table = await config.api.table.save(defaultTable()) - }) - - describe("create", () => { - it("creates a new row successfully", async () => { - const rowUsage = await getRowUsage() - const row = await config.api.row.save(table._id!, { - name: "Test Contact", - }) - expect(row.name).toEqual("Test Contact") - expect(row._rev).toBeDefined() - await assertRowUsage(isInternal ? rowUsage + 1 : rowUsage) - }) - - it("fails to create a row for a table that does not exist", async () => { - const rowUsage = await getRowUsage() - await config.api.row.save("1234567", {}, { status: 404 }) - await assertRowUsage(rowUsage) - }) - - it("fails to create a row if required fields are missing", async () => { - const rowUsage = await getRowUsage() - const table = await config.api.table.save( - saveTableRequest({ - schema: { - required: { - type: FieldType.STRING, - name: "required", - constraints: { - type: "string", - presence: true, - }, - }, - }, - }) - ) - await config.api.row.save( - table._id!, - {}, - { - status: 500, - body: { - validationErrors: { - required: ["can't be blank"], - }, - }, - } - ) - await assertRowUsage(rowUsage) - }) - - isInternal && - it("increment row autoId per create row request", async () => { - const rowUsage = await getRowUsage() - - const newTable = await config.api.table.save( - saveTableRequest({ - schema: { - "Row ID": { - name: "Row ID", - type: FieldType.NUMBER, - subtype: AutoFieldSubType.AUTO_ID, - icon: "ri-magic-line", - autocolumn: true, - constraints: { - type: "number", - presence: true, - numericality: { - greaterThanOrEqualTo: "", - lessThanOrEqualTo: "", - }, - }, - }, - }, - }) - ) - - let previousId = 0 - for (let i = 0; i < 10; i++) { - const row = await config.api.row.save(newTable._id!, {}) - expect(row["Row ID"]).toBeGreaterThan(previousId) - previousId = row["Row ID"] - } - await assertRowUsage(isInternal ? 
rowUsage + 10 : rowUsage) - }) - - isInternal && - it("should increment auto ID correctly when creating rows in parallel", async () => { - const table = await config.api.table.save( - saveTableRequest({ - schema: { - "Row ID": { - name: "Row ID", - type: FieldType.NUMBER, - subtype: AutoFieldSubType.AUTO_ID, - icon: "ri-magic-line", - autocolumn: true, - constraints: { - type: "number", - presence: true, - numericality: { - greaterThanOrEqualTo: "", - lessThanOrEqualTo: "", - }, - }, - }, - }, - }) - ) - - const sequence = Array(50) - .fill(0) - .map((_, i) => i + 1) - - // This block of code is simulating users creating auto ID rows at the - // same time. It's expected that this operation will sometimes return - // a document conflict error (409), but the idea is to retry in those - // situations. The code below does this a large number of times with - // small, random delays between them to try and get through the list - // as quickly as possible. - await Promise.all( - sequence.map(async () => { - const attempts = 30 - for (let attempt = 0; attempt < attempts; attempt++) { - try { - await config.api.row.save(table._id!, {}) - return - } catch (e) { - await new Promise(r => setTimeout(r, Math.random() * 50)) - } - } - throw new Error(`Failed to create row after ${attempts} attempts`) - }) - ) - - const rows = await config.api.row.fetch(table._id!) - expect(rows).toHaveLength(50) - - // The main purpose of this test is to ensure that even under pressure, - // we maintain data integrity. An auto ID column should hand out - // monotonically increasing unique integers no matter what. - const ids = rows.map(r => r["Row ID"]) - expect(ids).toEqual(expect.arrayContaining(sequence)) - }) - - isInternal && - it("doesn't allow creating in user table", async () => { - const response = await config.api.row.save( - InternalTable.USER_METADATA, - { - firstName: "Joe", - lastName: "Joe", - email: "joe@joe.com", - roles: {}, - }, - { status: 400 } - ) - expect(response.message).toBe("Cannot create new user entry.") - }) - - it("should not mis-parse date string out of JSON", async () => { - const table = await config.api.table.save( - saveTableRequest({ - schema: { - name: { - type: FieldType.STRING, - name: "name", - }, - }, - }) - ) - - const row = await config.api.row.save(table._id!, { - name: `{ "foo": "2023-01-26T11:48:57.000Z" }`, - }) - - expect(row.name).toEqual(`{ "foo": "2023-01-26T11:48:57.000Z" }`) - }) - - describe("default values", () => { +if (descriptions.length) { + describe.each(descriptions)( + "/rows ($dbName)", + ({ config, dsProvider, isInternal, isMSSQL, isOracle }) => { let table: Table + let datasource: Datasource | undefined + let client: Knex | undefined - describe("string column", () => { - beforeAll(async () => { - table = await config.api.table.save( + beforeAll(async () => { + const ds = await dsProvider() + datasource = ds.datasource + client = ds.client + }) + + afterAll(async () => { + setup.afterAll() + }) + + function saveTableRequest( + // We omit the name field here because it's generated in the function with a + // high likelihood to be unique. Tests should not have any reason to control + // the table name they're writing to. 
+ ...overrides: Partial>[] + ): SaveTableRequest { + const defaultSchema: TableSchema = { + id: { + type: FieldType.NUMBER, + name: "id", + autocolumn: true, + constraints: { + presence: true, + }, + }, + } + + for (const override of overrides) { + if (override.primary) { + delete defaultSchema.id + } + } + + const req: SaveTableRequest = { + name: uuid.v4().substring(0, 10), + type: "table", + sourceType: datasource + ? TableSourceType.EXTERNAL + : TableSourceType.INTERNAL, + sourceId: datasource ? datasource._id! : INTERNAL_TABLE_SOURCE_ID, + primary: ["id"], + schema: defaultSchema, + } + const merged = merge(req, ...overrides) + return merged + } + + function defaultTable( + // We omit the name field here because it's generated in the function with a + // high likelihood to be unique. Tests should not have any reason to control + // the table name they're writing to. + ...overrides: Partial>[] + ): SaveTableRequest { + return saveTableRequest( + { + primaryDisplay: "name", + schema: { + name: { + type: FieldType.STRING, + name: "name", + constraints: { + type: "string", + }, + }, + description: { + type: FieldType.STRING, + name: "description", + constraints: { + type: "string", + }, + }, + }, + }, + ...overrides + ) + } + + beforeEach(async () => { + mocks.licenses.useCloudFree() + }) + + const getRowUsage = async () => { + const { total } = await config.doInContext(undefined, () => + quotas.getCurrentUsageValues( + QuotaUsageType.STATIC, + StaticQuotaName.ROWS + ) + ) + return total + } + + const assertRowUsage = async (expected: number) => { + const usage = await getRowUsage() + + // Because our quota tracking is not perfect, we allow a 10% margin of + // error. This is to account for the fact that parallel writes can result + // in some quota updates getting lost. We don't have any need to solve this + // right now, so we just allow for some error. + if (expected === 0) { + expect(usage).toEqual(0) + return + } + expect(usage).toBeGreaterThan(expected * 0.9) + expect(usage).toBeLessThan(expected * 1.1) + } + + const defaultRowFields = isInternal + ? { + type: "row", + createdAt: timestamp, + updatedAt: timestamp, + } + : undefined + + beforeAll(async () => { + table = await config.api.table.save(defaultTable()) + }) + + describe("create", () => { + it("creates a new row successfully", async () => { + const rowUsage = await getRowUsage() + const row = await config.api.row.save(table._id!, { + name: "Test Contact", + }) + expect(row.name).toEqual("Test Contact") + expect(row._rev).toBeDefined() + await assertRowUsage(isInternal ? 
rowUsage + 1 : rowUsage) + }) + + it("fails to create a row for a table that does not exist", async () => { + const rowUsage = await getRowUsage() + await config.api.row.save("1234567", {}, { status: 404 }) + await assertRowUsage(rowUsage) + }) + + it("fails to create a row if required fields are missing", async () => { + const rowUsage = await getRowUsage() + const table = await config.api.table.save( saveTableRequest({ schema: { - description: { - name: "description", + required: { type: FieldType.STRING, - default: "default description", - }, - }, - }) - ) - }) - - it("creates a new row with a default value successfully", async () => { - const row = await config.api.row.save(table._id!, {}) - expect(row.description).toEqual("default description") - }) - - it("does not use default value if value specified", async () => { - const row = await config.api.row.save(table._id!, { - description: "specified description", - }) - expect(row.description).toEqual("specified description") - }) - - it("uses the default value if value is null", async () => { - const row = await config.api.row.save(table._id!, { - description: null, - }) - expect(row.description).toEqual("default description") - }) - - it("uses the default value if value is undefined", async () => { - const row = await config.api.row.save(table._id!, { - description: undefined, - }) - expect(row.description).toEqual("default description") - }) - }) - - describe("number column", () => { - beforeAll(async () => { - table = await config.api.table.save( - saveTableRequest({ - schema: { - age: { - name: "age", - type: FieldType.NUMBER, - default: "25", - }, - }, - }) - ) - }) - - it("creates a new row with a default value successfully", async () => { - const row = await config.api.row.save(table._id!, {}) - expect(row.age).toEqual(25) - }) - - it("does not use default value if value specified", async () => { - const row = await config.api.row.save(table._id!, { - age: 30, - }) - expect(row.age).toEqual(30) - }) - }) - - describe("date column", () => { - it("creates a row with a default value successfully", async () => { - const table = await config.api.table.save( - saveTableRequest({ - schema: { - date: { - name: "date", - type: FieldType.DATETIME, - default: "2023-01-26T11:48:57.000Z", - }, - }, - }) - ) - const row = await config.api.row.save(table._id!, {}) - expect(row.date).toEqual("2023-01-26T11:48:57.000Z") - }) - - it("gives an error if the default value is invalid", async () => { - const table = await config.api.table.save( - saveTableRequest({ - schema: { - date: { - name: "date", - type: FieldType.DATETIME, - default: "invalid", + name: "required", + constraints: { + type: "string", + presence: true, + }, }, }, }) @@ -492,297 +250,193 @@ describe.each(databases)("/rows (%s)", dbName => { table._id!, {}, { - status: 400, + status: 500, body: { - message: `Invalid default value for field 'date' - Invalid date value: "invalid"`, + validationErrors: { + required: ["can't be blank"], + }, }, } ) - }) - }) - - describe("options column", () => { - beforeAll(async () => { - table = await config.api.table.save( - saveTableRequest({ - schema: { - status: { - name: "status", - type: FieldType.OPTIONS, - default: "requested", - constraints: { - inclusion: ["requested", "approved"], - }, - }, - }, - }) - ) + await assertRowUsage(rowUsage) }) - it("creates a new row with a default value successfully", async () => { - const row = await config.api.row.save(table._id!, {}) - expect(row.status).toEqual("requested") - }) + isInternal && + 
it("increment row autoId per create row request", async () => { + const rowUsage = await getRowUsage() - it("does not use default value if value specified", async () => { - const row = await config.api.row.save(table._id!, { - status: "approved", - }) - expect(row.status).toEqual("approved") - }) - }) - - describe("array column", () => { - beforeAll(async () => { - table = await config.api.table.save( - saveTableRequest({ - schema: { - food: { - name: "food", - type: FieldType.ARRAY, - default: ["apple", "orange"], - constraints: { - type: JsonFieldSubType.ARRAY, - inclusion: ["apple", "orange", "banana"], - }, - }, - }, - }) - ) - }) - - it("creates a new row with a default value successfully", async () => { - const row = await config.api.row.save(table._id!, {}) - expect(row.food).toEqual(["apple", "orange"]) - }) - - it("creates a new row with a default value when given an empty list", async () => { - const row = await config.api.row.save(table._id!, { food: [] }) - expect(row.food).toEqual(["apple", "orange"]) - }) - - it("does not use default value if value specified", async () => { - const row = await config.api.row.save(table._id!, { - food: ["orange"], - }) - expect(row.food).toEqual(["orange"]) - }) - - it("resets back to its default value when empty", async () => { - let row = await config.api.row.save(table._id!, { - food: ["orange"], - }) - row = await config.api.row.save(table._id!, { ...row, food: [] }) - expect(row.food).toEqual(["apple", "orange"]) - }) - }) - - describe("user column", () => { - beforeAll(async () => { - table = await config.api.table.save( - saveTableRequest({ - schema: { - user: { - name: "user", - type: FieldType.BB_REFERENCE_SINGLE, - subtype: BBReferenceFieldSubType.USER, - default: "{{ [Current User]._id }}", - }, - }, - }) - ) - }) - - it("creates a new row with a default value successfully", async () => { - const row = await config.api.row.save(table._id!, {}) - expect(row.user._id).toEqual(config.getUser()._id) - }) - - it("does not use default value if value specified", async () => { - const id = `us_${utils.newid()}` - await config.createUser({ _id: id }) - const row = await config.api.row.save(table._id!, { - user: id, - }) - expect(row.user._id).toEqual(id) - }) - }) - - describe("multi-user column", () => { - beforeAll(async () => { - table = await config.api.table.save( - saveTableRequest({ - schema: { - users: { - name: "users", - type: FieldType.BB_REFERENCE, - subtype: BBReferenceFieldSubType.USER, - default: ["{{ [Current User]._id }}"], - }, - }, - }) - ) - }) - - it("creates a new row with a default value successfully", async () => { - const row = await config.api.row.save(table._id!, {}) - expect(row.users).toHaveLength(1) - expect(row.users[0]._id).toEqual(config.getUser()._id) - }) - - it("does not use default value if value specified", async () => { - const id = `us_${utils.newid()}` - await config.createUser({ _id: id }) - const row = await config.api.row.save(table._id!, { - users: [id], - }) - expect(row.users).toHaveLength(1) - expect(row.users[0]._id).toEqual(id) - }) - }) - - describe("boolean column", () => { - beforeAll(async () => { - table = await config.api.table.save( - saveTableRequest({ - schema: { - active: { - name: "active", - type: FieldType.BOOLEAN, - default: "true", - }, - }, - }) - ) - }) - - it("creates a new row with a default value successfully", async () => { - const row = await config.api.row.save(table._id!, {}) - expect(row.active).toEqual(true) - }) - - it("does not use default value if value 
specified", async () => { - const row = await config.api.row.save(table._id!, { - active: false, - }) - expect(row.active).toEqual(false) - }) - }) - - describe("bigint column", () => { - beforeAll(async () => { - table = await config.api.table.save( - saveTableRequest({ - schema: { - bigNumber: { - name: "bigNumber", - type: FieldType.BIGINT, - default: "1234567890", - }, - }, - }) - ) - }) - - it("creates a new row with a default value successfully", async () => { - const row = await config.api.row.save(table._id!, {}) - expect(row.bigNumber).toEqual("1234567890") - }) - - it("does not use default value if value specified", async () => { - const row = await config.api.row.save(table._id!, { - bigNumber: "9876543210", - }) - expect(row.bigNumber).toEqual("9876543210") - }) - }) - - describe("bindings", () => { - describe("string column", () => { - beforeAll(async () => { - table = await config.api.table.save( + const newTable = await config.api.table.save( saveTableRequest({ schema: { - description: { - name: "description", - type: FieldType.STRING, - default: `{{ date now "YYYY-MM-DDTHH:mm:ss" }}`, - }, - }, - }) - ) - }) - - it("can use bindings in default values", async () => { - const row = await config.api.row.save(table._id!, {}) - expect(row.description).toMatch( - /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}/ - ) - }) - - it("does not use default value if value specified", async () => { - const row = await config.api.row.save(table._id!, { - description: "specified description", - }) - expect(row.description).toEqual("specified description") - }) - - it("can bind the current user", async () => { - const table = await config.api.table.save( - saveTableRequest({ - schema: { - user: { - name: "user", - type: FieldType.STRING, - default: `{{ [Current User]._id }}`, - }, - }, - }) - ) - const row = await config.api.row.save(table._id!, {}) - expect(row.user).toEqual(config.getUser()._id) - }) - - it("cannot access current user password", async () => { - const table = await config.api.table.save( - saveTableRequest({ - schema: { - user: { - name: "user", - type: FieldType.STRING, - default: `{{ user.password }}`, - }, - }, - }) - ) - const row = await config.api.row.save(table._id!, {}) - // For some reason it's null for internal tables, and undefined for - // external. - expect(row.user == null).toBe(true) - }) - }) - - describe("number column", () => { - beforeAll(async () => { - table = await config.api.table.save( - saveTableRequest({ - schema: { - age: { - name: "age", + "Row ID": { + name: "Row ID", type: FieldType.NUMBER, - default: `{{ sum 10 10 5 }}`, + subtype: AutoFieldSubType.AUTO_ID, + icon: "ri-magic-line", + autocolumn: true, + constraints: { + type: "number", + presence: true, + numericality: { + greaterThanOrEqualTo: "", + lessThanOrEqualTo: "", + }, + }, }, }, }) ) + + let previousId = 0 + for (let i = 0; i < 10; i++) { + const row = await config.api.row.save(newTable._id!, {}) + expect(row["Row ID"]).toBeGreaterThan(previousId) + previousId = row["Row ID"] + } + await assertRowUsage(isInternal ? 
rowUsage + 10 : rowUsage) }) - it("can use bindings in default values", async () => { - const row = await config.api.row.save(table._id!, {}) - expect(row.age).toEqual(25) + isInternal && + it("should increment auto ID correctly when creating rows in parallel", async () => { + const table = await config.api.table.save( + saveTableRequest({ + schema: { + "Row ID": { + name: "Row ID", + type: FieldType.NUMBER, + subtype: AutoFieldSubType.AUTO_ID, + icon: "ri-magic-line", + autocolumn: true, + constraints: { + type: "number", + presence: true, + numericality: { + greaterThanOrEqualTo: "", + lessThanOrEqualTo: "", + }, + }, + }, + }, + }) + ) + + const sequence = Array(50) + .fill(0) + .map((_, i) => i + 1) + + // This block of code is simulating users creating auto ID rows at the + // same time. It's expected that this operation will sometimes return + // a document conflict error (409), but the idea is to retry in those + // situations. The code below does this a large number of times with + // small, random delays between them to try and get through the list + // as quickly as possible. + await Promise.all( + sequence.map(async () => { + const attempts = 30 + for (let attempt = 0; attempt < attempts; attempt++) { + try { + await config.api.row.save(table._id!, {}) + return + } catch (e) { + await new Promise(r => setTimeout(r, Math.random() * 50)) + } + } + throw new Error( + `Failed to create row after ${attempts} attempts` + ) + }) + ) + + const rows = await config.api.row.fetch(table._id!) + expect(rows).toHaveLength(50) + + // The main purpose of this test is to ensure that even under pressure, + // we maintain data integrity. An auto ID column should hand out + // monotonically increasing unique integers no matter what. + const ids = rows.map(r => r["Row ID"]) + expect(ids).toEqual(expect.arrayContaining(sequence)) }) - describe("invalid default value", () => { + isInternal && + it("doesn't allow creating in user table", async () => { + const response = await config.api.row.save( + InternalTable.USER_METADATA, + { + firstName: "Joe", + lastName: "Joe", + email: "joe@joe.com", + roles: {}, + }, + { status: 400 } + ) + expect(response.message).toBe("Cannot create new user entry.") + }) + + it("should not mis-parse date string out of JSON", async () => { + const table = await config.api.table.save( + saveTableRequest({ + schema: { + name: { + type: FieldType.STRING, + name: "name", + }, + }, + }) + ) + + const row = await config.api.row.save(table._id!, { + name: `{ "foo": "2023-01-26T11:48:57.000Z" }`, + }) + + expect(row.name).toEqual(`{ "foo": "2023-01-26T11:48:57.000Z" }`) + }) + + describe("default values", () => { + let table: Table + + describe("string column", () => { + beforeAll(async () => { + table = await config.api.table.save( + saveTableRequest({ + schema: { + description: { + name: "description", + type: FieldType.STRING, + default: "default description", + }, + }, + }) + ) + }) + + it("creates a new row with a default value successfully", async () => { + const row = await config.api.row.save(table._id!, {}) + expect(row.description).toEqual("default description") + }) + + it("does not use default value if value specified", async () => { + const row = await config.api.row.save(table._id!, { + description: "specified description", + }) + expect(row.description).toEqual("specified description") + }) + + it("uses the default value if value is null", async () => { + const row = await config.api.row.save(table._id!, { + description: null, + }) + 
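// Aside: the parallel auto ID test above hammers row creation concurrently and
// simply retries a failed save after a short random delay, riding out the
// document-conflict (409) errors the comment describes. A minimal standalone
// sketch of that retry-with-jitter loop — the operation passed in and the
// parameter names are illustrative, not Budibase internals:
async function retryOnConflict<T>(
  op: () => Promise<T>,
  attempts = 30,
  maxJitterMs = 50
): Promise<T> {
  let lastError: unknown
  for (let attempt = 0; attempt < attempts; attempt++) {
    try {
      return await op()
    } catch (err) {
      lastError = err
      // short random back-off before the next attempt, mirroring the test above
      await new Promise(resolve =>
        setTimeout(resolve, Math.random() * maxJitterMs)
      )
    }
  }
  throw lastError
}
// usage sketch: await retryOnConflict(() => config.api.row.save(table._id!, {}))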
expect(row.description).toEqual("default description") + }) + + it("uses the default value if value is undefined", async () => { + const row = await config.api.row.save(table._id!, { + description: undefined, + }) + expect(row.description).toEqual("default description") + }) + }) + + describe("number column", () => { beforeAll(async () => { table = await config.api.table.save( saveTableRequest({ @@ -790,2589 +444,3016 @@ describe.each(databases)("/rows (%s)", dbName => { age: { name: "age", type: FieldType.NUMBER, - default: `{{ capitalize "invalid" }}`, + default: "25", }, }, }) ) }) - it("throws an error when invalid default value", async () => { + it("creates a new row with a default value successfully", async () => { + const row = await config.api.row.save(table._id!, {}) + expect(row.age).toEqual(25) + }) + + it("does not use default value if value specified", async () => { + const row = await config.api.row.save(table._id!, { + age: 30, + }) + expect(row.age).toEqual(30) + }) + }) + + describe("date column", () => { + it("creates a row with a default value successfully", async () => { + const table = await config.api.table.save( + saveTableRequest({ + schema: { + date: { + name: "date", + type: FieldType.DATETIME, + default: "2023-01-26T11:48:57.000Z", + }, + }, + }) + ) + const row = await config.api.row.save(table._id!, {}) + expect(row.date).toEqual("2023-01-26T11:48:57.000Z") + }) + + it("gives an error if the default value is invalid", async () => { + const table = await config.api.table.save( + saveTableRequest({ + schema: { + date: { + name: "date", + type: FieldType.DATETIME, + default: "invalid", + }, + }, + }) + ) await config.api.row.save( table._id!, {}, { status: 400, body: { - message: - "Invalid default value for field 'age' - Invalid number value \"Invalid\"", + message: `Invalid default value for field 'date' - Invalid date value: "invalid"`, }, } ) }) }) - }) - }) - }) - describe("relations to same table", () => { - let relatedRows: Row[] + describe("options column", () => { + beforeAll(async () => { + table = await config.api.table.save( + saveTableRequest({ + schema: { + status: { + name: "status", + type: FieldType.OPTIONS, + default: "requested", + constraints: { + inclusion: ["requested", "approved"], + }, + }, + }, + }) + ) + }) - beforeAll(async () => { - const relatedTable = await config.api.table.save( - defaultTable({ - schema: { - name: { name: "name", type: FieldType.STRING }, - }, + it("creates a new row with a default value successfully", async () => { + const row = await config.api.row.save(table._id!, {}) + expect(row.status).toEqual("requested") + }) + + it("does not use default value if value specified", async () => { + const row = await config.api.row.save(table._id!, { + status: "approved", + }) + expect(row.status).toEqual("approved") + }) }) - ) - const relatedTableId = relatedTable._id! 
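// Aside: a rough sketch of the default-value rules these tests exercise — the
// column default is applied when the incoming value is missing, null,
// undefined, or (for array columns) an empty list, and never when a real value
// is supplied. Illustrative only, not the row engine's actual code:
interface ColumnWithDefault {
  type: string
  default?: unknown
}

function resolveValue(column: ColumnWithDefault, supplied: unknown): unknown {
  // the empty-list rule is shown by the array-column tests in this spec
  const isEmptyArray = Array.isArray(supplied) && supplied.length === 0
  if (supplied == null || isEmptyArray) {
    // `== null` covers both null and undefined
    return column.default
  }
  return supplied
}

// resolveValue({ type: "string", default: "default description" }, null)
//   -> "default description"
// resolveValue({ type: "array", default: ["apple", "orange"] }, [])
//   -> ["apple", "orange"]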
- table = await config.api.table.save( - defaultTable({ - schema: { - name: { name: "name", type: FieldType.STRING }, - related1: { - type: FieldType.LINK, - name: "related1", - fieldName: "main1", - tableId: relatedTableId, - relationshipType: RelationshipType.MANY_TO_MANY, - }, - related2: { - type: FieldType.LINK, - name: "related2", - fieldName: "main2", - tableId: relatedTableId, - relationshipType: RelationshipType.MANY_TO_MANY, - }, - }, + + describe("array column", () => { + beforeAll(async () => { + table = await config.api.table.save( + saveTableRequest({ + schema: { + food: { + name: "food", + type: FieldType.ARRAY, + default: ["apple", "orange"], + constraints: { + type: JsonFieldSubType.ARRAY, + inclusion: ["apple", "orange", "banana"], + }, + }, + }, + }) + ) + }) + + it("creates a new row with a default value successfully", async () => { + const row = await config.api.row.save(table._id!, {}) + expect(row.food).toEqual(["apple", "orange"]) + }) + + it("creates a new row with a default value when given an empty list", async () => { + const row = await config.api.row.save(table._id!, { food: [] }) + expect(row.food).toEqual(["apple", "orange"]) + }) + + it("does not use default value if value specified", async () => { + const row = await config.api.row.save(table._id!, { + food: ["orange"], + }) + expect(row.food).toEqual(["orange"]) + }) + + it("resets back to its default value when empty", async () => { + let row = await config.api.row.save(table._id!, { + food: ["orange"], + }) + row = await config.api.row.save(table._id!, { ...row, food: [] }) + expect(row.food).toEqual(["apple", "orange"]) + }) }) - ) - relatedRows = await Promise.all([ - config.api.row.save(relatedTableId, { name: "foo" }), - config.api.row.save(relatedTableId, { name: "bar" }), - config.api.row.save(relatedTableId, { name: "baz" }), - config.api.row.save(relatedTableId, { name: "boo" }), - ]) - }) - it("can create rows with both relationships", async () => { - const row = await config.api.row.save(table._id!, { - name: "test", - related1: [relatedRows[0]._id!], - related2: [relatedRows[1]._id!], - }) + describe("user column", () => { + beforeAll(async () => { + table = await config.api.table.save( + saveTableRequest({ + schema: { + user: { + name: "user", + type: FieldType.BB_REFERENCE_SINGLE, + subtype: BBReferenceFieldSubType.USER, + default: "{{ [Current User]._id }}", + }, + }, + }) + ) + }) - expect(row).toEqual( - expect.objectContaining({ - name: "test", - related1: [ - { - _id: relatedRows[0]._id, - primaryDisplay: relatedRows[0].name, - }, - ], - related2: [ - { - _id: relatedRows[1]._id, - primaryDisplay: relatedRows[1].name, - }, - ], + it("creates a new row with a default value successfully", async () => { + const row = await config.api.row.save(table._id!, {}) + expect(row.user._id).toEqual(config.getUser()._id) + }) + + it("does not use default value if value specified", async () => { + const id = `us_${utils.newid()}` + await config.createUser({ _id: id }) + const row = await config.api.row.save(table._id!, { + user: id, + }) + expect(row.user._id).toEqual(id) + }) }) - ) - }) - it("can create rows with no relationships", async () => { - const row = await config.api.row.save(table._id!, { - name: "test", - }) + describe("multi-user column", () => { + beforeAll(async () => { + table = await config.api.table.save( + saveTableRequest({ + schema: { + users: { + name: "users", + type: FieldType.BB_REFERENCE, + subtype: BBReferenceFieldSubType.USER, + default: ["{{ [Current User]._id }}"], + }, 
+ }, + }) + ) + }) - expect(row.related1).toBeUndefined() - expect(row.related2).toBeUndefined() - }) + it("creates a new row with a default value successfully", async () => { + const row = await config.api.row.save(table._id!, {}) + expect(row.users).toHaveLength(1) + expect(row.users[0]._id).toEqual(config.getUser()._id) + }) - it("can create rows with only one relationships field", async () => { - const row = await config.api.row.save(table._id!, { - name: "test", - related1: [], - related2: [relatedRows[1]._id!], - }) - - expect(row).toEqual( - expect.objectContaining({ - name: "test", - related2: [ - { - _id: relatedRows[1]._id, - primaryDisplay: relatedRows[1].name, - }, - ], + it("does not use default value if value specified", async () => { + const id = `us_${utils.newid()}` + await config.createUser({ _id: id }) + const row = await config.api.row.save(table._id!, { + users: [id], + }) + expect(row.users).toHaveLength(1) + expect(row.users[0]._id).toEqual(id) + }) }) - ) - expect(row.related1).toBeUndefined() - }) - }) - }) - describe("get", () => { - it("reads an existing row successfully", async () => { - const existing = await config.api.row.save(table._id!, {}) + describe("boolean column", () => { + beforeAll(async () => { + table = await config.api.table.save( + saveTableRequest({ + schema: { + active: { + name: "active", + type: FieldType.BOOLEAN, + default: "true", + }, + }, + }) + ) + }) - const res = await config.api.row.get(table._id!, existing._id!) + it("creates a new row with a default value successfully", async () => { + const row = await config.api.row.save(table._id!, {}) + expect(row.active).toEqual(true) + }) - expect(res).toEqual({ - ...existing, - ...defaultRowFields, - }) - }) - - it("returns 404 when row does not exist", async () => { - const table = await config.api.table.save(defaultTable()) - await config.api.row.save(table._id!, {}) - await config.api.row.get(table._id!, "1234567", { - status: 404, - }) - }) - - isInternal && - it("can search row from user table", async () => { - const res = await config.api.row.get( - InternalTables.USER_METADATA, - config.userMetadataId! - ) - - expect(res).toEqual({ - ...config.getUser(), - _id: config.userMetadataId!, - _rev: expect.any(String), - roles: undefined, - roleId: "ADMIN", - tableId: InternalTables.USER_METADATA, - }) - }) - }) - - describe("fetch", () => { - it("fetches all rows for given tableId", async () => { - const table = await config.api.table.save(defaultTable()) - const rows = await Promise.all([ - config.api.row.save(table._id!, {}), - config.api.row.save(table._id!, {}), - ]) - - const res = await config.api.row.fetch(table._id!) 
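// Aside: defaults like `{{ sum 10 10 5 }}` and `{{ [Current User]._id }}` are
// template bindings that are evaluated before the row is written, and the
// resulting string is then coerced to the column type (the number column above
// comes back as 25, not "25", the boolean default "true" becomes true, while
// bigint defaults stay as strings). A loose standalone sketch with handlebars —
// the `sum` helper is registered here purely for illustration; the real helper
// set lives in Budibase's templating library and may behave differently.
import Handlebars from "handlebars"

const hbs = Handlebars.create()
hbs.registerHelper("sum", (...args: unknown[]) => {
  // handlebars passes an options object as the final argument — drop it
  const numbers = args.slice(0, -1).map(Number)
  return numbers.reduce((total, n) => total + n, 0)
})

function evaluateDefault(template: string, context: Record<string, unknown>) {
  return hbs.compile(template)(context) // always yields a string
}

const raw = evaluateDefault(`{{ sum 10 10 5 }}`, {}) // "25"
const coerced = Number(raw) // 25 — the value the number-column test expects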
- expect(res.map(r => r._id)).toEqual( - expect.arrayContaining(rows.map(r => r._id)) - ) - }) - - it("returns 404 when table does not exist", async () => { - await config.api.row.fetch("1234567", { status: 404 }) - }) - }) - - describe("update", () => { - it("updates an existing row successfully", async () => { - const existing = await config.api.row.save(table._id!, {}) - const rowUsage = await getRowUsage() - - const res = await config.api.row.save(table._id!, { - _id: existing._id, - _rev: existing._rev, - name: "Updated Name", - }) - - expect(res.name).toEqual("Updated Name") - await assertRowUsage(rowUsage) - }) - - !isInternal && - it("can update a row on an external table with a primary key", async () => { - const tableName = uuid.v4().substring(0, 10) - await client!.schema.createTable(tableName, table => { - table.increments("id").primary() - table.string("name") - }) - - const res = await config.api.datasource.fetchSchema({ - datasourceId: datasource!._id!, - }) - const table = res.datasource.entities![tableName] - - const row = await config.api.row.save(table._id!, { - id: 1, - name: "Row 1", - }) - - const updatedRow = await config.api.row.save(table._id!, { - _id: row._id!, - name: "Row 1 Updated", - }) - - expect(updatedRow.name).toEqual("Row 1 Updated") - - const rows = await config.api.row.fetch(table._id!) - expect(rows).toHaveLength(1) - }) - - describe("relations to same table", () => { - let relatedRows: Row[] - - beforeAll(async () => { - const relatedTable = await config.api.table.save( - defaultTable({ - schema: { - name: { name: "name", type: FieldType.STRING }, - }, + it("does not use default value if value specified", async () => { + const row = await config.api.row.save(table._id!, { + active: false, + }) + expect(row.active).toEqual(false) + }) }) - ) - const relatedTableId = relatedTable._id! 
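// Aside: the external-table tests in this spec provision a real SQL table
// through the Knex client and only then ask Budibase to discover it via
// fetchSchema. A condensed, standalone sketch of that provisioning step — the
// connection settings below are placeholders, not what the datasource
// provider actually uses:
import knex from "knex"

async function createNamedTable(tableName: string) {
  const client = knex({
    client: "pg", // the suite runs against several SQL engines; pg is just an example
    connection: { host: "localhost", user: "test", password: "test", database: "test" },
  })
  try {
    await client.schema.createTable(tableName, table => {
      table.increments("id").primary() // auto-incrementing primary key
      table.string("name")
    })
  } finally {
    await client.destroy()
  }
}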
- table = await config.api.table.save( - defaultTable({ - schema: { - name: { name: "name", type: FieldType.STRING }, - related1: { - type: FieldType.LINK, - name: "related1", - fieldName: "main1", - tableId: relatedTableId, - relationshipType: RelationshipType.MANY_TO_MANY, - }, - related2: { - type: FieldType.LINK, - name: "related2", - fieldName: "main2", - tableId: relatedTableId, - relationshipType: RelationshipType.MANY_TO_MANY, - }, - }, + + describe("bigint column", () => { + beforeAll(async () => { + table = await config.api.table.save( + saveTableRequest({ + schema: { + bigNumber: { + name: "bigNumber", + type: FieldType.BIGINT, + default: "1234567890", + }, + }, + }) + ) + }) + + it("creates a new row with a default value successfully", async () => { + const row = await config.api.row.save(table._id!, {}) + expect(row.bigNumber).toEqual("1234567890") + }) + + it("does not use default value if value specified", async () => { + const row = await config.api.row.save(table._id!, { + bigNumber: "9876543210", + }) + expect(row.bigNumber).toEqual("9876543210") + }) }) - ) - relatedRows = await Promise.all([ - config.api.row.save(relatedTableId, { name: "foo" }), - config.api.row.save(relatedTableId, { name: "bar" }), - config.api.row.save(relatedTableId, { name: "baz" }), - config.api.row.save(relatedTableId, { name: "boo" }), - ]) - }) - it("can edit rows with both relationships", async () => { - let row = await config.api.row.save(table._id!, { - name: "test", - related1: [relatedRows[0]._id!], - related2: [relatedRows[1]._id!], - }) + describe("bindings", () => { + describe("string column", () => { + beforeAll(async () => { + table = await config.api.table.save( + saveTableRequest({ + schema: { + description: { + name: "description", + type: FieldType.STRING, + default: `{{ date now "YYYY-MM-DDTHH:mm:ss" }}`, + }, + }, + }) + ) + }) - row = await config.api.row.save(table._id!, { - ...row, - related1: [relatedRows[0]._id!, relatedRows[1]._id!], - related2: [relatedRows[2]._id!], - }) + it("can use bindings in default values", async () => { + const row = await config.api.row.save(table._id!, {}) + expect(row.description).toMatch( + /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}/ + ) + }) - expect(row).toEqual( - expect.objectContaining({ - name: "test", - related1: expect.arrayContaining([ - { - _id: relatedRows[0]._id, - primaryDisplay: relatedRows[0].name, - }, - { - _id: relatedRows[1]._id, - primaryDisplay: relatedRows[1].name, - }, - ]), - related2: [ - { - _id: relatedRows[2]._id, - primaryDisplay: relatedRows[2].name, - }, - ], + it("does not use default value if value specified", async () => { + const row = await config.api.row.save(table._id!, { + description: "specified description", + }) + expect(row.description).toEqual("specified description") + }) + + it("can bind the current user", async () => { + const table = await config.api.table.save( + saveTableRequest({ + schema: { + user: { + name: "user", + type: FieldType.STRING, + default: `{{ [Current User]._id }}`, + }, + }, + }) + ) + const row = await config.api.row.save(table._id!, {}) + expect(row.user).toEqual(config.getUser()._id) + }) + + it("cannot access current user password", async () => { + const table = await config.api.table.save( + saveTableRequest({ + schema: { + user: { + name: "user", + type: FieldType.STRING, + default: `{{ user.password }}`, + }, + }, + }) + ) + const row = await config.api.row.save(table._id!, {}) + // For some reason it's null for internal tables, and undefined for + // external. 
+ expect(row.user == null).toBe(true) + }) + }) + + describe("number column", () => { + beforeAll(async () => { + table = await config.api.table.save( + saveTableRequest({ + schema: { + age: { + name: "age", + type: FieldType.NUMBER, + default: `{{ sum 10 10 5 }}`, + }, + }, + }) + ) + }) + + it("can use bindings in default values", async () => { + const row = await config.api.row.save(table._id!, {}) + expect(row.age).toEqual(25) + }) + + describe("invalid default value", () => { + beforeAll(async () => { + table = await config.api.table.save( + saveTableRequest({ + schema: { + age: { + name: "age", + type: FieldType.NUMBER, + default: `{{ capitalize "invalid" }}`, + }, + }, + }) + ) + }) + + it("throws an error when invalid default value", async () => { + await config.api.row.save( + table._id!, + {}, + { + status: 400, + body: { + message: + "Invalid default value for field 'age' - Invalid number value \"Invalid\"", + }, + } + ) + }) + }) + }) }) - ) - }) - - it("can drop existing relationship", async () => { - let row = await config.api.row.save(table._id!, { - name: "test", - related1: [relatedRows[0]._id!], - related2: [relatedRows[1]._id!], }) - row = await config.api.row.save(table._id!, { - ...row, - related1: [], - related2: [relatedRows[2]._id!], - }) - - expect(row).toEqual( - expect.objectContaining({ - name: "test", - related2: [ - { - _id: relatedRows[2]._id, - primaryDisplay: relatedRows[2].name, - }, - ], - }) - ) - expect(row.related1).toBeUndefined() - }) - - it("can drop both relationships", async () => { - let row = await config.api.row.save(table._id!, { - name: "test", - related1: [relatedRows[0]._id!], - related2: [relatedRows[1]._id!], - }) - - row = await config.api.row.save(table._id!, { - ...row, - related1: [], - related2: [], - }) - - expect(row).toEqual( - expect.objectContaining({ - name: "test", - }) - ) - expect(row.related1).toBeUndefined() - expect(row.related2).toBeUndefined() - }) - }) - }) - - describe("patch", () => { - let otherTable: Table - - beforeAll(async () => { - table = await config.api.table.save(defaultTable()) - otherTable = await config.api.table.save( - defaultTable({ - schema: { - relationship: { - name: "relationship", - relationshipType: RelationshipType.ONE_TO_MANY, - type: FieldType.LINK, - tableId: table._id!, - fieldName: "relationship", - }, - }, - }) - ) - }) - - it("should update only the fields that are supplied", async () => { - const existing = await config.api.row.save(table._id!, {}) - - const rowUsage = await getRowUsage() - - const row = await config.api.row.patch(table._id!, { - _id: existing._id!, - _rev: existing._rev!, - tableId: table._id!, - name: "Updated Name", - }) - - expect(row.name).toEqual("Updated Name") - expect(row.description).toEqual(existing.description) - - const savedRow = await config.api.row.get(table._id!, row._id!) 
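// Aside: the patch tests around this point assert that PATCH only touches the
// fields present in the request — unspecified columns (including link columns)
// keep their stored values. A tiny illustrative merge, not the actual row
// controller logic:
function applyPatch<T extends Record<string, unknown>>(
  existing: T,
  patch: Partial<T>
): T {
  const result = { ...existing }
  for (const [key, value] of Object.entries(patch)) {
    if (value !== undefined) {
      // only keys that were actually supplied overwrite the stored row
      ;(result as Record<string, unknown>)[key] = value
    }
  }
  return result
}

// applyPatch({ name: "test", user1: ["a"] }, { name: "Updated Name" })
//   -> { name: "Updated Name", user1: ["a"] }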
- - expect(savedRow.description).toEqual(existing.description) - expect(savedRow.name).toEqual("Updated Name") - await assertRowUsage(rowUsage) - }) - - it("should update only the fields that are supplied and emit the correct oldRow", async () => { - let beforeRow = await config.api.row.save(table._id!, { - name: "test", - description: "test", - }) - const opts = { - name: "row:update", - matchFn: (event: UpdatedRowEventEmitter) => - event.row._id === beforeRow._id, - } - const event = await waitForEvent(opts, async () => { - await config.api.row.patch(table._id!, { - _id: beforeRow._id!, - _rev: beforeRow._rev!, - tableId: table._id!, - name: "Updated Name", - }) - }) - - expect(event.oldRow).toBeDefined() - expect(event.oldRow.name).toEqual("test") - expect(event.row.name).toEqual("Updated Name") - expect(event.oldRow.description).toEqual(beforeRow.description) - expect(event.row.description).toEqual(beforeRow.description) - }) - - it("should throw an error when given improper types", async () => { - const existing = await config.api.row.save(table._id!, {}) - const rowUsage = await getRowUsage() - - await config.api.row.patch( - table._id!, - { - _id: existing._id!, - _rev: existing._rev!, - tableId: table._id!, - name: 1, - }, - { status: 400 } - ) - - await assertRowUsage(rowUsage) - }) - - it("should not overwrite links if those links are not set", async () => { - let linkField: FieldSchema = { - type: FieldType.LINK, - name: "", - fieldName: "", - constraints: { - type: "array", - presence: false, - }, - relationshipType: RelationshipType.ONE_TO_MANY, - tableId: InternalTable.USER_METADATA, - } - - let table = await config.api.table.save({ - name: "TestTable", - type: "table", - sourceType: TableSourceType.INTERNAL, - sourceId: INTERNAL_TABLE_SOURCE_ID, - schema: { - user1: { ...linkField, name: "user1", fieldName: "user1" }, - user2: { ...linkField, name: "user2", fieldName: "user2" }, - }, - }) - - let user1 = await config.createUser() - let user2 = await config.createUser() - - let row = await config.api.row.save(table._id!, { - user1: [{ _id: user1._id }], - user2: [{ _id: user2._id }], - }) - - let getResp = await config.api.row.get(table._id!, row._id!) - expect(getResp.user1[0]._id).toEqual(user1._id) - expect(getResp.user2[0]._id).toEqual(user2._id) - - let patchResp = await config.api.row.patch(table._id!, { - _id: row._id!, - _rev: row._rev!, - tableId: table._id!, - user1: [{ _id: user2._id }], - }) - expect(patchResp.user1[0]._id).toEqual(user2._id) - expect(patchResp.user2[0]._id).toEqual(user2._id) - - getResp = await config.api.row.get(table._id!, row._id!) 
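// Aside: the oldRow test above uses waitForEvent to race an event subscription
// against the action that should trigger it, resolving with the first payload
// that matches. A bare-bones standalone version over a plain Node
// EventEmitter — the real helper wires into Budibase's event layer, which is
// not shown here:
import { EventEmitter } from "node:events"

function waitForMatchingEvent<T>(
  emitter: EventEmitter,
  eventName: string,
  matchFn: (payload: T) => boolean,
  timeoutMs = 5000
): Promise<T> {
  return new Promise((resolve, reject) => {
    const timer = setTimeout(() => {
      emitter.off(eventName, listener)
      reject(new Error(`timed out waiting for ${eventName}`))
    }, timeoutMs)
    function listener(payload: T) {
      if (!matchFn(payload)) {
        return // keep listening until the payload we care about arrives
      }
      clearTimeout(timer)
      emitter.off(eventName, listener)
      resolve(payload)
    }
    emitter.on(eventName, listener)
  })
}

// usage sketch:
//   const eventPromise = waitForMatchingEvent(emitter, "row:update", e => e.row._id === id)
//   await doTheUpdate()
//   const event = await eventPromise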
- expect(getResp.user1[0]._id).toEqual(user2._id) - expect(getResp.user2[0]._id).toEqual(user2._id) - }) - - it("should be able to remove a relationship from many side", async () => { - const row = await config.api.row.save(otherTable._id!, { - name: "test", - description: "test", - }) - const row2 = await config.api.row.save(otherTable._id!, { - name: "test", - description: "test", - }) - const { _id } = await config.api.row.save(table._id!, { - relationship: [{ _id: row._id }, { _id: row2._id }], - }) - const relatedRow = await config.api.row.get(table._id!, _id!, { - status: 200, - }) - expect(relatedRow.relationship.length).toEqual(2) - await config.api.row.save(table._id!, { - ...relatedRow, - relationship: [{ _id: row._id }], - }) - const afterRelatedRow = await config.api.row.get(table._id!, _id!, { - status: 200, - }) - expect(afterRelatedRow.relationship.length).toEqual(1) - expect(afterRelatedRow.relationship[0]._id).toEqual(row._id) - }) - - it("should be able to update relationships when both columns are same name", async () => { - let row = await config.api.row.save(table._id!, { - name: "test", - description: "test", - }) - let row2 = await config.api.row.save(otherTable._id!, { - name: "test", - description: "test", - relationship: [row._id], - }) - row = await config.api.row.get(table._id!, row._id!) - expect(row.relationship.length).toBe(1) - const resp = await config.api.row.patch(table._id!, { - _id: row._id!, - _rev: row._rev!, - tableId: row.tableId!, - name: "test2", - relationship: [row2._id], - }) - expect(resp.relationship.length).toBe(1) - }) - - !isInternal && - // MSSQL needs a setting called IDENTITY_INSERT to be set to ON to allow writing - // to identity columns. This is not something Budibase does currently. - !isMSSQL && - it("should support updating fields that are part of a composite key", async () => { - const tableRequest = saveTableRequest({ - primary: ["number", "string"], - schema: { - string: { - type: FieldType.STRING, - name: "string", - }, - number: { - type: FieldType.NUMBER, - name: "number", - }, - }, - }) - - delete tableRequest.schema.id - - const table = await config.api.table.save(tableRequest) - - const stringValue = generator.word() - - // MySQL and MariaDB auto-increment fields have a minimum value of 1. If - // you try to save a row with a value of 0 it will use 1 instead. - const naturalValue = generator.integer({ min: 1, max: 1000 }) - - const existing = await config.api.row.save(table._id!, { - string: stringValue, - number: naturalValue, - }) - - expect(existing._id).toEqual(`%5B${naturalValue}%2C'${stringValue}'%5D`) - - const row = await config.api.row.patch(table._id!, { - _id: existing._id!, - _rev: existing._rev!, - tableId: table._id!, - string: stringValue, - number: 1500, - }) - - expect(row._id).toEqual(`%5B${"1500"}%2C'${stringValue}'%5D`) - }) - }) - - describe("destroy", () => { - beforeAll(async () => { - table = await config.api.table.save(defaultTable()) - }) - - it("should be able to delete a row", async () => { - const createdRow = await config.api.row.save(table._id!, {}) - const rowUsage = await getRowUsage() - - const res = await config.api.row.bulkDelete(table._id!, { - rows: [createdRow], - }) - expect(res[0]._id).toEqual(createdRow._id) - await assertRowUsage(isInternal ? 
rowUsage - 1 : rowUsage) - }) - - it("should be able to delete a row with ID only", async () => { - const createdRow = await config.api.row.save(table._id!, {}) - const rowUsage = await getRowUsage() - - const res = await config.api.row.bulkDelete(table._id!, { - rows: [createdRow._id!], - }) - expect(res[0]._id).toEqual(createdRow._id) - expect(res[0].tableId).toEqual(table._id!) - await assertRowUsage(isInternal ? rowUsage - 1 : rowUsage) - }) - - it("should be able to bulk delete rows, including a row that doesn't exist", async () => { - const createdRow = await config.api.row.save(table._id!, {}) - const createdRow2 = await config.api.row.save(table._id!, {}) - - const res = await config.api.row.bulkDelete(table._id!, { - rows: [createdRow, createdRow2, { _id: "9999999" }], - }) - - expect(res.map(r => r._id)).toEqual( - expect.arrayContaining([createdRow._id, createdRow2._id]) - ) - expect(res.length).toEqual(2) - }) - - describe("relations to same table", () => { - let relatedRows: Row[] - - beforeAll(async () => { - const relatedTable = await config.api.table.save( - defaultTable({ - schema: { - name: { name: "name", type: FieldType.STRING }, - }, - }) - ) - const relatedTableId = relatedTable._id! - table = await config.api.table.save( - defaultTable({ - schema: { - name: { name: "name", type: FieldType.STRING }, - related1: { - type: FieldType.LINK, - name: "related1", - fieldName: "main1", - tableId: relatedTableId, - relationshipType: RelationshipType.MANY_TO_MANY, - }, - related2: { - type: FieldType.LINK, - name: "related2", - fieldName: "main2", - tableId: relatedTableId, - relationshipType: RelationshipType.MANY_TO_MANY, - }, - }, - }) - ) - relatedRows = await Promise.all([ - config.api.row.save(relatedTableId, { name: "foo" }), - config.api.row.save(relatedTableId, { name: "bar" }), - config.api.row.save(relatedTableId, { name: "baz" }), - config.api.row.save(relatedTableId, { name: "boo" }), - ]) - }) - - it("can delete rows with both relationships", async () => { - const row = await config.api.row.save(table._id!, { - name: "test", - related1: [relatedRows[0]._id!], - related2: [relatedRows[1]._id!], - }) - - await config.api.row.delete(table._id!, { _id: row._id! }) - - await config.api.row.get(table._id!, row._id!, { status: 404 }) - }) - - it("can delete rows with empty relationships", async () => { - const row = await config.api.row.save(table._id!, { - name: "test", - related1: [], - related2: [], - }) - - await config.api.row.delete(table._id!, { _id: row._id! 
}) - - await config.api.row.get(table._id!, row._id!, { status: 404 }) - }) - }) - }) - - describe("validate", () => { - beforeAll(async () => { - table = await config.api.table.save(defaultTable()) - }) - - it("should return no errors on valid row", async () => { - const rowUsage = await getRowUsage() - - const res = await config.api.row.validate(table._id!, { name: "ivan" }) - - expect(res.valid).toBe(true) - expect(Object.keys(res.errors)).toEqual([]) - await assertRowUsage(rowUsage) - }) - - it("should errors on invalid row", async () => { - const rowUsage = await getRowUsage() - - const res = await config.api.row.validate(table._id!, { name: 1 }) - - if (isInternal) { - expect(res.valid).toBe(false) - expect(Object.keys(res.errors)).toEqual(["name"]) - } else { - // Validation for external is not implemented, so it will always return valid - expect(res.valid).toBe(true) - expect(Object.keys(res.errors)).toEqual([]) - } - await assertRowUsage(rowUsage) - }) - }) - - describe("bulkDelete", () => { - beforeAll(async () => { - table = await config.api.table.save(defaultTable()) - }) - - it("should be able to delete a bulk set of rows", async () => { - const row1 = await config.api.row.save(table._id!, {}) - const row2 = await config.api.row.save(table._id!, {}) - const rowUsage = await getRowUsage() - - const res = await config.api.row.bulkDelete(table._id!, { - rows: [row1, row2], - }) - - expect(res.length).toEqual(2) - await config.api.row.get(table._id!, row1._id!, { status: 404 }) - await assertRowUsage(isInternal ? rowUsage - 2 : rowUsage) - }) - - it("should be able to delete a variety of row set types", async () => { - const [row1, row2, row3] = await Promise.all([ - config.api.row.save(table._id!, {}), - config.api.row.save(table._id!, {}), - config.api.row.save(table._id!, {}), - ]) - const rowUsage = await getRowUsage() - - const res = await config.api.row.bulkDelete(table._id!, { - rows: [row1, row2._id!, { _id: row3._id }], - }) - - expect(res.length).toEqual(3) - await config.api.row.get(table._id!, row1._id!, { status: 404 }) - await assertRowUsage(isInternal ? rowUsage - 3 : rowUsage) - }) - - it("should accept a valid row object and delete the row", async () => { - const row1 = await config.api.row.save(table._id!, {}) - const rowUsage = await getRowUsage() - - const res = await config.api.row.delete(table._id!, row1 as DeleteRow) - - expect(res.id).toEqual(row1._id) - await config.api.row.get(table._id!, row1._id!, { status: 404 }) - await assertRowUsage(isInternal ? 
rowUsage - 1 : rowUsage) - }) - - it.each([{ not: "valid" }, { rows: 123 }, "invalid"])( - "should ignore malformed/invalid delete request: %s", - async (request: any) => { - const rowUsage = await getRowUsage() - - await config.api.row.delete(table._id!, request, { - status: 400, - body: { - message: "Invalid delete rows request", - }, - }) - - await assertRowUsage(rowUsage) - } - ) - }) - - describe("bulkImport", () => { - isInternal && - it("should update Auto ID field after bulk import", async () => { - const table = await config.api.table.save( - saveTableRequest({ - primary: ["autoId"], - schema: { - autoId: { - name: "autoId", - type: FieldType.NUMBER, - subtype: AutoFieldSubType.AUTO_ID, - autocolumn: true, - constraints: { - type: "number", - presence: false, + describe("relations to same table", () => { + let relatedRows: Row[] + + beforeAll(async () => { + const relatedTable = await config.api.table.save( + defaultTable({ + schema: { + name: { name: "name", type: FieldType.STRING }, }, - }, - }, - }) - ) - - let row = await config.api.row.save(table._id!, {}) - expect(row.autoId).toEqual(1) - - await config.api.row.bulkImport(table._id!, { - rows: [{ autoId: 2 }], - }) - - row = await config.api.row.save(table._id!, {}) - expect(row.autoId).toEqual(3) - }) - - isInternal && - it("should reject bulkImporting relationship fields", async () => { - const table1 = await config.api.table.save(saveTableRequest()) - const table2 = await config.api.table.save( - saveTableRequest({ - schema: { - relationship: { - name: "relationship", - type: FieldType.LINK, - tableId: table1._id!, - relationshipType: RelationshipType.ONE_TO_MANY, - fieldName: "relationship", - }, - }, - }) - ) - - const table1Row1 = await config.api.row.save(table1._id!, {}) - await config.api.row.bulkImport( - table2._id!, - { - rows: [{ relationship: [table1Row1._id!] }], - }, - { - status: 400, - body: { - message: - 'Can\'t bulk import relationship fields for internal databases, found value in field "relationship"', - }, - } - ) - }) - - it("should be able to bulkImport rows", async () => { - const table = await config.api.table.save( - saveTableRequest({ - schema: { - name: { - type: FieldType.STRING, - name: "name", - }, - description: { - type: FieldType.STRING, - name: "description", - }, - }, - }) - ) - - const rowUsage = await getRowUsage() - - await config.api.row.bulkImport(table._id!, { - rows: [ - { - name: "Row 1", - description: "Row 1 description", - }, - { - name: "Row 2", - description: "Row 2 description", - }, - ], - }) - - const rows = await config.api.row.fetch(table._id!) - expect(rows.length).toEqual(2) - - rows.sort((a, b) => a.name.localeCompare(b.name)) - expect(rows[0].name).toEqual("Row 1") - expect(rows[0].description).toEqual("Row 1 description") - expect(rows[1].name).toEqual("Row 2") - expect(rows[1].description).toEqual("Row 2 description") - - await assertRowUsage(isInternal ? 
rowUsage + 2 : rowUsage) - }) - - isInternal && - it("should be able to update existing rows on bulkImport", async () => { - const table = await config.api.table.save( - saveTableRequest({ - schema: { - name: { - type: FieldType.STRING, - name: "name", - }, - description: { - type: FieldType.STRING, - name: "description", - }, - }, - }) - ) - - const existingRow = await config.api.row.save(table._id!, { - name: "Existing row", - description: "Existing description", - }) - - const rowUsage = await getRowUsage() - - await config.api.row.bulkImport(table._id!, { - rows: [ - { - name: "Row 1", - description: "Row 1 description", - }, - { ...existingRow, name: "Updated existing row" }, - { - name: "Row 2", - description: "Row 2 description", - }, - ], - identifierFields: ["_id"], - }) - - const rows = await config.api.row.fetch(table._id!) - expect(rows.length).toEqual(3) - - rows.sort((a, b) => a.name.localeCompare(b.name)) - expect(rows[0].name).toEqual("Row 1") - expect(rows[0].description).toEqual("Row 1 description") - expect(rows[1].name).toEqual("Row 2") - expect(rows[1].description).toEqual("Row 2 description") - expect(rows[2].name).toEqual("Updated existing row") - expect(rows[2].description).toEqual("Existing description") - - await assertRowUsage(rowUsage + 2) - }) - - isInternal && - it("should create new rows if not identifierFields are provided", async () => { - const table = await config.api.table.save( - saveTableRequest({ - schema: { - name: { - type: FieldType.STRING, - name: "name", - }, - description: { - type: FieldType.STRING, - name: "description", - }, - }, - }) - ) - - const existingRow = await config.api.row.save(table._id!, { - name: "Existing row", - description: "Existing description", - }) - - const rowUsage = await getRowUsage() - - await config.api.row.bulkImport(table._id!, { - rows: [ - { - name: "Row 1", - description: "Row 1 description", - }, - { ...existingRow, name: "Updated existing row" }, - { - name: "Row 2", - description: "Row 2 description", - }, - ], - }) - - const rows = await config.api.row.fetch(table._id!) - expect(rows.length).toEqual(4) - - rows.sort((a, b) => a.name.localeCompare(b.name)) - expect(rows[0].name).toEqual("Existing row") - expect(rows[0].description).toEqual("Existing description") - expect(rows[1].name).toEqual("Row 1") - expect(rows[1].description).toEqual("Row 1 description") - expect(rows[2].name).toEqual("Row 2") - expect(rows[2].description).toEqual("Row 2 description") - expect(rows[3].name).toEqual("Updated existing row") - expect(rows[3].description).toEqual("Existing description") - - await assertRowUsage(rowUsage + 3) - }) - - // Upserting isn't yet supported in MSSQL / Oracle, see: - // https://github.com/knex/knex/pull/6050 - !isMSSQL && - !isOracle && - it("should be able to update existing rows with bulkImport", async () => { - const table = await config.api.table.save( - saveTableRequest({ - primary: ["userId"], - schema: { - userId: { - type: FieldType.NUMBER, - name: "userId", - constraints: { - presence: true, + }) + ) + const relatedTableId = relatedTable._id! 
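// Aside: the bulkImport-with-identifierFields tests above rely on the
// database's native keyed upsert, which is why MSSQL and Oracle are skipped
// (see the knex PR referenced in the comments). A standalone sketch of that
// kind of upsert with knex — the table and column names are illustrative, not
// the generated test tables:
import { Knex } from "knex"

async function upsertByKey(
  client: Knex,
  tableName: string,
  identifierFields: string[],
  rows: Record<string, unknown>[]
) {
  // insert each row; on a key collision, update the non-key columns instead
  await client(tableName).insert(rows).onConflict(identifierFields).merge()
}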
+ table = await config.api.table.save( + defaultTable({ + schema: { + name: { name: "name", type: FieldType.STRING }, + related1: { + type: FieldType.LINK, + name: "related1", + fieldName: "main1", + tableId: relatedTableId, + relationshipType: RelationshipType.MANY_TO_MANY, + }, + related2: { + type: FieldType.LINK, + name: "related2", + fieldName: "main2", + tableId: relatedTableId, + relationshipType: RelationshipType.MANY_TO_MANY, + }, }, - }, - name: { - type: FieldType.STRING, - name: "name", - }, - description: { - type: FieldType.STRING, - name: "description", - }, - }, + }) + ) + relatedRows = await Promise.all([ + config.api.row.save(relatedTableId, { name: "foo" }), + config.api.row.save(relatedTableId, { name: "bar" }), + config.api.row.save(relatedTableId, { name: "baz" }), + config.api.row.save(relatedTableId, { name: "boo" }), + ]) }) - ) - const row1 = await config.api.row.save(table._id!, { - userId: 1, - name: "Row 1", - description: "Row 1 description", + it("can create rows with both relationships", async () => { + const row = await config.api.row.save(table._id!, { + name: "test", + related1: [relatedRows[0]._id!], + related2: [relatedRows[1]._id!], + }) + + expect(row).toEqual( + expect.objectContaining({ + name: "test", + related1: [ + { + _id: relatedRows[0]._id, + primaryDisplay: relatedRows[0].name, + }, + ], + related2: [ + { + _id: relatedRows[1]._id, + primaryDisplay: relatedRows[1].name, + }, + ], + }) + ) + }) + + it("can create rows with no relationships", async () => { + const row = await config.api.row.save(table._id!, { + name: "test", + }) + + expect(row.related1).toBeUndefined() + expect(row.related2).toBeUndefined() + }) + + it("can create rows with only one relationships field", async () => { + const row = await config.api.row.save(table._id!, { + name: "test", + related1: [], + related2: [relatedRows[1]._id!], + }) + + expect(row).toEqual( + expect.objectContaining({ + name: "test", + related2: [ + { + _id: relatedRows[1]._id, + primaryDisplay: relatedRows[1].name, + }, + ], + }) + ) + expect(row.related1).toBeUndefined() + }) }) - - const row2 = await config.api.row.save(table._id!, { - userId: 2, - name: "Row 2", - description: "Row 2 description", - }) - - await config.api.row.bulkImport(table._id!, { - identifierFields: ["userId"], - rows: [ - { - userId: row1.userId, - name: "Row 1 updated", - description: "Row 1 description updated", - }, - { - userId: row2.userId, - name: "Row 2 updated", - description: "Row 2 description updated", - }, - { - userId: 3, - name: "Row 3", - description: "Row 3 description", - }, - ], - }) - - const rows = await config.api.row.fetch(table._id!) 
- expect(rows.length).toEqual(3) - - rows.sort((a, b) => a.name.localeCompare(b.name)) - expect(rows[0].name).toEqual("Row 1 updated") - expect(rows[0].description).toEqual("Row 1 description updated") - expect(rows[1].name).toEqual("Row 2 updated") - expect(rows[1].description).toEqual("Row 2 description updated") - expect(rows[2].name).toEqual("Row 3") - expect(rows[2].description).toEqual("Row 3 description") }) - // Upserting isn't yet supported in MSSQL or Oracle, see: - // https://github.com/knex/knex/pull/6050 - !isMSSQL && - !isOracle && - !isInternal && - it("should be able to update existing rows with composite primary keys with bulkImport", async () => { - const tableName = uuid.v4() - await client?.schema.createTable(tableName, table => { - table.integer("companyId") - table.integer("userId") - table.string("name") - table.string("description") - table.primary(["companyId", "userId"]) + describe("get", () => { + it("reads an existing row successfully", async () => { + const existing = await config.api.row.save(table._id!, {}) + + const res = await config.api.row.get(table._id!, existing._id!) + + expect(res).toEqual({ + ...existing, + ...defaultRowFields, + }) }) - const resp = await config.api.datasource.fetchSchema({ - datasourceId: datasource!._id!, - }) - const table = resp.datasource.entities![tableName] - - const row1 = await config.api.row.save(table._id!, { - companyId: 1, - userId: 1, - name: "Row 1", - description: "Row 1 description", + it("returns 404 when row does not exist", async () => { + const table = await config.api.table.save(defaultTable()) + await config.api.row.save(table._id!, {}) + await config.api.row.get(table._id!, "1234567", { + status: 404, + }) }) - const row2 = await config.api.row.save(table._id!, { - companyId: 1, - userId: 2, - name: "Row 2", - description: "Row 2 description", - }) + isInternal && + it("can search row from user table", async () => { + const res = await config.api.row.get( + InternalTables.USER_METADATA, + config.userMetadataId! + ) - await config.api.row.bulkImport(table._id!, { - identifierFields: ["companyId", "userId"], - rows: [ - { - companyId: 1, - userId: row1.userId, - name: "Row 1 updated", - description: "Row 1 description updated", - }, - { - companyId: 1, - userId: row2.userId, - name: "Row 2 updated", - description: "Row 2 description updated", - }, - { - companyId: 1, - userId: 3, - name: "Row 3", - description: "Row 3 description", - }, - ], - }) - - const rows = await config.api.row.fetch(table._id!) 
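// Aside: the composite-key assertions earlier in this spec show the shape of
// external row _ids — the primary key values are joined into a bracketed
// tuple (strings single-quoted, numbers bare) and URI-encoded, so [1500,'foo']
// becomes %5B1500%2C'foo'%5D. A rough sketch of that encoding; the server's
// real id builder handles more cases than this:
function sketchExternalRowId(keyValues: (string | number)[]): string {
  const tuple = keyValues
    .map(value => (typeof value === "number" ? String(value) : `'${value}'`))
    .join(",")
  return encodeURIComponent(`[${tuple}]`)
}

// sketchExternalRowId([1500, "foo"]) -> "%5B1500%2C'foo'%5D"
// decoding is just decodeURIComponent, giving back "[1500,'foo']"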
- expect(rows.length).toEqual(3) - - rows.sort((a, b) => a.name.localeCompare(b.name)) - expect(rows[0].name).toEqual("Row 1 updated") - expect(rows[0].description).toEqual("Row 1 description updated") - expect(rows[1].name).toEqual("Row 2 updated") - expect(rows[1].description).toEqual("Row 2 description updated") - expect(rows[2].name).toEqual("Row 3") - expect(rows[2].description).toEqual("Row 3 description") + expect(res).toEqual({ + ...config.getUser(), + _id: config.userMetadataId!, + _rev: expect.any(String), + roles: undefined, + roleId: "ADMIN", + tableId: InternalTables.USER_METADATA, + }) + }) }) - // Upserting isn't yet supported in MSSQL/Oracle, see: - // https://github.com/knex/knex/pull/6050 - !isMSSQL && - !isOracle && - !isInternal && - it("should be able to update existing rows an autoID primary key", async () => { - const tableName = uuid.v4() - await client!.schema.createTable(tableName, table => { - table.increments("userId").primary() - table.string("name") + describe("fetch", () => { + it("fetches all rows for given tableId", async () => { + const table = await config.api.table.save(defaultTable()) + const rows = await Promise.all([ + config.api.row.save(table._id!, {}), + config.api.row.save(table._id!, {}), + ]) + + const res = await config.api.row.fetch(table._id!) + expect(res.map(r => r._id)).toEqual( + expect.arrayContaining(rows.map(r => r._id)) + ) }) - const resp = await config.api.datasource.fetchSchema({ - datasourceId: datasource!._id!, + it("returns 404 when table does not exist", async () => { + await config.api.row.fetch("1234567", { status: 404 }) }) - const table = resp.datasource.entities![tableName] - - const row1 = await config.api.row.save(table._id!, { - name: "Clare", - }) - - const row2 = await config.api.row.save(table._id!, { - name: "Jeff", - }) - - await config.api.row.bulkImport(table._id!, { - identifierFields: ["userId"], - rows: [ - { - userId: row1.userId, - name: "Clare updated", - }, - { - userId: row2.userId, - name: "Jeff updated", - }, - ], - }) - - const rows = await config.api.row.fetch(table._id!) 
- expect(rows.length).toEqual(2) - - rows.sort((a, b) => a.name.localeCompare(b.name)) - expect(rows[0].name).toEqual("Clare updated") - expect(rows[1].name).toEqual("Jeff updated") }) - }) - describe("enrich", () => { - beforeAll(async () => { - table = await config.api.table.save(defaultTable()) - }) + describe("update", () => { + it("updates an existing row successfully", async () => { + const existing = await config.api.row.save(table._id!, {}) + const rowUsage = await getRowUsage() - it("should allow enriching some linked rows", async () => { - const { linkedTable, firstRow, secondRow } = await tenancy.doInTenant( - config.getTenantId(), - async () => { - const linkedTable = await config.api.table.save( + const res = await config.api.row.save(table._id!, { + _id: existing._id, + _rev: existing._rev, + name: "Updated Name", + }) + + expect(res.name).toEqual("Updated Name") + await assertRowUsage(rowUsage) + }) + + !isInternal && + it("can update a row on an external table with a primary key", async () => { + const tableName = uuid.v4().substring(0, 10) + await client!.schema.createTable(tableName, table => { + table.increments("id").primary() + table.string("name") + }) + + const res = await config.api.datasource.fetchSchema({ + datasourceId: datasource!._id!, + }) + const table = res.datasource.entities![tableName] + + const row = await config.api.row.save(table._id!, { + id: 1, + name: "Row 1", + }) + + const updatedRow = await config.api.row.save(table._id!, { + _id: row._id!, + name: "Row 1 Updated", + }) + + expect(updatedRow.name).toEqual("Row 1 Updated") + + const rows = await config.api.row.fetch(table._id!) + expect(rows).toHaveLength(1) + }) + + describe("relations to same table", () => { + let relatedRows: Row[] + + beforeAll(async () => { + const relatedTable = await config.api.table.save( + defaultTable({ + schema: { + name: { name: "name", type: FieldType.STRING }, + }, + }) + ) + const relatedTableId = relatedTable._id! 
+ table = await config.api.table.save( + defaultTable({ + schema: { + name: { name: "name", type: FieldType.STRING }, + related1: { + type: FieldType.LINK, + name: "related1", + fieldName: "main1", + tableId: relatedTableId, + relationshipType: RelationshipType.MANY_TO_MANY, + }, + related2: { + type: FieldType.LINK, + name: "related2", + fieldName: "main2", + tableId: relatedTableId, + relationshipType: RelationshipType.MANY_TO_MANY, + }, + }, + }) + ) + relatedRows = await Promise.all([ + config.api.row.save(relatedTableId, { name: "foo" }), + config.api.row.save(relatedTableId, { name: "bar" }), + config.api.row.save(relatedTableId, { name: "baz" }), + config.api.row.save(relatedTableId, { name: "boo" }), + ]) + }) + + it("can edit rows with both relationships", async () => { + let row = await config.api.row.save(table._id!, { + name: "test", + related1: [relatedRows[0]._id!], + related2: [relatedRows[1]._id!], + }) + + row = await config.api.row.save(table._id!, { + ...row, + related1: [relatedRows[0]._id!, relatedRows[1]._id!], + related2: [relatedRows[2]._id!], + }) + + expect(row).toEqual( + expect.objectContaining({ + name: "test", + related1: expect.arrayContaining([ + { + _id: relatedRows[0]._id, + primaryDisplay: relatedRows[0].name, + }, + { + _id: relatedRows[1]._id, + primaryDisplay: relatedRows[1].name, + }, + ]), + related2: [ + { + _id: relatedRows[2]._id, + primaryDisplay: relatedRows[2].name, + }, + ], + }) + ) + }) + + it("can drop existing relationship", async () => { + let row = await config.api.row.save(table._id!, { + name: "test", + related1: [relatedRows[0]._id!], + related2: [relatedRows[1]._id!], + }) + + row = await config.api.row.save(table._id!, { + ...row, + related1: [], + related2: [relatedRows[2]._id!], + }) + + expect(row).toEqual( + expect.objectContaining({ + name: "test", + related2: [ + { + _id: relatedRows[2]._id, + primaryDisplay: relatedRows[2].name, + }, + ], + }) + ) + expect(row.related1).toBeUndefined() + }) + + it("can drop both relationships", async () => { + let row = await config.api.row.save(table._id!, { + name: "test", + related1: [relatedRows[0]._id!], + related2: [relatedRows[1]._id!], + }) + + row = await config.api.row.save(table._id!, { + ...row, + related1: [], + related2: [], + }) + + expect(row).toEqual( + expect.objectContaining({ + name: "test", + }) + ) + expect(row.related1).toBeUndefined() + expect(row.related2).toBeUndefined() + }) + }) + }) + + describe("patch", () => { + let otherTable: Table + + beforeAll(async () => { + table = await config.api.table.save(defaultTable()) + otherTable = await config.api.table.save( defaultTable({ schema: { - link: { - name: "link", - fieldName: "link", - type: FieldType.LINK, + relationship: { + name: "relationship", relationshipType: RelationshipType.ONE_TO_MANY, + type: FieldType.LINK, tableId: table._id!, + fieldName: "relationship", }, }, }) ) - const firstRow = await config.api.row.save(table._id!, { - name: "Test Contact", - description: "original description", - }) - const secondRow = await config.api.row.save(linkedTable._id!, { - name: "Test 2", - description: "og desc", - link: [{ _id: firstRow._id }], - }) - return { linkedTable, firstRow, secondRow } - } - ) - const rowUsage = await getRowUsage() + }) - // test basic enrichment - const resBasic = await config.api.row.get( - linkedTable._id!, - secondRow._id! 
- ) - expect(resBasic.link.length).toBe(1) - expect(resBasic.link[0]).toEqual({ - _id: firstRow._id, - primaryDisplay: firstRow.name, + it("should update only the fields that are supplied", async () => { + const existing = await config.api.row.save(table._id!, {}) + + const rowUsage = await getRowUsage() + + const row = await config.api.row.patch(table._id!, { + _id: existing._id!, + _rev: existing._rev!, + tableId: table._id!, + name: "Updated Name", + }) + + expect(row.name).toEqual("Updated Name") + expect(row.description).toEqual(existing.description) + + const savedRow = await config.api.row.get(table._id!, row._id!) + + expect(savedRow.description).toEqual(existing.description) + expect(savedRow.name).toEqual("Updated Name") + await assertRowUsage(rowUsage) + }) + + it("should update only the fields that are supplied and emit the correct oldRow", async () => { + let beforeRow = await config.api.row.save(table._id!, { + name: "test", + description: "test", + }) + const opts = { + name: "row:update", + matchFn: (event: UpdatedRowEventEmitter) => + event.row._id === beforeRow._id, + } + const event = await waitForEvent(opts, async () => { + await config.api.row.patch(table._id!, { + _id: beforeRow._id!, + _rev: beforeRow._rev!, + tableId: table._id!, + name: "Updated Name", + }) + }) + + expect(event.oldRow).toBeDefined() + expect(event.oldRow.name).toEqual("test") + expect(event.row.name).toEqual("Updated Name") + expect(event.oldRow.description).toEqual(beforeRow.description) + expect(event.row.description).toEqual(beforeRow.description) + }) + + it("should throw an error when given improper types", async () => { + const existing = await config.api.row.save(table._id!, {}) + const rowUsage = await getRowUsage() + + await config.api.row.patch( + table._id!, + { + _id: existing._id!, + _rev: existing._rev!, + tableId: table._id!, + name: 1, + }, + { status: 400 } + ) + + await assertRowUsage(rowUsage) + }) + + it("should not overwrite links if those links are not set", async () => { + let linkField: FieldSchema = { + type: FieldType.LINK, + name: "", + fieldName: "", + constraints: { + type: "array", + presence: false, + }, + relationshipType: RelationshipType.ONE_TO_MANY, + tableId: InternalTable.USER_METADATA, + } + + let table = await config.api.table.save({ + name: "TestTable", + type: "table", + sourceType: TableSourceType.INTERNAL, + sourceId: INTERNAL_TABLE_SOURCE_ID, + schema: { + user1: { ...linkField, name: "user1", fieldName: "user1" }, + user2: { ...linkField, name: "user2", fieldName: "user2" }, + }, + }) + + let user1 = await config.createUser() + let user2 = await config.createUser() + + let row = await config.api.row.save(table._id!, { + user1: [{ _id: user1._id }], + user2: [{ _id: user2._id }], + }) + + let getResp = await config.api.row.get(table._id!, row._id!) + expect(getResp.user1[0]._id).toEqual(user1._id) + expect(getResp.user2[0]._id).toEqual(user2._id) + + let patchResp = await config.api.row.patch(table._id!, { + _id: row._id!, + _rev: row._rev!, + tableId: table._id!, + user1: [{ _id: user2._id }], + }) + expect(patchResp.user1[0]._id).toEqual(user2._id) + expect(patchResp.user2[0]._id).toEqual(user2._id) + + getResp = await config.api.row.get(table._id!, row._id!) 
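+      // The patch above only supplied user1, so the user2 link should be left
+      // untouched rather than wiped by the partial update.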
+ expect(getResp.user1[0]._id).toEqual(user2._id) + expect(getResp.user2[0]._id).toEqual(user2._id) + }) + + it("should be able to remove a relationship from many side", async () => { + const row = await config.api.row.save(otherTable._id!, { + name: "test", + description: "test", + }) + const row2 = await config.api.row.save(otherTable._id!, { + name: "test", + description: "test", + }) + const { _id } = await config.api.row.save(table._id!, { + relationship: [{ _id: row._id }, { _id: row2._id }], + }) + const relatedRow = await config.api.row.get(table._id!, _id!, { + status: 200, + }) + expect(relatedRow.relationship.length).toEqual(2) + await config.api.row.save(table._id!, { + ...relatedRow, + relationship: [{ _id: row._id }], + }) + const afterRelatedRow = await config.api.row.get(table._id!, _id!, { + status: 200, + }) + expect(afterRelatedRow.relationship.length).toEqual(1) + expect(afterRelatedRow.relationship[0]._id).toEqual(row._id) + }) + + it("should be able to update relationships when both columns are same name", async () => { + let row = await config.api.row.save(table._id!, { + name: "test", + description: "test", + }) + let row2 = await config.api.row.save(otherTable._id!, { + name: "test", + description: "test", + relationship: [row._id], + }) + row = await config.api.row.get(table._id!, row._id!) + expect(row.relationship.length).toBe(1) + const resp = await config.api.row.patch(table._id!, { + _id: row._id!, + _rev: row._rev!, + tableId: row.tableId!, + name: "test2", + relationship: [row2._id], + }) + expect(resp.relationship.length).toBe(1) + }) + + !isInternal && + // MSSQL needs a setting called IDENTITY_INSERT to be set to ON to allow writing + // to identity columns. This is not something Budibase does currently. + !isMSSQL && + it("should support updating fields that are part of a composite key", async () => { + const tableRequest = saveTableRequest({ + primary: ["number", "string"], + schema: { + string: { + type: FieldType.STRING, + name: "string", + }, + number: { + type: FieldType.NUMBER, + name: "number", + }, + }, + }) + + delete tableRequest.schema.id + + const table = await config.api.table.save(tableRequest) + + const stringValue = generator.word() + + // MySQL and MariaDB auto-increment fields have a minimum value of 1. If + // you try to save a row with a value of 0 it will use 1 instead. + const naturalValue = generator.integer({ min: 1, max: 1000 }) + + const existing = await config.api.row.save(table._id!, { + string: stringValue, + number: naturalValue, + }) + + expect(existing._id).toEqual( + `%5B${naturalValue}%2C'${stringValue}'%5D` + ) + + const row = await config.api.row.patch(table._id!, { + _id: existing._id!, + _rev: existing._rev!, + tableId: table._id!, + string: stringValue, + number: 1500, + }) + + expect(row._id).toEqual(`%5B${"1500"}%2C'${stringValue}'%5D`) + }) }) - // test full enrichment - const resEnriched = await config.api.row.getEnriched( - linkedTable._id!, - secondRow._id! 
- ) - expect(resEnriched.link.length).toBe(1) - expect(resEnriched.link[0]._id).toBe(firstRow._id) - expect(resEnriched.link[0].name).toBe("Test Contact") - expect(resEnriched.link[0].description).toBe("original description") - await assertRowUsage(rowUsage) - }) - }) + describe("destroy", () => { + beforeAll(async () => { + table = await config.api.table.save(defaultTable()) + }) - isInternal && - describe("attachments and signatures", () => { - const coreAttachmentEnrichment = async ( - schema: TableSchema, - field: string, - attachmentCfg: string | string[] - ) => { - const testTable = await config.api.table.save( - defaultTable({ - schema, + it("should be able to delete a row", async () => { + const createdRow = await config.api.row.save(table._id!, {}) + const rowUsage = await getRowUsage() + + const res = await config.api.row.bulkDelete(table._id!, { + rows: [createdRow], }) - ) - const attachmentToStoreKey = (attachmentId: string) => { - return { - key: `${config.getAppId()}/attachments/${attachmentId}`, + expect(res[0]._id).toEqual(createdRow._id) + await assertRowUsage(isInternal ? rowUsage - 1 : rowUsage) + }) + + it("should be able to delete a row with ID only", async () => { + const createdRow = await config.api.row.save(table._id!, {}) + const rowUsage = await getRowUsage() + + const res = await config.api.row.bulkDelete(table._id!, { + rows: [createdRow._id!], + }) + expect(res[0]._id).toEqual(createdRow._id) + expect(res[0].tableId).toEqual(table._id!) + await assertRowUsage(isInternal ? rowUsage - 1 : rowUsage) + }) + + it("should be able to bulk delete rows, including a row that doesn't exist", async () => { + const createdRow = await config.api.row.save(table._id!, {}) + const createdRow2 = await config.api.row.save(table._id!, {}) + + const res = await config.api.row.bulkDelete(table._id!, { + rows: [createdRow, createdRow2, { _id: "9999999" }], + }) + + expect(res.map(r => r._id)).toEqual( + expect.arrayContaining([createdRow._id, createdRow2._id]) + ) + expect(res.length).toEqual(2) + }) + + describe("relations to same table", () => { + let relatedRows: Row[] + + beforeAll(async () => { + const relatedTable = await config.api.table.save( + defaultTable({ + schema: { + name: { name: "name", type: FieldType.STRING }, + }, + }) + ) + const relatedTableId = relatedTable._id! + table = await config.api.table.save( + defaultTable({ + schema: { + name: { name: "name", type: FieldType.STRING }, + related1: { + type: FieldType.LINK, + name: "related1", + fieldName: "main1", + tableId: relatedTableId, + relationshipType: RelationshipType.MANY_TO_MANY, + }, + related2: { + type: FieldType.LINK, + name: "related2", + fieldName: "main2", + tableId: relatedTableId, + relationshipType: RelationshipType.MANY_TO_MANY, + }, + }, + }) + ) + relatedRows = await Promise.all([ + config.api.row.save(relatedTableId, { name: "foo" }), + config.api.row.save(relatedTableId, { name: "bar" }), + config.api.row.save(relatedTableId, { name: "baz" }), + config.api.row.save(relatedTableId, { name: "boo" }), + ]) + }) + + it("can delete rows with both relationships", async () => { + const row = await config.api.row.save(table._id!, { + name: "test", + related1: [relatedRows[0]._id!], + related2: [relatedRows[1]._id!], + }) + + await config.api.row.delete(table._id!, { _id: row._id! 
}) + + await config.api.row.get(table._id!, row._id!, { status: 404 }) + }) + + it("can delete rows with empty relationships", async () => { + const row = await config.api.row.save(table._id!, { + name: "test", + related1: [], + related2: [], + }) + + await config.api.row.delete(table._id!, { _id: row._id! }) + + await config.api.row.get(table._id!, row._id!, { status: 404 }) + }) + }) + }) + + describe("validate", () => { + beforeAll(async () => { + table = await config.api.table.save(defaultTable()) + }) + + it("should return no errors on valid row", async () => { + const rowUsage = await getRowUsage() + + const res = await config.api.row.validate(table._id!, { + name: "ivan", + }) + + expect(res.valid).toBe(true) + expect(Object.keys(res.errors)).toEqual([]) + await assertRowUsage(rowUsage) + }) + + it("should errors on invalid row", async () => { + const rowUsage = await getRowUsage() + + const res = await config.api.row.validate(table._id!, { name: 1 }) + + if (isInternal) { + expect(res.valid).toBe(false) + expect(Object.keys(res.errors)).toEqual(["name"]) + } else { + // Validation for external is not implemented, so it will always return valid + expect(res.valid).toBe(true) + expect(Object.keys(res.errors)).toEqual([]) } - } - const draftRow = { - name: "test", - description: "test", - [field]: - typeof attachmentCfg === "string" - ? attachmentToStoreKey(attachmentCfg) - : attachmentCfg.map(attachmentToStoreKey), - tableId: testTable._id, - } - const row = await config.api.row.save(testTable._id!, draftRow) + await assertRowUsage(rowUsage) + }) + }) - await withEnv({ SELF_HOSTED: "true" }, async () => { - return context.doInAppContext(config.getAppId(), async () => { - const enriched: Row[] = await outputProcessing(testTable, [row]) - const [targetRow] = enriched - const attachmentEntries = Array.isArray(targetRow[field]) - ? targetRow[field] - : [targetRow[field]] + describe("bulkDelete", () => { + beforeAll(async () => { + table = await config.api.table.save(defaultTable()) + }) - for (const entry of attachmentEntries) { - const attachmentId = entry.key.split("/").pop() - expect(entry.url.split("?")[0]).toBe( - `/files/signed/prod-budi-app-assets/${config.getProdAppId()}/attachments/${attachmentId}` + it("should be able to delete a bulk set of rows", async () => { + const row1 = await config.api.row.save(table._id!, {}) + const row2 = await config.api.row.save(table._id!, {}) + const rowUsage = await getRowUsage() + + const res = await config.api.row.bulkDelete(table._id!, { + rows: [row1, row2], + }) + + expect(res.length).toEqual(2) + await config.api.row.get(table._id!, row1._id!, { status: 404 }) + await assertRowUsage(isInternal ? rowUsage - 2 : rowUsage) + }) + + it("should be able to delete a variety of row set types", async () => { + const [row1, row2, row3] = await Promise.all([ + config.api.row.save(table._id!, {}), + config.api.row.save(table._id!, {}), + config.api.row.save(table._id!, {}), + ]) + const rowUsage = await getRowUsage() + + const res = await config.api.row.bulkDelete(table._id!, { + rows: [row1, row2._id!, { _id: row3._id }], + }) + + expect(res.length).toEqual(3) + await config.api.row.get(table._id!, row1._id!, { status: 404 }) + await assertRowUsage(isInternal ? 
rowUsage - 3 : rowUsage) + }) + + it("should accept a valid row object and delete the row", async () => { + const row1 = await config.api.row.save(table._id!, {}) + const rowUsage = await getRowUsage() + + const res = await config.api.row.delete(table._id!, row1 as DeleteRow) + + expect(res.id).toEqual(row1._id) + await config.api.row.get(table._id!, row1._id!, { status: 404 }) + await assertRowUsage(isInternal ? rowUsage - 1 : rowUsage) + }) + + it.each([{ not: "valid" }, { rows: 123 }, "invalid"])( + "should ignore malformed/invalid delete request: %s", + async (request: any) => { + const rowUsage = await getRowUsage() + + await config.api.row.delete(table._id!, request, { + status: 400, + body: { + message: "Invalid delete rows request", + }, + }) + + await assertRowUsage(rowUsage) + } + ) + }) + + describe("bulkImport", () => { + isInternal && + it("should update Auto ID field after bulk import", async () => { + const table = await config.api.table.save( + saveTableRequest({ + primary: ["autoId"], + schema: { + autoId: { + name: "autoId", + type: FieldType.NUMBER, + subtype: AutoFieldSubType.AUTO_ID, + autocolumn: true, + constraints: { + type: "number", + presence: false, + }, + }, + }, + }) + ) + + let row = await config.api.row.save(table._id!, {}) + expect(row.autoId).toEqual(1) + + await config.api.row.bulkImport(table._id!, { + rows: [{ autoId: 2 }], + }) + + row = await config.api.row.save(table._id!, {}) + expect(row.autoId).toEqual(3) + }) + + isInternal && + it("should reject bulkImporting relationship fields", async () => { + const table1 = await config.api.table.save(saveTableRequest()) + const table2 = await config.api.table.save( + saveTableRequest({ + schema: { + relationship: { + name: "relationship", + type: FieldType.LINK, + tableId: table1._id!, + relationshipType: RelationshipType.ONE_TO_MANY, + fieldName: "relationship", + }, + }, + }) + ) + + const table1Row1 = await config.api.row.save(table1._id!, {}) + await config.api.row.bulkImport( + table2._id!, + { + rows: [{ relationship: [table1Row1._id!] }], + }, + { + status: 400, + body: { + message: + 'Can\'t bulk import relationship fields for internal databases, found value in field "relationship"', + }, + } + ) + }) + + it("should be able to bulkImport rows", async () => { + const table = await config.api.table.save( + saveTableRequest({ + schema: { + name: { + type: FieldType.STRING, + name: "name", + }, + description: { + type: FieldType.STRING, + name: "description", + }, + }, + }) + ) + + const rowUsage = await getRowUsage() + + await config.api.row.bulkImport(table._id!, { + rows: [ + { + name: "Row 1", + description: "Row 1 description", + }, + { + name: "Row 2", + description: "Row 2 description", + }, + ], + }) + + const rows = await config.api.row.fetch(table._id!) + expect(rows.length).toEqual(2) + + rows.sort((a, b) => a.name.localeCompare(b.name)) + expect(rows[0].name).toEqual("Row 1") + expect(rows[0].description).toEqual("Row 1 description") + expect(rows[1].name).toEqual("Row 2") + expect(rows[1].description).toEqual("Row 2 description") + + await assertRowUsage(isInternal ? 
rowUsage + 2 : rowUsage) + }) + + isInternal && + it("should be able to update existing rows on bulkImport", async () => { + const table = await config.api.table.save( + saveTableRequest({ + schema: { + name: { + type: FieldType.STRING, + name: "name", + }, + description: { + type: FieldType.STRING, + name: "description", + }, + }, + }) + ) + + const existingRow = await config.api.row.save(table._id!, { + name: "Existing row", + description: "Existing description", + }) + + const rowUsage = await getRowUsage() + + await config.api.row.bulkImport(table._id!, { + rows: [ + { + name: "Row 1", + description: "Row 1 description", + }, + { ...existingRow, name: "Updated existing row" }, + { + name: "Row 2", + description: "Row 2 description", + }, + ], + identifierFields: ["_id"], + }) + + const rows = await config.api.row.fetch(table._id!) + expect(rows.length).toEqual(3) + + rows.sort((a, b) => a.name.localeCompare(b.name)) + expect(rows[0].name).toEqual("Row 1") + expect(rows[0].description).toEqual("Row 1 description") + expect(rows[1].name).toEqual("Row 2") + expect(rows[1].description).toEqual("Row 2 description") + expect(rows[2].name).toEqual("Updated existing row") + expect(rows[2].description).toEqual("Existing description") + + await assertRowUsage(rowUsage + 2) + }) + + isInternal && + it("should create new rows if not identifierFields are provided", async () => { + const table = await config.api.table.save( + saveTableRequest({ + schema: { + name: { + type: FieldType.STRING, + name: "name", + }, + description: { + type: FieldType.STRING, + name: "description", + }, + }, + }) + ) + + const existingRow = await config.api.row.save(table._id!, { + name: "Existing row", + description: "Existing description", + }) + + const rowUsage = await getRowUsage() + + await config.api.row.bulkImport(table._id!, { + rows: [ + { + name: "Row 1", + description: "Row 1 description", + }, + { ...existingRow, name: "Updated existing row" }, + { + name: "Row 2", + description: "Row 2 description", + }, + ], + }) + + const rows = await config.api.row.fetch(table._id!) 
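+        // No identifierFields were supplied, so the spread existing row cannot
+        // be matched and is imported as a brand new record, giving four rows.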
+ expect(rows.length).toEqual(4) + + rows.sort((a, b) => a.name.localeCompare(b.name)) + expect(rows[0].name).toEqual("Existing row") + expect(rows[0].description).toEqual("Existing description") + expect(rows[1].name).toEqual("Row 1") + expect(rows[1].description).toEqual("Row 1 description") + expect(rows[2].name).toEqual("Row 2") + expect(rows[2].description).toEqual("Row 2 description") + expect(rows[3].name).toEqual("Updated existing row") + expect(rows[3].description).toEqual("Existing description") + + await assertRowUsage(rowUsage + 3) + }) + + // Upserting isn't yet supported in MSSQL / Oracle, see: + // https://github.com/knex/knex/pull/6050 + !isMSSQL && + !isOracle && + it("should be able to update existing rows with bulkImport", async () => { + const table = await config.api.table.save( + saveTableRequest({ + primary: ["userId"], + schema: { + userId: { + type: FieldType.NUMBER, + name: "userId", + constraints: { + presence: true, + }, + }, + name: { + type: FieldType.STRING, + name: "name", + }, + description: { + type: FieldType.STRING, + name: "description", + }, + }, + }) + ) + + const row1 = await config.api.row.save(table._id!, { + userId: 1, + name: "Row 1", + description: "Row 1 description", + }) + + const row2 = await config.api.row.save(table._id!, { + userId: 2, + name: "Row 2", + description: "Row 2 description", + }) + + await config.api.row.bulkImport(table._id!, { + identifierFields: ["userId"], + rows: [ + { + userId: row1.userId, + name: "Row 1 updated", + description: "Row 1 description updated", + }, + { + userId: row2.userId, + name: "Row 2 updated", + description: "Row 2 description updated", + }, + { + userId: 3, + name: "Row 3", + description: "Row 3 description", + }, + ], + }) + + const rows = await config.api.row.fetch(table._id!) 
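+        // Rows matched on the userId identifier are upserted in place, while
+        // the unmatched userId 3 entry is inserted as a new row.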
+ expect(rows.length).toEqual(3) + + rows.sort((a, b) => a.name.localeCompare(b.name)) + expect(rows[0].name).toEqual("Row 1 updated") + expect(rows[0].description).toEqual("Row 1 description updated") + expect(rows[1].name).toEqual("Row 2 updated") + expect(rows[1].description).toEqual("Row 2 description updated") + expect(rows[2].name).toEqual("Row 3") + expect(rows[2].description).toEqual("Row 3 description") + }) + + // Upserting isn't yet supported in MSSQL or Oracle, see: + // https://github.com/knex/knex/pull/6050 + !isMSSQL && + !isOracle && + !isInternal && + it("should be able to update existing rows with composite primary keys with bulkImport", async () => { + const tableName = uuid.v4() + await client?.schema.createTable(tableName, table => { + table.integer("companyId") + table.integer("userId") + table.string("name") + table.string("description") + table.primary(["companyId", "userId"]) + }) + + const resp = await config.api.datasource.fetchSchema({ + datasourceId: datasource!._id!, + }) + const table = resp.datasource.entities![tableName] + + const row1 = await config.api.row.save(table._id!, { + companyId: 1, + userId: 1, + name: "Row 1", + description: "Row 1 description", + }) + + const row2 = await config.api.row.save(table._id!, { + companyId: 1, + userId: 2, + name: "Row 2", + description: "Row 2 description", + }) + + await config.api.row.bulkImport(table._id!, { + identifierFields: ["companyId", "userId"], + rows: [ + { + companyId: 1, + userId: row1.userId, + name: "Row 1 updated", + description: "Row 1 description updated", + }, + { + companyId: 1, + userId: row2.userId, + name: "Row 2 updated", + description: "Row 2 description updated", + }, + { + companyId: 1, + userId: 3, + name: "Row 3", + description: "Row 3 description", + }, + ], + }) + + const rows = await config.api.row.fetch(table._id!) + expect(rows.length).toEqual(3) + + rows.sort((a, b) => a.name.localeCompare(b.name)) + expect(rows[0].name).toEqual("Row 1 updated") + expect(rows[0].description).toEqual("Row 1 description updated") + expect(rows[1].name).toEqual("Row 2 updated") + expect(rows[1].description).toEqual("Row 2 description updated") + expect(rows[2].name).toEqual("Row 3") + expect(rows[2].description).toEqual("Row 3 description") + }) + + // Upserting isn't yet supported in MSSQL/Oracle, see: + // https://github.com/knex/knex/pull/6050 + !isMSSQL && + !isOracle && + !isInternal && + it("should be able to update existing rows an autoID primary key", async () => { + const tableName = uuid.v4() + await client!.schema.createTable(tableName, table => { + table.increments("userId").primary() + table.string("name") + }) + + const resp = await config.api.datasource.fetchSchema({ + datasourceId: datasource!._id!, + }) + const table = resp.datasource.entities![tableName] + + const row1 = await config.api.row.save(table._id!, { + name: "Clare", + }) + + const row2 = await config.api.row.save(table._id!, { + name: "Jeff", + }) + + await config.api.row.bulkImport(table._id!, { + identifierFields: ["userId"], + rows: [ + { + userId: row1.userId, + name: "Clare updated", + }, + { + userId: row2.userId, + name: "Jeff updated", + }, + ], + }) + + const rows = await config.api.row.fetch(table._id!) 
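+        // Both userIds belong to existing rows, so the import should update
+        // them in place and the table should still only hold two rows.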
+ expect(rows.length).toEqual(2) + + rows.sort((a, b) => a.name.localeCompare(b.name)) + expect(rows[0].name).toEqual("Clare updated") + expect(rows[1].name).toEqual("Jeff updated") + }) + }) + + describe("enrich", () => { + beforeAll(async () => { + table = await config.api.table.save(defaultTable()) + }) + + it("should allow enriching some linked rows", async () => { + const { linkedTable, firstRow, secondRow } = await tenancy.doInTenant( + config.getTenantId(), + async () => { + const linkedTable = await config.api.table.save( + defaultTable({ + schema: { + link: { + name: "link", + fieldName: "link", + type: FieldType.LINK, + relationshipType: RelationshipType.ONE_TO_MANY, + tableId: table._id!, + }, + }, + }) ) + const firstRow = await config.api.row.save(table._id!, { + name: "Test Contact", + description: "original description", + }) + const secondRow = await config.api.row.save(linkedTable._id!, { + name: "Test 2", + description: "og desc", + link: [{ _id: firstRow._id }], + }) + return { linkedTable, firstRow, secondRow } + } + ) + const rowUsage = await getRowUsage() + + // test basic enrichment + const resBasic = await config.api.row.get( + linkedTable._id!, + secondRow._id! + ) + expect(resBasic.link.length).toBe(1) + expect(resBasic.link[0]).toEqual({ + _id: firstRow._id, + primaryDisplay: firstRow.name, + }) + + // test full enrichment + const resEnriched = await config.api.row.getEnriched( + linkedTable._id!, + secondRow._id! + ) + expect(resEnriched.link.length).toBe(1) + expect(resEnriched.link[0]._id).toBe(firstRow._id) + expect(resEnriched.link[0].name).toBe("Test Contact") + expect(resEnriched.link[0].description).toBe("original description") + await assertRowUsage(rowUsage) + }) + }) + + isInternal && + describe("attachments and signatures", () => { + const coreAttachmentEnrichment = async ( + schema: TableSchema, + field: string, + attachmentCfg: string | string[] + ) => { + const testTable = await config.api.table.save( + defaultTable({ + schema, + }) + ) + const attachmentToStoreKey = (attachmentId: string) => { + return { + key: `${config.getAppId()}/attachments/${attachmentId}`, + } + } + const draftRow = { + name: "test", + description: "test", + [field]: + typeof attachmentCfg === "string" + ? attachmentToStoreKey(attachmentCfg) + : attachmentCfg.map(attachmentToStoreKey), + tableId: testTable._id, + } + const row = await config.api.row.save(testTable._id!, draftRow) + + await withEnv({ SELF_HOSTED: "true" }, async () => { + return context.doInAppContext(config.getAppId(), async () => { + const enriched: Row[] = await outputProcessing(testTable, [row]) + const [targetRow] = enriched + const attachmentEntries = Array.isArray(targetRow[field]) + ? 
targetRow[field] + : [targetRow[field]] + + for (const entry of attachmentEntries) { + const attachmentId = entry.key.split("/").pop() + expect(entry.url.split("?")[0]).toBe( + `/files/signed/prod-budi-app-assets/${config.getProdAppId()}/attachments/${attachmentId}` + ) + } + }) + }) + } + + it("should allow enriching single attachment rows", async () => { + await coreAttachmentEnrichment( + { + attachment: { + type: FieldType.ATTACHMENT_SINGLE, + name: "attachment", + constraints: { presence: false }, + }, + }, + "attachment", + `${uuid.v4()}.csv` + ) + }) + + it("should allow enriching attachment list rows", async () => { + await coreAttachmentEnrichment( + { + attachments: { + type: FieldType.ATTACHMENTS, + name: "attachments", + constraints: { type: "array", presence: false }, + }, + }, + "attachments", + [`${uuid.v4()}.csv`] + ) + }) + + it("should allow enriching signature rows", async () => { + await coreAttachmentEnrichment( + { + signature: { + type: FieldType.SIGNATURE_SINGLE, + name: "signature", + constraints: { presence: false }, + }, + }, + "signature", + `${uuid.v4()}.png` + ) + }) + }) + + describe("exportRows", () => { + beforeEach(async () => { + table = await config.api.table.save(defaultTable()) + }) + + isInternal && + it("should not export internal couchdb fields", async () => { + const existing = await config.api.row.save(table._id!, { + name: generator.guid(), + description: generator.paragraph(), + }) + const res = await config.api.row.exportRows(table._id!, { + rows: [existing._id!], + }) + const results = JSON.parse(res) + expect(results.length).toEqual(1) + const row = results[0] + + expect(Object.keys(row)).toEqual(["_id", "name", "description"]) + }) + + !isInternal && + it("should allow exporting all columns", async () => { + const existing = await config.api.row.save(table._id!, {}) + const res = await config.api.row.exportRows(table._id!, { + rows: [existing._id!], + }) + const results = JSON.parse(res) + expect(results.length).toEqual(1) + const row = results[0] + + // Ensure all original columns were exported + expect(Object.keys(row).length).toBe(Object.keys(existing).length) + Object.keys(existing).forEach(key => { + expect(row[key]).toEqual(existing[key]) + }) + }) + + it("should allow exporting without filtering", async () => { + const existing = await config.api.row.save(table._id!, {}) + const res = await config.api.row.exportRows(table._id!) 
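+        // No row filter or query is supplied, so the export should cover the
+        // whole table, which currently only contains the row saved above.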
+ const results = JSON.parse(res) + expect(results.length).toEqual(1) + const row = results[0] + + expect(row._id).toEqual(existing._id) + }) + + it("should allow exporting only certain columns", async () => { + const existing = await config.api.row.save(table._id!, {}) + const res = await config.api.row.exportRows(table._id!, { + rows: [existing._id!], + columns: ["_id"], + }) + const results = JSON.parse(res) + expect(results.length).toEqual(1) + const row = results[0] + + // Ensure only the _id column was exported + expect(Object.keys(row).length).toEqual(1) + expect(row._id).toEqual(existing._id) + }) + + it("should handle single quotes in row filtering", async () => { + const existing = await config.api.row.save(table._id!, {}) + const res = await config.api.row.exportRows(table._id!, { + rows: [`['${existing._id!}']`], + }) + const results = JSON.parse(res) + expect(results.length).toEqual(1) + const row = results[0] + expect(row._id).toEqual(existing._id) + }) + + it("should return an error if no table is found", async () => { + const existing = await config.api.row.save(table._id!, {}) + await config.api.row.exportRows( + "1234567", + { rows: [existing._id!] }, + RowExportFormat.JSON, + { status: 404 } + ) + }) + + // MSSQL needs a setting called IDENTITY_INSERT to be set to ON to allow writing + // to identity columns. This is not something Budibase does currently. + !isMSSQL && + it("should handle filtering by composite primary keys", async () => { + const tableRequest = saveTableRequest({ + primary: ["number", "string"], + schema: { + string: { + type: FieldType.STRING, + name: "string", + }, + number: { + type: FieldType.NUMBER, + name: "number", + }, + }, + }) + delete tableRequest.schema.id + + const table = await config.api.table.save(tableRequest) + const toCreate = generator + .unique(() => generator.integer({ min: 0, max: 10000 }), 10) + .map(number => ({ + number, + string: generator.word({ length: 30 }), + })) + + const rows = await Promise.all( + toCreate.map(d => config.api.row.save(table._id!, d)) + ) + + const res = await config.api.row.exportRows(table._id!, { + rows: _.sampleSize(rows, 3).map(r => r._id!), + }) + const results = JSON.parse(res) + expect(results.length).toEqual(3) + }) + + describe("should allow exporting all column types", () => { + let tableId: string + let expectedRowData: Row + + beforeAll(async () => { + const fullSchema = setup.structures.fullSchemaWithoutLinks({ + allRequired: true, + }) + + const table = await config.api.table.save( + saveTableRequest({ + ...setup.structures.basicTable(), + schema: fullSchema, + primary: ["string"], + }) + ) + tableId = table._id! 
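+        // The row built below populates every non-link field type in the full
+        // schema; FORMULA and AUTO values are generated server-side.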
+ + const rowValues: Record = { + [FieldType.STRING]: generator.guid(), + [FieldType.LONGFORM]: generator.paragraph(), + [FieldType.OPTIONS]: "option 2", + [FieldType.ARRAY]: ["options 2", "options 4"], + [FieldType.NUMBER]: generator.natural(), + [FieldType.BOOLEAN]: generator.bool(), + [FieldType.DATETIME]: generator.date().toISOString(), + [FieldType.ATTACHMENTS]: [setup.structures.basicAttachment()], + [FieldType.ATTACHMENT_SINGLE]: setup.structures.basicAttachment(), + [FieldType.FORMULA]: undefined, // generated field + [FieldType.AUTO]: undefined, // generated field + [FieldType.AI]: "LLM Output", + [FieldType.JSON]: { name: generator.guid() }, + [FieldType.INTERNAL]: generator.guid(), + [FieldType.BARCODEQR]: generator.guid(), + [FieldType.SIGNATURE_SINGLE]: setup.structures.basicAttachment(), + [FieldType.BIGINT]: generator.integer().toString(), + [FieldType.BB_REFERENCE]: [{ _id: config.getUser()._id }], + [FieldType.BB_REFERENCE_SINGLE]: { _id: config.getUser()._id }, + } + const row = await config.api.row.save(table._id!, rowValues) + expectedRowData = { + _id: row._id, + [FieldType.STRING]: rowValues[FieldType.STRING], + [FieldType.LONGFORM]: rowValues[FieldType.LONGFORM], + [FieldType.OPTIONS]: rowValues[FieldType.OPTIONS], + [FieldType.ARRAY]: rowValues[FieldType.ARRAY], + [FieldType.NUMBER]: rowValues[FieldType.NUMBER], + [FieldType.BOOLEAN]: rowValues[FieldType.BOOLEAN], + [FieldType.DATETIME]: rowValues[FieldType.DATETIME], + [FieldType.ATTACHMENTS]: rowValues[FieldType.ATTACHMENTS].map( + (a: any) => + expect.objectContaining({ + ...a, + url: expect.any(String), + }) + ), + [FieldType.ATTACHMENT_SINGLE]: expect.objectContaining({ + ...rowValues[FieldType.ATTACHMENT_SINGLE], + url: expect.any(String), + }), + [FieldType.FORMULA]: fullSchema[FieldType.FORMULA].formula, + [FieldType.AUTO]: expect.any(Number), + [FieldType.AI]: expect.any(String), + [FieldType.JSON]: rowValues[FieldType.JSON], + [FieldType.INTERNAL]: rowValues[FieldType.INTERNAL], + [FieldType.BARCODEQR]: rowValues[FieldType.BARCODEQR], + [FieldType.SIGNATURE_SINGLE]: expect.objectContaining({ + ...rowValues[FieldType.SIGNATURE_SINGLE], + url: expect.any(String), + }), + [FieldType.BIGINT]: rowValues[FieldType.BIGINT], + [FieldType.BB_REFERENCE]: rowValues[FieldType.BB_REFERENCE].map( + expect.objectContaining + ), + [FieldType.BB_REFERENCE_SINGLE]: expect.objectContaining( + rowValues[FieldType.BB_REFERENCE_SINGLE] + ), } }) - }) - } - it("should allow enriching single attachment rows", async () => { - await coreAttachmentEnrichment( - { - attachment: { - type: FieldType.ATTACHMENT_SINGLE, - name: "attachment", - constraints: { presence: false }, - }, - }, - "attachment", - `${uuid.v4()}.csv` - ) - }) + it("as csv", async () => { + const exportedValue = await config.api.row.exportRows( + tableId, + { query: {} }, + RowExportFormat.CSV + ) - it("should allow enriching attachment list rows", async () => { - await coreAttachmentEnrichment( - { - attachments: { - type: FieldType.ATTACHMENTS, - name: "attachments", - constraints: { type: "array", presence: false }, - }, - }, - "attachments", - [`${uuid.v4()}.csv`] - ) - }) + const jsonResult = await config.api.table.csvToJson({ + csvString: exportedValue, + }) - it("should allow enriching signature rows", async () => { - await coreAttachmentEnrichment( - { - signature: { - type: FieldType.SIGNATURE_SINGLE, - name: "signature", - constraints: { presence: false }, - }, - }, - "signature", - `${uuid.v4()}.png` - ) - }) - }) + const stringified = (value: string) => 
+ JSON.stringify(value).replace(/"/g, "'") - describe("exportRows", () => { - beforeEach(async () => { - table = await config.api.table.save(defaultTable()) - }) + const matchingObject = ( + key: string, + value: any, + isArray: boolean + ) => { + const objectMatcher = `{'${key}':'${value[key]}'.*?}` + if (isArray) { + return expect.stringMatching( + new RegExp(`^\\[${objectMatcher}\\]$`) + ) + } + return expect.stringMatching(new RegExp(`^${objectMatcher}$`)) + } - isInternal && - it("should not export internal couchdb fields", async () => { - const existing = await config.api.row.save(table._id!, { - name: generator.guid(), - description: generator.paragraph(), - }) - const res = await config.api.row.exportRows(table._id!, { - rows: [existing._id!], - }) - const results = JSON.parse(res) - expect(results.length).toEqual(1) - const row = results[0] - - expect(Object.keys(row)).toEqual(["_id", "name", "description"]) - }) - - !isInternal && - it("should allow exporting all columns", async () => { - const existing = await config.api.row.save(table._id!, {}) - const res = await config.api.row.exportRows(table._id!, { - rows: [existing._id!], - }) - const results = JSON.parse(res) - expect(results.length).toEqual(1) - const row = results[0] - - // Ensure all original columns were exported - expect(Object.keys(row).length).toBe(Object.keys(existing).length) - Object.keys(existing).forEach(key => { - expect(row[key]).toEqual(existing[key]) - }) - }) - - it("should allow exporting without filtering", async () => { - const existing = await config.api.row.save(table._id!, {}) - const res = await config.api.row.exportRows(table._id!) - const results = JSON.parse(res) - expect(results.length).toEqual(1) - const row = results[0] - - expect(row._id).toEqual(existing._id) - }) - - it("should allow exporting only certain columns", async () => { - const existing = await config.api.row.save(table._id!, {}) - const res = await config.api.row.exportRows(table._id!, { - rows: [existing._id!], - columns: ["_id"], - }) - const results = JSON.parse(res) - expect(results.length).toEqual(1) - const row = results[0] - - // Ensure only the _id column was exported - expect(Object.keys(row).length).toEqual(1) - expect(row._id).toEqual(existing._id) - }) - - it("should handle single quotes in row filtering", async () => { - const existing = await config.api.row.save(table._id!, {}) - const res = await config.api.row.exportRows(table._id!, { - rows: [`['${existing._id!}']`], - }) - const results = JSON.parse(res) - expect(results.length).toEqual(1) - const row = results[0] - expect(row._id).toEqual(existing._id) - }) - - it("should return an error if no table is found", async () => { - const existing = await config.api.row.save(table._id!, {}) - await config.api.row.exportRows( - "1234567", - { rows: [existing._id!] }, - RowExportFormat.JSON, - { status: 404 } - ) - }) - - // MSSQL needs a setting called IDENTITY_INSERT to be set to ON to allow writing - // to identity columns. This is not something Budibase does currently. 
- !isMSSQL && - it("should handle filtering by composite primary keys", async () => { - const tableRequest = saveTableRequest({ - primary: ["number", "string"], - schema: { - string: { - type: FieldType.STRING, - name: "string", - }, - number: { - type: FieldType.NUMBER, - name: "number", - }, - }, - }) - delete tableRequest.schema.id - - const table = await config.api.table.save(tableRequest) - const toCreate = generator - .unique(() => generator.integer({ min: 0, max: 10000 }), 10) - .map(number => ({ number, string: generator.word({ length: 30 }) })) - - const rows = await Promise.all( - toCreate.map(d => config.api.row.save(table._id!, d)) - ) - - const res = await config.api.row.exportRows(table._id!, { - rows: _.sampleSize(rows, 3).map(r => r._id!), - }) - const results = JSON.parse(res) - expect(results.length).toEqual(3) - }) - - describe("should allow exporting all column types", () => { - let tableId: string - let expectedRowData: Row - - beforeAll(async () => { - const fullSchema = setup.structures.fullSchemaWithoutLinks({ - allRequired: true, - }) - - const table = await config.api.table.save( - saveTableRequest({ - ...setup.structures.basicTable(), - schema: fullSchema, - primary: ["string"], + expect(jsonResult).toEqual([ + { + ...expectedRowData, + auto: expect.any(String), + array: stringified(expectedRowData["array"]), + attachment: matchingObject( + "key", + expectedRowData["attachment"][0].sample, + true + ), + attachment_single: matchingObject( + "key", + expectedRowData["attachment_single"].sample, + false + ), + boolean: stringified(expectedRowData["boolean"]), + json: stringified(expectedRowData["json"]), + number: stringified(expectedRowData["number"]), + signature_single: matchingObject( + "key", + expectedRowData["signature_single"].sample, + false + ), + bb_reference: matchingObject( + "_id", + expectedRowData["bb_reference"][0].sample, + true + ), + bb_reference_single: matchingObject( + "_id", + expectedRowData["bb_reference_single"].sample, + false + ), + ai: "LLM Output", + }, + ]) }) - ) - tableId = table._id! 
- const rowValues: Record = { - [FieldType.STRING]: generator.guid(), - [FieldType.LONGFORM]: generator.paragraph(), - [FieldType.OPTIONS]: "option 2", - [FieldType.ARRAY]: ["options 2", "options 4"], - [FieldType.NUMBER]: generator.natural(), - [FieldType.BOOLEAN]: generator.bool(), - [FieldType.DATETIME]: generator.date().toISOString(), - [FieldType.ATTACHMENTS]: [setup.structures.basicAttachment()], - [FieldType.ATTACHMENT_SINGLE]: setup.structures.basicAttachment(), - [FieldType.FORMULA]: undefined, // generated field - [FieldType.AUTO]: undefined, // generated field - [FieldType.AI]: "LLM Output", - [FieldType.JSON]: { name: generator.guid() }, - [FieldType.INTERNAL]: generator.guid(), - [FieldType.BARCODEQR]: generator.guid(), - [FieldType.SIGNATURE_SINGLE]: setup.structures.basicAttachment(), - [FieldType.BIGINT]: generator.integer().toString(), - [FieldType.BB_REFERENCE]: [{ _id: config.getUser()._id }], - [FieldType.BB_REFERENCE_SINGLE]: { _id: config.getUser()._id }, - } - const row = await config.api.row.save(table._id!, rowValues) - expectedRowData = { - _id: row._id, - [FieldType.STRING]: rowValues[FieldType.STRING], - [FieldType.LONGFORM]: rowValues[FieldType.LONGFORM], - [FieldType.OPTIONS]: rowValues[FieldType.OPTIONS], - [FieldType.ARRAY]: rowValues[FieldType.ARRAY], - [FieldType.NUMBER]: rowValues[FieldType.NUMBER], - [FieldType.BOOLEAN]: rowValues[FieldType.BOOLEAN], - [FieldType.DATETIME]: rowValues[FieldType.DATETIME], - [FieldType.ATTACHMENTS]: rowValues[FieldType.ATTACHMENTS].map( - (a: any) => - expect.objectContaining({ - ...a, - url: expect.any(String), + it("as json", async () => { + const exportedValue = await config.api.row.exportRows( + tableId, + { query: {} }, + RowExportFormat.JSON + ) + + const json = JSON.parse(exportedValue) + expect(json).toEqual([expectedRowData]) + }) + + it("as json with schema", async () => { + const exportedValue = await config.api.row.exportRows( + tableId, + { query: {} }, + RowExportFormat.JSON_WITH_SCHEMA + ) + + const json = JSON.parse(exportedValue) + expect(json).toEqual({ + schema: expect.any(Object), + rows: [expectedRowData], + }) + }) + + it("can handle csv-special characters in strings", async () => { + const badString = 'test":, wow", "test": "wow"' + const table = await config.api.table.save( + saveTableRequest({ + schema: { + string: { + type: FieldType.STRING, + name: "string", + }, + }, }) - ), - [FieldType.ATTACHMENT_SINGLE]: expect.objectContaining({ - ...rowValues[FieldType.ATTACHMENT_SINGLE], - url: expect.any(String), - }), - [FieldType.FORMULA]: fullSchema[FieldType.FORMULA].formula, - [FieldType.AUTO]: expect.any(Number), - [FieldType.AI]: expect.any(String), - [FieldType.JSON]: rowValues[FieldType.JSON], - [FieldType.INTERNAL]: rowValues[FieldType.INTERNAL], - [FieldType.BARCODEQR]: rowValues[FieldType.BARCODEQR], - [FieldType.SIGNATURE_SINGLE]: expect.objectContaining({ - ...rowValues[FieldType.SIGNATURE_SINGLE], - url: expect.any(String), - }), - [FieldType.BIGINT]: rowValues[FieldType.BIGINT], - [FieldType.BB_REFERENCE]: rowValues[FieldType.BB_REFERENCE].map( - expect.objectContaining - ), - [FieldType.BB_REFERENCE_SINGLE]: expect.objectContaining( - rowValues[FieldType.BB_REFERENCE_SINGLE] - ), - } - }) + ) - it("as csv", async () => { - const exportedValue = await config.api.row.exportRows( - tableId, - { query: {} }, - RowExportFormat.CSV - ) + await config.api.row.save(table._id!, { string: badString }) - const jsonResult = await config.api.table.csvToJson({ - csvString: exportedValue, - }) + const 
exportedValue = await config.api.row.exportRows( + table._id!, + { query: {} }, + RowExportFormat.CSV + ) - const stringified = (value: string) => - JSON.stringify(value).replace(/"/g, "'") - - const matchingObject = (key: string, value: any, isArray: boolean) => { - const objectMatcher = `{'${key}':'${value[key]}'.*?}` - if (isArray) { - return expect.stringMatching(new RegExp(`^\\[${objectMatcher}\\]$`)) - } - return expect.stringMatching(new RegExp(`^${objectMatcher}$`)) - } - - expect(jsonResult).toEqual([ - { - ...expectedRowData, - auto: expect.any(String), - array: stringified(expectedRowData["array"]), - attachment: matchingObject( - "key", - expectedRowData["attachment"][0].sample, - true - ), - attachment_single: matchingObject( - "key", - expectedRowData["attachment_single"].sample, - false - ), - boolean: stringified(expectedRowData["boolean"]), - json: stringified(expectedRowData["json"]), - number: stringified(expectedRowData["number"]), - signature_single: matchingObject( - "key", - expectedRowData["signature_single"].sample, - false - ), - bb_reference: matchingObject( - "_id", - expectedRowData["bb_reference"][0].sample, - true - ), - bb_reference_single: matchingObject( - "_id", - expectedRowData["bb_reference_single"].sample, - false - ), - ai: "LLM Output", - }, - ]) - }) - - it("as json", async () => { - const exportedValue = await config.api.row.exportRows( - tableId, - { query: {} }, - RowExportFormat.JSON - ) - - const json = JSON.parse(exportedValue) - expect(json).toEqual([expectedRowData]) - }) - - it("as json with schema", async () => { - const exportedValue = await config.api.row.exportRows( - tableId, - { query: {} }, - RowExportFormat.JSON_WITH_SCHEMA - ) - - const json = JSON.parse(exportedValue) - expect(json).toEqual({ - schema: expect.any(Object), - rows: [expectedRowData], - }) - }) - - it("can handle csv-special characters in strings", async () => { - const badString = 'test":, wow", "test": "wow"' - const table = await config.api.table.save( - saveTableRequest({ - schema: { - string: { - type: FieldType.STRING, - name: "string", + const json = await config.api.table.csvToJson( + { + csvString: exportedValue, }, - }, + { + status: 200, + } + ) + + expect(json).toHaveLength(1) + expect(json[0].string).toEqual(badString) }) - ) - await config.api.row.save(table._id!, { string: badString }) + it("exported data can be re-imported", async () => { + // export all + const exportedValue = await config.api.row.exportRows( + tableId, + { query: {} }, + RowExportFormat.CSV + ) - const exportedValue = await config.api.row.exportRows( - table._id!, - { query: {} }, - RowExportFormat.CSV - ) + // import all twice + const rows = await config.api.table.csvToJson({ + csvString: exportedValue, + }) + await config.api.row.bulkImport(tableId, { + rows, + }) + await config.api.row.bulkImport(tableId, { + rows, + }) - const json = await config.api.table.csvToJson( - { - csvString: exportedValue, - }, - { - status: 200, - } - ) + const { rows: allRows } = await config.api.row.search(tableId) - expect(json).toHaveLength(1) - expect(json[0].string).toEqual(badString) - }) - - it("exported data can be re-imported", async () => { - // export all - const exportedValue = await config.api.row.exportRows( - tableId, - { query: {} }, - RowExportFormat.CSV - ) - - // import all twice - const rows = await config.api.table.csvToJson({ - csvString: exportedValue, + const expectedRow = { + ...expectedRowData, + _id: expect.any(String), + _rev: expect.any(String), + type: "row", + tableId: 
tableId, + createdAt: new Date().toISOString(), + updatedAt: new Date().toISOString(), + } + expect(allRows).toEqual([expectedRow, expectedRow, expectedRow]) + }) }) - await config.api.row.bulkImport(tableId, { - rows, - }) - await config.api.row.bulkImport(tableId, { - rows, - }) - - const { rows: allRows } = await config.api.row.search(tableId) - - const expectedRow = { - ...expectedRowData, - _id: expect.any(String), - _rev: expect.any(String), - type: "row", - tableId: tableId, - createdAt: new Date().toISOString(), - updatedAt: new Date().toISOString(), - } - expect(allRows).toEqual([expectedRow, expectedRow, expectedRow]) - }) - }) - }) - - let o2mTable: Table - let m2mTable: Table - beforeAll(async () => { - o2mTable = await config.api.table.save(defaultTable()) - m2mTable = await config.api.table.save(defaultTable()) - }) - - describe.each([ - [ - "relationship fields", - (): Record => ({ - user: { - name: "user", - relationshipType: RelationshipType.ONE_TO_MANY, - type: FieldType.LINK, - tableId: o2mTable._id!, - fieldName: "fk_o2m", - }, - users: { - name: "users", - relationshipType: RelationshipType.MANY_TO_MANY, - type: FieldType.LINK, - tableId: m2mTable._id!, - fieldName: "fk_m2m", - }, - }), - (tableId: string) => - config.api.row.save(tableId, { - name: uuid.v4(), - description: generator.paragraph(), - tableId, - }), - (row: Row) => ({ - _id: row._id, - primaryDisplay: row.name, - }), - ], - [ - "bb reference fields", - (): Record => ({ - user: { - name: "user", - type: FieldType.BB_REFERENCE, - subtype: BBReferenceFieldSubType.USER, - }, - users: { - name: "users", - type: FieldType.BB_REFERENCE, - subtype: BBReferenceFieldSubType.USERS, - }, - }), - () => config.createUser(), - (row: Row) => ({ - _id: row._id, - primaryDisplay: row.email, - email: row.email, - firstName: row.firstName, - lastName: row.lastName, - }), - ], - ])("links - %s", (__, relSchema, dataGenerator, resultMapper) => { - let tableId: string - let o2mData: Row[] - let m2mData: Row[] - - beforeAll(async () => { - const table = await config.api.table.save( - defaultTable({ schema: relSchema() }) - ) - tableId = table._id! - - o2mData = [ - await dataGenerator(o2mTable._id!), - await dataGenerator(o2mTable._id!), - await dataGenerator(o2mTable._id!), - await dataGenerator(o2mTable._id!), - ] - - m2mData = [ - await dataGenerator(m2mTable._id!), - await dataGenerator(m2mTable._id!), - await dataGenerator(m2mTable._id!), - await dataGenerator(m2mTable._id!), - ] - }) - - it("can save a row when relationship fields are empty", async () => { - const row = await config.api.row.save(tableId, { - name: "foo", - description: "bar", }) - expect(row).toEqual({ - _id: expect.any(String), - _rev: expect.any(String), - id: isInternal ? undefined : expect.any(Number), - type: isInternal ? "row" : undefined, - name: "foo", - description: "bar", - tableId, - }) - }) - - it("can save a row with a single relationship field", async () => { - const user = _.sample(o2mData)! - const row = await config.api.row.save(tableId, { - name: "foo", - description: "bar", - user: [user], - }) - - expect(row).toEqual({ - name: "foo", - description: "bar", - tableId, - user: [user].map(u => resultMapper(u)), - _id: expect.any(String), - _rev: expect.any(String), - id: isInternal ? undefined : expect.any(Number), - type: isInternal ? "row" : undefined, - [`fk_${o2mTable.name}_fk_o2m`]: isInternal ? 
undefined : user.id, - }) - }) - - it("can save a row with a multiple relationship field", async () => { - const selectedUsers = _.sampleSize(m2mData, 2) - const row = await config.api.row.save(tableId, { - name: "foo", - description: "bar", - users: selectedUsers, - }) - - expect(row).toEqual({ - name: "foo", - description: "bar", - tableId, - users: expect.arrayContaining(selectedUsers.map(u => resultMapper(u))), - _id: expect.any(String), - _rev: expect.any(String), - id: isInternal ? undefined : expect.any(Number), - type: isInternal ? "row" : undefined, - }) - }) - - it("can retrieve rows with no populated relationships", async () => { - const row = await config.api.row.save(tableId, { - name: "foo", - description: "bar", - }) - - const retrieved = await config.api.row.get(tableId, row._id!) - expect(retrieved).toEqual({ - name: "foo", - description: "bar", - tableId, - user: undefined, - users: undefined, - _id: row._id, - _rev: expect.any(String), - id: isInternal ? undefined : expect.any(Number), - ...defaultRowFields, - }) - }) - - it("can retrieve rows with populated relationships", async () => { - const user1 = _.sample(o2mData)! - const [user2, user3] = _.sampleSize(m2mData, 2) - - const row = await config.api.row.save(tableId, { - name: "foo", - description: "bar", - users: [user2, user3], - user: [user1], - }) - - const retrieved = await config.api.row.get(tableId, row._id!) - expect(retrieved).toEqual({ - name: "foo", - description: "bar", - tableId, - user: expect.arrayContaining([user1].map(u => resultMapper(u))), - users: expect.arrayContaining([user2, user3].map(u => resultMapper(u))), - _id: row._id, - _rev: expect.any(String), - id: isInternal ? undefined : expect.any(Number), - [`fk_${o2mTable.name}_fk_o2m`]: isInternal ? undefined : user1.id, - ...defaultRowFields, - }) - }) - - it("can update an existing populated row", async () => { - const user = _.sample(o2mData)! - const [users1, users2, users3] = _.sampleSize(m2mData, 3) - - const row = await config.api.row.save(tableId, { - name: "foo", - description: "bar", - users: [users1, users2], - }) - - const updatedRow = await config.api.row.save(tableId, { - ...row, - user: [user], - users: [users3, users1], - }) - expect(updatedRow).toEqual({ - name: "foo", - description: "bar", - tableId, - user: expect.arrayContaining([user].map(u => resultMapper(u))), - users: expect.arrayContaining( - [users3, users1].map(u => resultMapper(u)) - ), - _id: row._id, - _rev: expect.any(String), - id: isInternal ? undefined : expect.any(Number), - type: isInternal ? "row" : undefined, - [`fk_${o2mTable.name}_fk_o2m`]: isInternal ? undefined : user.id, - }) - }) - - it("can wipe an existing populated relationships in row", async () => { - const [user1, user2] = _.sampleSize(m2mData, 2) - const row = await config.api.row.save(tableId, { - name: "foo", - description: "bar", - users: [user1, user2], - }) - - const updatedRow = await config.api.row.save(tableId, { - ...row, - user: null, - users: null, - }) - expect(updatedRow).toEqual({ - name: "foo", - description: "bar", - tableId, - _id: row._id, - _rev: expect.any(String), - id: isInternal ? undefined : expect.any(Number), - type: isInternal ? 
"row" : undefined, - }) - }) - - it("fetch all will populate the relationships", async () => { - const [user1] = _.sampleSize(o2mData, 1) - const [users1, users2, users3] = _.sampleSize(m2mData, 3) - - const rows = [ - { - name: generator.name(), - description: generator.name(), - users: [users1, users2], - }, - { - name: generator.name(), - description: generator.name(), - user: [user1], - users: [users1, users3], - }, - { - name: generator.name(), - description: generator.name(), - users: [users3], - }, - ] - - await config.api.row.save(tableId, rows[0]) - await config.api.row.save(tableId, rows[1]) - await config.api.row.save(tableId, rows[2]) - - const res = await config.api.row.fetch(tableId) - - expect(res).toEqual( - expect.arrayContaining( - rows.map(r => ({ - name: r.name, - description: r.description, - tableId, - user: r.user?.map(u => resultMapper(u)), - users: r.users?.length - ? expect.arrayContaining(r.users?.map(u => resultMapper(u))) - : undefined, - _id: expect.any(String), - _rev: expect.any(String), - id: isInternal ? undefined : expect.any(Number), - [`fk_${o2mTable.name}_fk_o2m`]: - isInternal || !r.user?.length ? undefined : r.user[0].id, - ...defaultRowFields, - })) - ) - ) - }) - - it("search all will populate the relationships", async () => { - const [user1] = _.sampleSize(o2mData, 1) - const [users1, users2, users3] = _.sampleSize(m2mData, 3) - - const rows = [ - { - name: generator.name(), - description: generator.name(), - users: [users1, users2], - }, - { - name: generator.name(), - description: generator.name(), - user: [user1], - users: [users1, users3], - }, - { - name: generator.name(), - description: generator.name(), - users: [users3], - }, - ] - - await config.api.row.save(tableId, rows[0]) - await config.api.row.save(tableId, rows[1]) - await config.api.row.save(tableId, rows[2]) - - const res = await config.api.row.search(tableId) - - expect(res).toEqual({ - rows: expect.arrayContaining( - rows.map(r => ({ - name: r.name, - description: r.description, - tableId, - user: r.user?.map(u => resultMapper(u)), - users: r.users?.length - ? expect.arrayContaining(r.users?.map(u => resultMapper(u))) - : undefined, - _id: expect.any(String), - _rev: expect.any(String), - id: isInternal ? undefined : expect.any(Number), - [`fk_${o2mTable.name}_fk_o2m`]: - isInternal || !r.user?.length ? undefined : r.user[0].id, - ...defaultRowFields, - })) - ), - ...(isInternal - ? 
{} - : { - hasNextPage: false, - }), - }) - }) - }) - - // Upserting isn't yet supported in MSSQL or Oracle, see: - // https://github.com/knex/knex/pull/6050 - !isMSSQL && - !isOracle && - describe("relationships", () => { - let tableId: string - let viewId: string - - let auxData: Row[] = [] - + let o2mTable: Table + let m2mTable: Table beforeAll(async () => { - const aux2Table = await config.api.table.save(saveTableRequest()) - const aux2Data = await config.api.row.save(aux2Table._id!, {}) - - const auxTable = await config.api.table.save( - saveTableRequest({ - primaryDisplay: "name", - schema: { - name: { - name: "name", - type: FieldType.STRING, - constraints: { presence: true }, - }, - age: { - name: "age", - type: FieldType.NUMBER, - constraints: { presence: true }, - }, - address: { - name: "address", - type: FieldType.STRING, - constraints: { presence: true }, - visible: false, - }, - link: { - name: "link", - type: FieldType.LINK, - tableId: aux2Table._id!, - relationshipType: RelationshipType.MANY_TO_MANY, - fieldName: "fk_aux", - constraints: { presence: true }, - }, - formula: { - name: "formula", - type: FieldType.FORMULA, - formula: "{{ any }}", - constraints: { presence: true }, - }, - }, - }) - ) - const auxTableId = auxTable._id! - - for (const name of generator.unique(() => generator.name(), 10)) { - auxData.push( - await config.api.row.save(auxTableId, { - name, - age: generator.age(), - address: generator.address(), - link: [aux2Data], - }) - ) - } - - const table = await config.api.table.save( - saveTableRequest({ - schema: { - title: { - name: "title", - type: FieldType.STRING, - constraints: { presence: true }, - }, - relWithNoSchema: { - name: "relWithNoSchema", - relationshipType: RelationshipType.ONE_TO_MANY, - type: FieldType.LINK, - tableId: auxTableId, - fieldName: "fk_relWithNoSchema", - constraints: { presence: true }, - }, - relWithEmptySchema: { - name: "relWithEmptySchema", - relationshipType: RelationshipType.ONE_TO_MANY, - type: FieldType.LINK, - tableId: auxTableId, - fieldName: "fk_relWithEmptySchema", - constraints: { presence: true }, - }, - relWithFullSchema: { - name: "relWithFullSchema", - relationshipType: RelationshipType.ONE_TO_MANY, - type: FieldType.LINK, - tableId: auxTableId, - fieldName: "fk_relWithFullSchema", - constraints: { presence: true }, - }, - relWithHalfSchema: { - name: "relWithHalfSchema", - relationshipType: RelationshipType.ONE_TO_MANY, - type: FieldType.LINK, - tableId: auxTableId, - fieldName: "fk_relWithHalfSchema", - constraints: { presence: true }, - }, - relWithIllegalSchema: { - name: "relWithIllegalSchema", - relationshipType: RelationshipType.ONE_TO_MANY, - type: FieldType.LINK, - tableId: auxTableId, - fieldName: "fk_relWithIllegalSchema", - constraints: { presence: true }, - }, - }, - }) - ) - tableId = table._id! 
- const view = await config.api.viewV2.create({ - name: generator.guid(), - tableId, - schema: { - title: { - visible: true, - }, - relWithNoSchema: { - visible: true, - }, - relWithEmptySchema: { - visible: true, - columns: {}, - }, - relWithFullSchema: { - visible: true, - columns: Object.keys(auxTable.schema).reduce< - Record - >((acc, c) => ({ ...acc, [c]: { visible: true } }), {}), - }, - relWithHalfSchema: { - visible: true, - columns: { - name: { visible: true }, - age: { visible: false, readonly: true }, - }, - }, - relWithIllegalSchema: { - visible: true, - columns: { - name: { visible: true }, - address: { visible: true }, - unexisting: { visible: true }, - }, - }, - }, - }) - - viewId = view.id + o2mTable = await config.api.table.save(defaultTable()) + m2mTable = await config.api.table.save(defaultTable()) }) - const testScenarios: [string, (row: Row) => Promise | Row][] = [ - ["get row", (row: Row) => config.api.row.get(viewId, row._id!)], + describe.each([ [ - "from view search", - async (row: Row) => { - const { rows } = await config.api.viewV2.search(viewId) - return rows.find(r => r._id === row._id!) - }, - ], - ["from original saved row", (row: Row) => row], - ["from updated row", (row: Row) => config.api.row.save(viewId, row)], - ] - - it.each(testScenarios)( - "can retrieve rows with populated relationships (via %s)", - async (__, retrieveDelegate) => { - const otherRows = _.sampleSize(auxData, 5) - - const row = await config.api.row.save(viewId, { - title: generator.word(), - relWithNoSchema: [otherRows[0]], - relWithEmptySchema: [otherRows[1]], - relWithFullSchema: [otherRows[2]], - relWithHalfSchema: [otherRows[3]], - relWithIllegalSchema: [otherRows[4]], - }) - - const retrieved = await retrieveDelegate(row) - - expect(retrieved).toEqual( - expect.objectContaining({ - title: row.title, - relWithNoSchema: [ - { - _id: otherRows[0]._id, - primaryDisplay: otherRows[0].name, - }, - ], - relWithEmptySchema: [ - { - _id: otherRows[1]._id, - primaryDisplay: otherRows[1].name, - }, - ], - relWithFullSchema: [ - { - _id: otherRows[2]._id, - primaryDisplay: otherRows[2].name, - name: otherRows[2].name, - age: otherRows[2].age, - id: otherRows[2].id, - }, - ], - relWithHalfSchema: [ - { - _id: otherRows[3]._id, - primaryDisplay: otherRows[3].name, - name: otherRows[3].name, - }, - ], - relWithIllegalSchema: [ - { - _id: otherRows[4]._id, - primaryDisplay: otherRows[4].name, - name: otherRows[4].name, - }, - ], - }) - ) - } - ) - - it.each([ - [ - "from table fetch", - async (row: Row) => { - const rows = await config.api.row.fetch(tableId) - return rows.find(r => r._id === row._id!) - }, - ], - [ - "from table search", - async (row: Row) => { - const { rows } = await config.api.row.search(tableId) - return rows.find(r => r._id === row._id!) 
- }, - ], - ])( - "does not enrich when fetching from the table (via %s)", - async (__, retrieveDelegate) => { - const otherRows = _.sampleSize(auxData, 5) - - const row = await config.api.row.save(viewId, { - title: generator.word(), - relWithNoSchema: [otherRows[0]], - relWithEmptySchema: [otherRows[1]], - relWithFullSchema: [otherRows[2]], - relWithHalfSchema: [otherRows[3]], - relWithIllegalSchema: [otherRows[4]], - }) - - const retrieved = await retrieveDelegate(row) - - expect(retrieved).toEqual( - expect.objectContaining({ - title: row.title, - relWithNoSchema: [ - { - _id: otherRows[0]._id, - primaryDisplay: otherRows[0].name, - }, - ], - relWithEmptySchema: [ - { - _id: otherRows[1]._id, - primaryDisplay: otherRows[1].name, - }, - ], - relWithFullSchema: [ - { - _id: otherRows[2]._id, - primaryDisplay: otherRows[2].name, - }, - ], - relWithHalfSchema: [ - { - _id: otherRows[3]._id, - primaryDisplay: otherRows[3].name, - }, - ], - relWithIllegalSchema: [ - { - _id: otherRows[4]._id, - primaryDisplay: otherRows[4].name, - }, - ], - }) - ) - } - ) - }) - - isInternal && - describe("AI fields", () => { - let table: Table - - beforeAll(async () => { - mocks.licenses.useBudibaseAI() - mocks.licenses.useAICustomConfigs() - table = await config.api.table.save( - saveTableRequest({ - schema: { - ai: { - name: "ai", - type: FieldType.AI, - operation: AIOperationEnum.PROMPT, - prompt: "Convert the following to German: '{{ product }}'", - }, - product: { - name: "product", - type: FieldType.STRING, - }, - }, - }) - ) - - await config.api.row.save(table._id!, { - product: generator.word(), - }) - }) - - afterAll(() => { - jest.unmock("@budibase/pro") - }) - - it("should be able to save a row with an AI column", async () => { - const { rows } = await config.api.row.search(table._id!) - expect(rows.length).toBe(1) - expect(rows[0].ai).toEqual("Mock LLM Response") - }) - - it("should be able to update a row with an AI column", async () => { - const { rows } = await config.api.row.search(table._id!) 
- expect(rows.length).toBe(1) - await config.api.row.save(table._id!, { - product: generator.word(), - ...rows[0], - }) - expect(rows.length).toBe(1) - expect(rows[0].ai).toEqual("Mock LLM Response") - }) - }) - - describe("Formula fields", () => { - let table: Table - let otherTable: Table - let relatedRow: Row, mainRow: Row - - beforeAll(async () => { - otherTable = await config.api.table.save(defaultTable()) - table = await config.api.table.save( - saveTableRequest({ - schema: { - links: { - name: "links", - fieldName: "links", - type: FieldType.LINK, - tableId: otherTable._id!, + "relationship fields", + (): Record => ({ + user: { + name: "user", relationshipType: RelationshipType.ONE_TO_MANY, + type: FieldType.LINK, + tableId: o2mTable._id!, + fieldName: "fk_o2m", }, - formula: { - name: "formula", - type: FieldType.FORMULA, - formula: "{{ links.0.name }}", - formulaType: FormulaType.DYNAMIC, + users: { + name: "users", + relationshipType: RelationshipType.MANY_TO_MANY, + type: FieldType.LINK, + tableId: m2mTable._id!, + fieldName: "fk_m2m", }, - }, + }), + (tableId: string) => + config.api.row.save(tableId, { + name: uuid.v4(), + description: generator.paragraph(), + tableId, + }), + (row: Row) => ({ + _id: row._id, + primaryDisplay: row.name, + }), + ], + [ + "bb reference fields", + (): Record => ({ + user: { + name: "user", + type: FieldType.BB_REFERENCE, + subtype: BBReferenceFieldSubType.USER, + }, + users: { + name: "users", + type: FieldType.BB_REFERENCE, + subtype: BBReferenceFieldSubType.USERS, + }, + }), + () => config.createUser(), + (row: Row) => ({ + _id: row._id, + primaryDisplay: row.email, + email: row.email, + firstName: row.firstName, + lastName: row.lastName, + }), + ], + ])("links - %s", (__, relSchema, dataGenerator, resultMapper) => { + let tableId: string + let o2mData: Row[] + let m2mData: Row[] + + beforeAll(async () => { + const table = await config.api.table.save( + defaultTable({ schema: relSchema() }) + ) + tableId = table._id! + + o2mData = [ + await dataGenerator(o2mTable._id!), + await dataGenerator(o2mTable._id!), + await dataGenerator(o2mTable._id!), + await dataGenerator(o2mTable._id!), + ] + + m2mData = [ + await dataGenerator(m2mTable._id!), + await dataGenerator(m2mTable._id!), + await dataGenerator(m2mTable._id!), + await dataGenerator(m2mTable._id!), + ] }) - ) - relatedRow = await config.api.row.save(otherTable._id!, { - name: generator.word(), - description: generator.paragraph(), - }) - mainRow = await config.api.row.save(table._id!, { - name: generator.word(), - description: generator.paragraph(), - tableId: table._id!, - links: [relatedRow._id], - }) - }) + it("can save a row when relationship fields are empty", async () => { + const row = await config.api.row.save(tableId, { + name: "foo", + description: "bar", + }) - async function updateFormulaColumn( - formula: string, - opts?: { responseType?: FormulaResponseType; formulaType?: FormulaType } - ) { - table = await config.api.table.save({ - ...table, - schema: { - ...table.schema, - formula: { - name: "formula", - type: FieldType.FORMULA, - formula: formula, - responseType: opts?.responseType, - formulaType: opts?.formulaType || FormulaType.DYNAMIC, - }, - }, - }) - } - - it("should be able to search for rows containing formulas", async () => { - const { rows } = await config.api.row.search(table._id!) 
- expect(rows.length).toBe(1) - expect(rows[0].links.length).toBe(1) - const row = rows[0] - expect(row.formula).toBe(relatedRow.name) - }) - - it("should coerce - number response type", async () => { - await updateFormulaColumn(encodeJS("return 1"), { - responseType: FieldType.NUMBER, - }) - const { rows } = await config.api.row.search(table._id!) - expect(rows[0].formula).toBe(1) - }) - - it("should coerce - boolean response type", async () => { - await updateFormulaColumn(encodeJS("return true"), { - responseType: FieldType.BOOLEAN, - }) - const { rows } = await config.api.row.search(table._id!) - expect(rows[0].formula).toBe(true) - }) - - it("should coerce - datetime response type", async () => { - await updateFormulaColumn(encodeJS("return new Date()"), { - responseType: FieldType.DATETIME, - }) - const { rows } = await config.api.row.search(table._id!) - expect(isDate(rows[0].formula)).toBe(true) - }) - - it("should coerce - datetime with invalid value", async () => { - await updateFormulaColumn(encodeJS("return 'a'"), { - responseType: FieldType.DATETIME, - }) - const { rows } = await config.api.row.search(table._id!) - expect(rows[0].formula).toBeUndefined() - }) - - it("should coerce handlebars", async () => { - await updateFormulaColumn("{{ add 1 1 }}", { - responseType: FieldType.NUMBER, - }) - const { rows } = await config.api.row.search(table._id!) - expect(rows[0].formula).toBe(2) - }) - - it("should coerce handlebars to string (default)", async () => { - await updateFormulaColumn("{{ add 1 1 }}", { - responseType: FieldType.STRING, - }) - const { rows } = await config.api.row.search(table._id!) - expect(rows[0].formula).toBe("2") - }) - - isInternal && - it("should coerce a static handlebars formula", async () => { - await updateFormulaColumn(encodeJS("return 1"), { - responseType: FieldType.NUMBER, - formulaType: FormulaType.STATIC, + expect(row).toEqual({ + _id: expect.any(String), + _rev: expect.any(String), + id: isInternal ? undefined : expect.any(Number), + type: isInternal ? "row" : undefined, + name: "foo", + description: "bar", + tableId, + }) + }) + + it("can save a row with a single relationship field", async () => { + const user = _.sample(o2mData)! + const row = await config.api.row.save(tableId, { + name: "foo", + description: "bar", + user: [user], + }) + + expect(row).toEqual({ + name: "foo", + description: "bar", + tableId, + user: [user].map(u => resultMapper(u)), + _id: expect.any(String), + _rev: expect.any(String), + id: isInternal ? undefined : expect.any(Number), + type: isInternal ? "row" : undefined, + [`fk_${o2mTable.name}_fk_o2m`]: isInternal ? undefined : user.id, + }) + }) + + it("can save a row with a multiple relationship field", async () => { + const selectedUsers = _.sampleSize(m2mData, 2) + const row = await config.api.row.save(tableId, { + name: "foo", + description: "bar", + users: selectedUsers, + }) + + expect(row).toEqual({ + name: "foo", + description: "bar", + tableId, + users: expect.arrayContaining( + selectedUsers.map(u => resultMapper(u)) + ), + _id: expect.any(String), + _rev: expect.any(String), + id: isInternal ? undefined : expect.any(Number), + type: isInternal ? "row" : undefined, + }) + }) + + it("can retrieve rows with no populated relationships", async () => { + const row = await config.api.row.save(tableId, { + name: "foo", + description: "bar", + }) + + const retrieved = await config.api.row.get(tableId, row._id!) 
+ expect(retrieved).toEqual({ + name: "foo", + description: "bar", + tableId, + user: undefined, + users: undefined, + _id: row._id, + _rev: expect.any(String), + id: isInternal ? undefined : expect.any(Number), + ...defaultRowFields, + }) + }) + + it("can retrieve rows with populated relationships", async () => { + const user1 = _.sample(o2mData)! + const [user2, user3] = _.sampleSize(m2mData, 2) + + const row = await config.api.row.save(tableId, { + name: "foo", + description: "bar", + users: [user2, user3], + user: [user1], + }) + + const retrieved = await config.api.row.get(tableId, row._id!) + expect(retrieved).toEqual({ + name: "foo", + description: "bar", + tableId, + user: expect.arrayContaining([user1].map(u => resultMapper(u))), + users: expect.arrayContaining( + [user2, user3].map(u => resultMapper(u)) + ), + _id: row._id, + _rev: expect.any(String), + id: isInternal ? undefined : expect.any(Number), + [`fk_${o2mTable.name}_fk_o2m`]: isInternal ? undefined : user1.id, + ...defaultRowFields, + }) + }) + + it("can update an existing populated row", async () => { + const user = _.sample(o2mData)! + const [users1, users2, users3] = _.sampleSize(m2mData, 3) + + const row = await config.api.row.save(tableId, { + name: "foo", + description: "bar", + users: [users1, users2], + }) + + const updatedRow = await config.api.row.save(tableId, { + ...row, + user: [user], + users: [users3, users1], + }) + expect(updatedRow).toEqual({ + name: "foo", + description: "bar", + tableId, + user: expect.arrayContaining([user].map(u => resultMapper(u))), + users: expect.arrayContaining( + [users3, users1].map(u => resultMapper(u)) + ), + _id: row._id, + _rev: expect.any(String), + id: isInternal ? undefined : expect.any(Number), + type: isInternal ? "row" : undefined, + [`fk_${o2mTable.name}_fk_o2m`]: isInternal ? undefined : user.id, + }) + }) + + it("can wipe an existing populated relationships in row", async () => { + const [user1, user2] = _.sampleSize(m2mData, 2) + const row = await config.api.row.save(tableId, { + name: "foo", + description: "bar", + users: [user1, user2], + }) + + const updatedRow = await config.api.row.save(tableId, { + ...row, + user: null, + users: null, + }) + expect(updatedRow).toEqual({ + name: "foo", + description: "bar", + tableId, + _id: row._id, + _rev: expect.any(String), + id: isInternal ? undefined : expect.any(Number), + type: isInternal ? "row" : undefined, + }) + }) + + it("fetch all will populate the relationships", async () => { + const [user1] = _.sampleSize(o2mData, 1) + const [users1, users2, users3] = _.sampleSize(m2mData, 3) + + const rows = [ + { + name: generator.name(), + description: generator.name(), + users: [users1, users2], + }, + { + name: generator.name(), + description: generator.name(), + user: [user1], + users: [users1, users3], + }, + { + name: generator.name(), + description: generator.name(), + users: [users3], + }, + ] + + await config.api.row.save(tableId, rows[0]) + await config.api.row.save(tableId, rows[1]) + await config.api.row.save(tableId, rows[2]) + + const res = await config.api.row.fetch(tableId) + + expect(res).toEqual( + expect.arrayContaining( + rows.map(r => ({ + name: r.name, + description: r.description, + tableId, + user: r.user?.map(u => resultMapper(u)), + users: r.users?.length + ? expect.arrayContaining(r.users?.map(u => resultMapper(u))) + : undefined, + _id: expect.any(String), + _rev: expect.any(String), + id: isInternal ? 
undefined : expect.any(Number), + [`fk_${o2mTable.name}_fk_o2m`]: + isInternal || !r.user?.length ? undefined : r.user[0].id, + ...defaultRowFields, + })) + ) + ) + }) + + it("search all will populate the relationships", async () => { + const [user1] = _.sampleSize(o2mData, 1) + const [users1, users2, users3] = _.sampleSize(m2mData, 3) + + const rows = [ + { + name: generator.name(), + description: generator.name(), + users: [users1, users2], + }, + { + name: generator.name(), + description: generator.name(), + user: [user1], + users: [users1, users3], + }, + { + name: generator.name(), + description: generator.name(), + users: [users3], + }, + ] + + await config.api.row.save(tableId, rows[0]) + await config.api.row.save(tableId, rows[1]) + await config.api.row.save(tableId, rows[2]) + + const res = await config.api.row.search(tableId) + + expect(res).toEqual({ + rows: expect.arrayContaining( + rows.map(r => ({ + name: r.name, + description: r.description, + tableId, + user: r.user?.map(u => resultMapper(u)), + users: r.users?.length + ? expect.arrayContaining(r.users?.map(u => resultMapper(u))) + : undefined, + _id: expect.any(String), + _rev: expect.any(String), + id: isInternal ? undefined : expect.any(Number), + [`fk_${o2mTable.name}_fk_o2m`]: + isInternal || !r.user?.length ? undefined : r.user[0].id, + ...defaultRowFields, + })) + ), + ...(isInternal + ? {} + : { + hasNextPage: false, + }), + }) }) - // save the row to store the static value - await config.api.row.save(table._id!, mainRow) - const { rows } = await config.api.row.search(table._id!) - expect(rows[0].formula).toBe(1) }) - }) - describe("Formula JS protection", () => { - it("should time out JS execution if a single cell takes too long", async () => { - await withEnv({ JS_PER_INVOCATION_TIMEOUT_MS: 40 }, async () => { - const js = encodeJS( - ` - let i = 0; - while (true) { - i++; - } - return i; - ` - ) + // Upserting isn't yet supported in MSSQL or Oracle, see: + // https://github.com/knex/knex/pull/6050 + !isMSSQL && + !isOracle && + describe("relationships", () => { + let tableId: string + let viewId: string - const table = await config.api.table.save( - saveTableRequest({ - schema: { - text: { - name: "text", - type: FieldType.STRING, + let auxData: Row[] = [] + + beforeAll(async () => { + const aux2Table = await config.api.table.save(saveTableRequest()) + const aux2Data = await config.api.row.save(aux2Table._id!, {}) + + const auxTable = await config.api.table.save( + saveTableRequest({ + primaryDisplay: "name", + schema: { + name: { + name: "name", + type: FieldType.STRING, + constraints: { presence: true }, + }, + age: { + name: "age", + type: FieldType.NUMBER, + constraints: { presence: true }, + }, + address: { + name: "address", + type: FieldType.STRING, + constraints: { presence: true }, + visible: false, + }, + link: { + name: "link", + type: FieldType.LINK, + tableId: aux2Table._id!, + relationshipType: RelationshipType.MANY_TO_MANY, + fieldName: "fk_aux", + constraints: { presence: true }, + }, + formula: { + name: "formula", + type: FieldType.FORMULA, + formula: "{{ any }}", + constraints: { presence: true }, + }, + }, + }) + ) + const auxTableId = auxTable._id! 
+ + for (const name of generator.unique(() => generator.name(), 10)) { + auxData.push( + await config.api.row.save(auxTableId, { + name, + age: generator.age(), + address: generator.address(), + link: [aux2Data], + }) + ) + } + + const table = await config.api.table.save( + saveTableRequest({ + schema: { + title: { + name: "title", + type: FieldType.STRING, + constraints: { presence: true }, + }, + relWithNoSchema: { + name: "relWithNoSchema", + relationshipType: RelationshipType.ONE_TO_MANY, + type: FieldType.LINK, + tableId: auxTableId, + fieldName: "fk_relWithNoSchema", + constraints: { presence: true }, + }, + relWithEmptySchema: { + name: "relWithEmptySchema", + relationshipType: RelationshipType.ONE_TO_MANY, + type: FieldType.LINK, + tableId: auxTableId, + fieldName: "fk_relWithEmptySchema", + constraints: { presence: true }, + }, + relWithFullSchema: { + name: "relWithFullSchema", + relationshipType: RelationshipType.ONE_TO_MANY, + type: FieldType.LINK, + tableId: auxTableId, + fieldName: "fk_relWithFullSchema", + constraints: { presence: true }, + }, + relWithHalfSchema: { + name: "relWithHalfSchema", + relationshipType: RelationshipType.ONE_TO_MANY, + type: FieldType.LINK, + tableId: auxTableId, + fieldName: "fk_relWithHalfSchema", + constraints: { presence: true }, + }, + relWithIllegalSchema: { + name: "relWithIllegalSchema", + relationshipType: RelationshipType.ONE_TO_MANY, + type: FieldType.LINK, + tableId: auxTableId, + fieldName: "fk_relWithIllegalSchema", + constraints: { presence: true }, + }, + }, + }) + ) + tableId = table._id! + const view = await config.api.viewV2.create({ + name: generator.guid(), + tableId, + schema: { + title: { + visible: true, + }, + relWithNoSchema: { + visible: true, + }, + relWithEmptySchema: { + visible: true, + columns: {}, + }, + relWithFullSchema: { + visible: true, + columns: Object.keys(auxTable.schema).reduce< + Record + >((acc, c) => ({ ...acc, [c]: { visible: true } }), {}), + }, + relWithHalfSchema: { + visible: true, + columns: { + name: { visible: true }, + age: { visible: false, readonly: true }, + }, + }, + relWithIllegalSchema: { + visible: true, + columns: { + name: { visible: true }, + address: { visible: true }, + unexisting: { visible: true }, + }, + }, }, + }) + + viewId = view.id + }) + + const testScenarios: [string, (row: Row) => Promise | Row][] = [ + ["get row", (row: Row) => config.api.row.get(viewId, row._id!)], + [ + "from view search", + async (row: Row) => { + const { rows } = await config.api.viewV2.search(viewId) + return rows.find(r => r._id === row._id!) 
+ }, + ], + ["from original saved row", (row: Row) => row], + [ + "from updated row", + (row: Row) => config.api.row.save(viewId, row), + ], + ] + + it.each(testScenarios)( + "can retrieve rows with populated relationships (via %s)", + async (__, retrieveDelegate) => { + const otherRows = _.sampleSize(auxData, 5) + + const row = await config.api.row.save(viewId, { + title: generator.word(), + relWithNoSchema: [otherRows[0]], + relWithEmptySchema: [otherRows[1]], + relWithFullSchema: [otherRows[2]], + relWithHalfSchema: [otherRows[3]], + relWithIllegalSchema: [otherRows[4]], + }) + + const retrieved = await retrieveDelegate(row) + + expect(retrieved).toEqual( + expect.objectContaining({ + title: row.title, + relWithNoSchema: [ + { + _id: otherRows[0]._id, + primaryDisplay: otherRows[0].name, + }, + ], + relWithEmptySchema: [ + { + _id: otherRows[1]._id, + primaryDisplay: otherRows[1].name, + }, + ], + relWithFullSchema: [ + { + _id: otherRows[2]._id, + primaryDisplay: otherRows[2].name, + name: otherRows[2].name, + age: otherRows[2].age, + id: otherRows[2].id, + }, + ], + relWithHalfSchema: [ + { + _id: otherRows[3]._id, + primaryDisplay: otherRows[3].name, + name: otherRows[3].name, + }, + ], + relWithIllegalSchema: [ + { + _id: otherRows[4]._id, + primaryDisplay: otherRows[4].name, + name: otherRows[4].name, + }, + ], + }) + ) + } + ) + + it.each([ + [ + "from table fetch", + async (row: Row) => { + const rows = await config.api.row.fetch(tableId) + return rows.find(r => r._id === row._id!) + }, + ], + [ + "from table search", + async (row: Row) => { + const { rows } = await config.api.row.search(tableId) + return rows.find(r => r._id === row._id!) + }, + ], + ])( + "does not enrich when fetching from the table (via %s)", + async (__, retrieveDelegate) => { + const otherRows = _.sampleSize(auxData, 5) + + const row = await config.api.row.save(viewId, { + title: generator.word(), + relWithNoSchema: [otherRows[0]], + relWithEmptySchema: [otherRows[1]], + relWithFullSchema: [otherRows[2]], + relWithHalfSchema: [otherRows[3]], + relWithIllegalSchema: [otherRows[4]], + }) + + const retrieved = await retrieveDelegate(row) + + expect(retrieved).toEqual( + expect.objectContaining({ + title: row.title, + relWithNoSchema: [ + { + _id: otherRows[0]._id, + primaryDisplay: otherRows[0].name, + }, + ], + relWithEmptySchema: [ + { + _id: otherRows[1]._id, + primaryDisplay: otherRows[1].name, + }, + ], + relWithFullSchema: [ + { + _id: otherRows[2]._id, + primaryDisplay: otherRows[2].name, + }, + ], + relWithHalfSchema: [ + { + _id: otherRows[3]._id, + primaryDisplay: otherRows[3].name, + }, + ], + relWithIllegalSchema: [ + { + _id: otherRows[4]._id, + primaryDisplay: otherRows[4].name, + }, + ], + }) + ) + } + ) + }) + + isInternal && + describe("AI fields", () => { + let table: Table + + beforeAll(async () => { + mocks.licenses.useBudibaseAI() + mocks.licenses.useAICustomConfigs() + table = await config.api.table.save( + saveTableRequest({ + schema: { + ai: { + name: "ai", + type: FieldType.AI, + operation: AIOperationEnum.PROMPT, + prompt: "Convert the following to German: '{{ product }}'", + }, + product: { + name: "product", + type: FieldType.STRING, + }, + }, + }) + ) + + await config.api.row.save(table._id!, { + product: generator.word(), + }) + }) + + afterAll(() => { + jest.unmock("@budibase/pro") + }) + + it("should be able to save a row with an AI column", async () => { + const { rows } = await config.api.row.search(table._id!) 
+ expect(rows.length).toBe(1) + expect(rows[0].ai).toEqual("Mock LLM Response") + }) + + it("should be able to update a row with an AI column", async () => { + const { rows } = await config.api.row.search(table._id!) + expect(rows.length).toBe(1) + await config.api.row.save(table._id!, { + product: generator.word(), + ...rows[0], + }) + expect(rows.length).toBe(1) + expect(rows[0].ai).toEqual("Mock LLM Response") + }) + }) + + describe("Formula fields", () => { + let table: Table + let otherTable: Table + let relatedRow: Row, mainRow: Row + + beforeAll(async () => { + otherTable = await config.api.table.save(defaultTable()) + table = await config.api.table.save( + saveTableRequest({ + schema: { + links: { + name: "links", + fieldName: "links", + type: FieldType.LINK, + tableId: otherTable._id!, + relationshipType: RelationshipType.ONE_TO_MANY, + }, + formula: { + name: "formula", + type: FieldType.FORMULA, + formula: "{{ links.0.name }}", + formulaType: FormulaType.DYNAMIC, + }, + }, + }) + ) + + relatedRow = await config.api.row.save(otherTable._id!, { + name: generator.word(), + description: generator.paragraph(), + }) + mainRow = await config.api.row.save(table._id!, { + name: generator.word(), + description: generator.paragraph(), + tableId: table._id!, + links: [relatedRow._id], + }) + }) + + async function updateFormulaColumn( + formula: string, + opts?: { + responseType?: FormulaResponseType + formulaType?: FormulaType + } + ) { + table = await config.api.table.save({ + ...table, + schema: { + ...table.schema, formula: { name: "formula", type: FieldType.FORMULA, - formula: js, - formulaType: FormulaType.DYNAMIC, + formula: formula, + responseType: opts?.responseType, + formulaType: opts?.formulaType || FormulaType.DYNAMIC, }, }, }) - ) + } - await config.api.row.save(table._id!, { text: "foo" }) - const { rows } = await config.api.row.search(table._id!) - expect(rows).toHaveLength(1) - const row = rows[0] - expect(row.text).toBe("foo") - expect(row.formula).toBe("Timed out while executing JS") + it("should be able to search for rows containing formulas", async () => { + const { rows } = await config.api.row.search(table._id!) + expect(rows.length).toBe(1) + expect(rows[0].links.length).toBe(1) + const row = rows[0] + expect(row.formula).toBe(relatedRow.name) + }) + + it("should coerce - number response type", async () => { + await updateFormulaColumn(encodeJS("return 1"), { + responseType: FieldType.NUMBER, + }) + const { rows } = await config.api.row.search(table._id!) + expect(rows[0].formula).toBe(1) + }) + + it("should coerce - boolean response type", async () => { + await updateFormulaColumn(encodeJS("return true"), { + responseType: FieldType.BOOLEAN, + }) + const { rows } = await config.api.row.search(table._id!) + expect(rows[0].formula).toBe(true) + }) + + it("should coerce - datetime response type", async () => { + await updateFormulaColumn(encodeJS("return new Date()"), { + responseType: FieldType.DATETIME, + }) + const { rows } = await config.api.row.search(table._id!) + expect(isDate(rows[0].formula)).toBe(true) + }) + + it("should coerce - datetime with invalid value", async () => { + await updateFormulaColumn(encodeJS("return 'a'"), { + responseType: FieldType.DATETIME, + }) + const { rows } = await config.api.row.search(table._id!) 
+ expect(rows[0].formula).toBeUndefined() + }) + + it("should coerce handlebars", async () => { + await updateFormulaColumn("{{ add 1 1 }}", { + responseType: FieldType.NUMBER, + }) + const { rows } = await config.api.row.search(table._id!) + expect(rows[0].formula).toBe(2) + }) + + it("should coerce handlebars to string (default)", async () => { + await updateFormulaColumn("{{ add 1 1 }}", { + responseType: FieldType.STRING, + }) + const { rows } = await config.api.row.search(table._id!) + expect(rows[0].formula).toBe("2") + }) + + isInternal && + it("should coerce a static handlebars formula", async () => { + await updateFormulaColumn(encodeJS("return 1"), { + responseType: FieldType.NUMBER, + formulaType: FormulaType.STATIC, + }) + // save the row to store the static value + await config.api.row.save(table._id!, mainRow) + const { rows } = await config.api.row.search(table._id!) + expect(rows[0].formula).toBe(1) + }) }) - }) - it("should time out JS execution if a multiple cells take too long", async () => { - await withEnv( - { - JS_PER_INVOCATION_TIMEOUT_MS: 40, - JS_PER_REQUEST_TIMEOUT_MS: 80, - }, - async () => { - const js = encodeJS( - ` + describe("Formula JS protection", () => { + it("should time out JS execution if a single cell takes too long", async () => { + await withEnv({ JS_PER_INVOCATION_TIMEOUT_MS: 40 }, async () => { + const js = encodeJS( + ` let i = 0; while (true) { i++; } return i; ` - ) + ) + const table = await config.api.table.save( + saveTableRequest({ + schema: { + text: { + name: "text", + type: FieldType.STRING, + }, + formula: { + name: "formula", + type: FieldType.FORMULA, + formula: js, + formulaType: FormulaType.DYNAMIC, + }, + }, + }) + ) + + await config.api.row.save(table._id!, { text: "foo" }) + const { rows } = await config.api.row.search(table._id!) + expect(rows).toHaveLength(1) + const row = rows[0] + expect(row.text).toBe("foo") + expect(row.formula).toBe("Timed out while executing JS") + }) + }) + + it("should time out JS execution if a multiple cells take too long", async () => { + await withEnv( + { + JS_PER_INVOCATION_TIMEOUT_MS: 40, + JS_PER_REQUEST_TIMEOUT_MS: 80, + }, + async () => { + const js = encodeJS( + ` + let i = 0; + while (true) { + i++; + } + return i; + ` + ) + + const table = await config.api.table.save( + saveTableRequest({ + schema: { + text: { + name: "text", + type: FieldType.STRING, + }, + formula: { + name: "formula", + type: FieldType.FORMULA, + formula: js, + formulaType: FormulaType.DYNAMIC, + }, + }, + }) + ) + + for (let i = 0; i < 10; i++) { + await config.api.row.save(table._id!, { text: "foo" }) + } + + // Run this test 3 times to make sure that there's no cross-request + // pollution of the execution time tracking. + for (let reqs = 0; reqs < 3; reqs++) { + const { rows } = await config.api.row.search(table._id!) + expect(rows).toHaveLength(10) + + let i = 0 + for (; i < 10; i++) { + const row = rows[i] + if (row.formula !== JsTimeoutError.message) { + break + } + } + + // Given the execution times are not deterministic, we can't be sure + // of the exact number of rows that were executed before the timeout + // but it should absolutely be at least 1. 
+ expect(i).toBeGreaterThan(0) + expect(i).toBeLessThan(5) + + for (; i < 10; i++) { + const row = rows[i] + expect(row.text).toBe("foo") + expect(row.formula).toStartWith("CPU time limit exceeded ") + } + } + } + ) + }) + + it("should not carry over context between formulas", async () => { + const js = encodeJS(`return $("[text]");`) const table = await config.api.table.save( saveTableRequest({ schema: { @@ -3391,82 +3472,29 @@ describe.each(databases)("/rows (%s)", dbName => { ) for (let i = 0; i < 10; i++) { - await config.api.row.save(table._id!, { text: "foo" }) + await config.api.row.save(table._id!, { text: `foo${i}` }) } - // Run this test 3 times to make sure that there's no cross-request - // pollution of the execution time tracking. - for (let reqs = 0; reqs < 3; reqs++) { - const { rows } = await config.api.row.search(table._id!) - expect(rows).toHaveLength(10) + const { rows } = await config.api.row.search(table._id!) + expect(rows).toHaveLength(10) - let i = 0 - for (; i < 10; i++) { - const row = rows[i] - if (row.formula !== JsTimeoutError.message) { - break - } - } - - // Given the execution times are not deterministic, we can't be sure - // of the exact number of rows that were executed before the timeout - // but it should absolutely be at least 1. - expect(i).toBeGreaterThan(0) - expect(i).toBeLessThan(5) - - for (; i < 10; i++) { - const row = rows[i] - expect(row.text).toBe("foo") - expect(row.formula).toStartWith("CPU time limit exceeded ") - } - } - } - ) - }) - - it("should not carry over context between formulas", async () => { - const js = encodeJS(`return $("[text]");`) - const table = await config.api.table.save( - saveTableRequest({ - schema: { - text: { - name: "text", - type: FieldType.STRING, - }, - formula: { - name: "formula", - type: FieldType.FORMULA, - formula: js, - formulaType: FormulaType.DYNAMIC, - }, - }, + const formulaValues = rows.map(r => r.formula) + expect(formulaValues).toEqual( + expect.arrayContaining([ + "foo0", + "foo1", + "foo2", + "foo3", + "foo4", + "foo5", + "foo6", + "foo7", + "foo8", + "foo9", + ]) + ) }) - ) - - for (let i = 0; i < 10; i++) { - await config.api.row.save(table._id!, { text: `foo${i}` }) - } - - const { rows } = await config.api.row.search(table._id!) 
- expect(rows).toHaveLength(10) - - const formulaValues = rows.map(r => r.formula) - expect(formulaValues).toEqual( - expect.arrayContaining([ - "foo0", - "foo1", - "foo2", - "foo3", - "foo4", - "foo5", - "foo6", - "foo7", - "foo8", - "foo9", - ]) - ) - }) - }) -}) - -// todo: remove me + }) + } + ) +} diff --git a/packages/server/src/integrations/tests/utils/index.ts b/packages/server/src/integrations/tests/utils/index.ts index 0c5c308fb2..dcdaece191 100644 --- a/packages/server/src/integrations/tests/utils/index.ts +++ b/packages/server/src/integrations/tests/utils/index.ts @@ -101,12 +101,9 @@ function createDummyTest() { }) } -export function datasourceDescribe( - opts: DatasourceDescribeOpts -): DatabaseName[] { +export function datasourceDescribe(opts: DatasourceDescribeOpts) { if (process.env.DATASOURCE === "none") { createDummyTest() - process.exit(0) } const { only, exclude } = opts @@ -128,15 +125,11 @@ export function datasourceDescribe( if (databases.length === 0) { createDummyTest() - process.exit(0) } - return databases -} - -export function datasourceProps(dbName: DatabaseName) { const config = new TestConfiguration() - return { + return databases.map(dbName => ({ + dbName, config, dsProvider: () => createDatasources(config, dbName), isInternal: dbName === DatabaseName.SQS, @@ -153,7 +146,7 @@ export function datasourceProps(dbName: DatabaseName) { isMongodb: dbName === DatabaseName.MONGODB, isMSSQL: dbName === DatabaseName.SQL_SERVER, isOracle: dbName === DatabaseName.ORACLE, - } + })) } function getDatasource( From 66ade79ff92b76b38fd969fd95778a6574468c6f Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Mon, 18 Nov 2024 17:55:40 +0000 Subject: [PATCH 03/16] Updating rest of test cases. --- .../src/api/routes/tests/datasource.spec.ts | 13 +- .../routes/tests/queries/generic-sql.spec.ts | 1692 ++-- .../api/routes/tests/queries/mongodb.spec.ts | 1252 +-- .../src/api/routes/tests/rowAction.spec.ts | 110 +- .../src/api/routes/tests/search.spec.ts | 7201 ++++++++------- .../server/src/api/routes/tests/table.spec.ts | 2672 +++--- .../src/api/routes/tests/viewV2.spec.ts | 8180 +++++++++-------- .../automations/tests/executeQuery.spec.ts | 125 +- .../tests/scenarios/scenarios.spec.ts | 11 +- .../server/src/integration-test/mysql.spec.ts | 204 +- .../src/integration-test/postgres.spec.ts | 477 +- .../sdk/app/rows/search/tests/search.spec.ts | 364 +- 12 files changed, 11243 insertions(+), 11058 deletions(-) diff --git a/packages/server/src/api/routes/tests/datasource.spec.ts b/packages/server/src/api/routes/tests/datasource.spec.ts index 6b811cc716..f3fac5b99b 100644 --- a/packages/server/src/api/routes/tests/datasource.spec.ts +++ b/packages/server/src/api/routes/tests/datasource.spec.ts @@ -164,9 +164,12 @@ describe("/datasources", () => { }) }) -datasourceDescribe( - { name: "%s", exclude: [DatabaseName.MONGODB, DatabaseName.SQS] }, - ({ config, dsProvider }) => { +const descriptions = datasourceDescribe({ + exclude: [DatabaseName.MONGODB, DatabaseName.SQS], +}) + +if (descriptions.length) { + describe.each(descriptions)("$dbName", ({ config, dsProvider }) => { let datasource: Datasource let rawDatasource: Datasource let client: Knex @@ -492,5 +495,5 @@ datasourceDescribe( ) }) }) - } -) + }) +} diff --git a/packages/server/src/api/routes/tests/queries/generic-sql.spec.ts b/packages/server/src/api/routes/tests/queries/generic-sql.spec.ts index aea4e5b2a2..44b21e0350 100644 --- a/packages/server/src/api/routes/tests/queries/generic-sql.spec.ts +++ 
b/packages/server/src/api/routes/tests/queries/generic-sql.spec.ts @@ -14,583 +14,479 @@ import { events } from "@budibase/backend-core" import { Knex } from "knex" import { generator } from "@budibase/backend-core/tests" -datasourceDescribe( - { name: "queries (%s)", exclude: [DatabaseName.MONGODB, DatabaseName.SQS] }, - ({ config, dsProvider, isOracle, isMSSQL, isPostgres }) => { - let rawDatasource: Datasource - let datasource: Datasource - let client: Knex +const descriptions = datasourceDescribe({ + exclude: [DatabaseName.MONGODB, DatabaseName.SQS], +}) - let tableName: string +if (descriptions.length) { + describe.each(descriptions)( + "queries ($dbName)", + ({ config, dsProvider, isOracle, isMSSQL, isPostgres }) => { + let rawDatasource: Datasource + let datasource: Datasource + let client: Knex - async function createQuery( - query: Partial, - expectations?: Expectations - ): Promise { - const defaultQuery: Query = { - datasourceId: datasource._id!, - name: "New Query", - parameters: [], - fields: {}, - schema: {}, - queryVerb: "read", - transformer: "return data", - readable: true, - } - if (query.fields?.sql && typeof query.fields.sql !== "string") { - throw new Error("Unable to create with knex structure in 'sql' field") - } - return await config.api.query.save( - { ...defaultQuery, ...query }, - expectations - ) - } + let tableName: string - beforeAll(async () => { - const ds = await dsProvider() - rawDatasource = ds.rawDatasource! - datasource = ds.datasource! - client = ds.client! - }) - - beforeEach(async () => { - // The Datasource API doesn ot return the password, but we need it later to - // connect to the underlying database, so we fill it back in here. - datasource.config!.password = rawDatasource.config!.password - - tableName = generator.guid() - - await client.schema.dropTableIfExists(tableName) - await client.schema.createTable(tableName, table => { - table.increments("id").primary() - table.string("name") - table.timestamp("birthday") - table.integer("number") - }) - - await client(tableName).insert([ - { name: "one" }, - { name: "two" }, - { name: "three" }, - { name: "four" }, - { name: "five" }, - ]) - - jest.clearAllMocks() - }) - - describe("query admin", () => { - describe("create", () => { - it("should be able to create a query", async () => { - const query = await createQuery({ - name: "New Query", - fields: { - sql: client(tableName).select("*").toString(), - }, - }) - - expect(query).toMatchObject({ - datasourceId: datasource._id!, - name: "New Query", - parameters: [], - fields: { - sql: client(tableName).select("*").toString(), - }, - schema: {}, - queryVerb: "read", - transformer: "return data", - readable: true, - createdAt: expect.any(String), - updatedAt: expect.any(String), - }) - - expect(events.query.created).toHaveBeenCalledTimes(1) - expect(events.query.updated).not.toHaveBeenCalled() - }) - }) - - describe("update", () => { - it("should be able to update a query", async () => { - const query = await createQuery({ - fields: { - sql: client(tableName).select("*").toString(), - }, - }) - - jest.clearAllMocks() - - const updatedQuery = await config.api.query.save({ - ...query, - name: "Updated Query", - fields: { - sql: client(tableName).where({ id: 1 }).toString(), - }, - }) - - expect(updatedQuery).toMatchObject({ - datasourceId: datasource._id!, - name: "Updated Query", - parameters: [], - fields: { - sql: client(tableName).where({ id: 1 }).toString(), - }, - schema: {}, - queryVerb: "read", - transformer: "return data", - readable: true, - }) 
- - expect(events.query.created).not.toHaveBeenCalled() - expect(events.query.updated).toHaveBeenCalledTimes(1) - }) - }) - - describe("delete", () => { - it("should be able to delete a query", async () => { - const query = await createQuery({ - fields: { - sql: client(tableName).select("*").toString(), - }, - }) - - await config.api.query.delete(query) - await config.api.query.get(query._id!, { status: 404 }) - - const queries = await config.api.query.fetch() - expect(queries).not.toContainEqual(query) - - expect(events.query.deleted).toHaveBeenCalledTimes(1) - expect(events.query.deleted).toHaveBeenCalledWith(datasource, query) - }) - }) - - describe("read", () => { - it("should be able to list queries", async () => { - const query = await createQuery({ - fields: { - sql: client(tableName).select("*").toString(), - }, - }) - - const queries = await config.api.query.fetch() - expect(queries).toContainEqual(query) - }) - - it("should strip sensitive fields for prod apps", async () => { - const query = await createQuery({ - fields: { - sql: client(tableName).select("*").toString(), - }, - }) - - await config.api.application.publish(config.getAppId()) - const prodQuery = await config.api.query.getProd(query._id!) - - expect(prodQuery._id).toEqual(query._id) - expect(prodQuery.fields).toBeUndefined() - expect(prodQuery.parameters).toBeUndefined() - expect(prodQuery.schema).toBeDefined() - }) - - isPostgres && - it("should be able to handle a JSON aggregate with newlines", async () => { - const jsonStatement = `COALESCE(json_build_object('name', name),'{"name":{}}'::json)` - const query = await createQuery({ - fields: { - sql: client(tableName) - .select([ - "*", - client.raw( - `${jsonStatement} as json,\n${jsonStatement} as json2` - ), - ]) - .toString(), - }, - }) - const res = await config.api.query.execute( - query._id!, - {}, - { - status: 200, - } - ) - expect(res).toBeDefined() - }) - }) - }) - - describe("preview", () => { - it("should be able to preview a query", async () => { - const request: QueryPreview = { + async function createQuery( + query: Partial, + expectations?: Expectations + ): Promise { + const defaultQuery: Query = { datasourceId: datasource._id!, - queryVerb: "read", - fields: { - sql: client(tableName).where({ id: 1 }).toString(), - }, + name: "New Query", parameters: [], - transformer: "return data", - name: datasource.name!, + fields: {}, schema: {}, + queryVerb: "read", + transformer: "return data", readable: true, } - const response = await config.api.query.preview(request) - expect(response.schema).toEqual({ - birthday: { - name: "birthday", - type: "string", - }, - id: { - name: "id", - type: "number", - }, - name: { - name: "name", - type: "string", - }, - number: { - name: "number", - type: "string", - }, - }) - expect(response.rows).toEqual([ - { - birthday: null, - id: 1, - name: "one", - number: null, - }, - ]) - expect(events.query.previewed).toHaveBeenCalledTimes(1) + if (query.fields?.sql && typeof query.fields.sql !== "string") { + throw new Error("Unable to create with knex structure in 'sql' field") + } + return await config.api.query.save( + { ...defaultQuery, ...query }, + expectations + ) + } + + beforeAll(async () => { + const ds = await dsProvider() + rawDatasource = ds.rawDatasource! + datasource = ds.datasource! + client = ds.client! 
}) - it("should update schema when column type changes from number to string", async () => { - const tableName = "schema_change_test" - await client.schema.dropTableIfExists(tableName) + beforeEach(async () => { + // The Datasource API doesn ot return the password, but we need it later to + // connect to the underlying database, so we fill it back in here. + datasource.config!.password = rawDatasource.config!.password + tableName = generator.guid() + + await client.schema.dropTableIfExists(tableName) await client.schema.createTable(tableName, table => { table.increments("id").primary() table.string("name") - table.integer("data") + table.timestamp("birthday") + table.integer("number") }) - await client(tableName).insert({ - name: "test", - data: 123, - }) - - const firstPreview = await config.api.query.preview({ - datasourceId: datasource._id!, - name: "Test Query", - queryVerb: "read", - fields: { - sql: client(tableName).select("*").toString(), - }, - parameters: [], - transformer: "return data", - schema: {}, - readable: true, - }) - - expect(firstPreview.schema).toEqual( - expect.objectContaining({ - data: { type: "number", name: "data" }, - }) - ) - - await client(tableName).delete() - await client.schema.alterTable(tableName, table => { - table.string("data").alter() - }) - - await client(tableName).insert({ - name: "test", - data: "string value", - }) - - const secondPreview = await config.api.query.preview({ - datasourceId: datasource._id!, - name: "Test Query", - queryVerb: "read", - fields: { - sql: client(tableName).select("*").toString(), - }, - parameters: [], - transformer: "return data", - schema: firstPreview.schema, - readable: true, - }) - - expect(secondPreview.schema).toEqual( - expect.objectContaining({ - data: { type: "string", name: "data" }, - }) - ) - }) - - it("should work with static variables", async () => { - const datasource = await config.api.datasource.create({ - ...rawDatasource, - config: { - ...rawDatasource.config, - staticVariables: { - foo: "bar", - }, - }, - }) - - const request: QueryPreview = { - datasourceId: datasource._id!, - queryVerb: "read", - fields: { - sql: `SELECT '{{ foo }}' AS foo ${isOracle ? "FROM dual" : ""}`, - }, - parameters: [], - transformer: "return data", - name: datasource.name!, - schema: {}, - readable: true, - } - - const response = await config.api.query.preview(request) - - let key = isOracle ? 
"FOO" : "foo" - expect(response.schema).toEqual({ - [key]: { - name: key, - type: "string", - }, - }) - - expect(response.rows).toEqual([ - { - [key]: "bar", - }, + await client(tableName).insert([ + { name: "one" }, + { name: "two" }, + { name: "three" }, + { name: "four" }, + { name: "five" }, ]) + + jest.clearAllMocks() }) - it("should work with dynamic variables", async () => { - const datasource = await config.api.datasource.create(rawDatasource) - - const basedOnQuery = await createQuery({ - datasourceId: datasource._id!, - fields: { - sql: client(tableName).select("name").where({ id: 1 }).toString(), - }, - }) - - await config.api.datasource.update({ - ...datasource, - config: { - ...datasource.config, - dynamicVariables: [ - { - queryId: basedOnQuery._id!, - name: "foo", - value: "{{ data[0].name }}", + describe("query admin", () => { + describe("create", () => { + it("should be able to create a query", async () => { + const query = await createQuery({ + name: "New Query", + fields: { + sql: client(tableName).select("*").toString(), }, - ], - }, - }) + }) - const preview = await config.api.query.preview({ - datasourceId: datasource._id!, - queryVerb: "read", - fields: { - sql: `SELECT '{{ foo }}' AS foo ${isOracle ? "FROM dual" : ""}`, - }, - parameters: [], - transformer: "return data", - name: datasource.name!, - schema: {}, - readable: true, - }) - - let key = isOracle ? "FOO" : "foo" - expect(preview.schema).toEqual({ - [key]: { - name: key, - type: "string", - }, - }) - - expect(preview.rows).toEqual([ - { - [key]: "one", - }, - ]) - }) - - it("should handle the dynamic base query being deleted", async () => { - const datasource = await config.api.datasource.create(rawDatasource) - - const basedOnQuery = await createQuery({ - datasourceId: datasource._id!, - fields: { - sql: client(tableName).select("name").where({ id: 1 }).toString(), - }, - }) - - await config.api.datasource.update({ - ...datasource, - config: { - ...datasource.config, - dynamicVariables: [ - { - queryId: basedOnQuery._id!, - name: "foo", - value: "{{ data[0].name }}", + expect(query).toMatchObject({ + datasourceId: datasource._id!, + name: "New Query", + parameters: [], + fields: { + sql: client(tableName).select("*").toString(), }, - ], - }, + schema: {}, + queryVerb: "read", + transformer: "return data", + readable: true, + createdAt: expect.any(String), + updatedAt: expect.any(String), + }) + + expect(events.query.created).toHaveBeenCalledTimes(1) + expect(events.query.updated).not.toHaveBeenCalled() + }) }) - await config.api.query.delete(basedOnQuery) - - const preview = await config.api.query.preview({ - datasourceId: datasource._id!, - queryVerb: "read", - fields: { - sql: `SELECT '{{ foo }}' AS foo ${isOracle ? "FROM dual" : ""}`, - }, - parameters: [], - transformer: "return data", - name: datasource.name!, - schema: {}, - readable: true, - }) - - let key = isOracle ? "FOO" : "foo" - expect(preview.schema).toEqual({ - [key]: { - name: key, - type: "string", - }, - }) - - expect(preview.rows).toEqual([{ [key]: isMSSQL ? 
"" : null }]) - }) - }) - - describe("query verbs", () => { - describe("create", () => { - it("should be able to insert with bindings", async () => { - const query = await createQuery({ - fields: { - sql: client(tableName).insert({ name: "{{ foo }}" }).toString(), - }, - parameters: [ - { - name: "foo", - default: "bar", + describe("update", () => { + it("should be able to update a query", async () => { + const query = await createQuery({ + fields: { + sql: client(tableName).select("*").toString(), }, - ], - queryVerb: "create", + }) + + jest.clearAllMocks() + + const updatedQuery = await config.api.query.save({ + ...query, + name: "Updated Query", + fields: { + sql: client(tableName).where({ id: 1 }).toString(), + }, + }) + + expect(updatedQuery).toMatchObject({ + datasourceId: datasource._id!, + name: "Updated Query", + parameters: [], + fields: { + sql: client(tableName).where({ id: 1 }).toString(), + }, + schema: {}, + queryVerb: "read", + transformer: "return data", + readable: true, + }) + + expect(events.query.created).not.toHaveBeenCalled() + expect(events.query.updated).toHaveBeenCalledTimes(1) + }) + }) + + describe("delete", () => { + it("should be able to delete a query", async () => { + const query = await createQuery({ + fields: { + sql: client(tableName).select("*").toString(), + }, + }) + + await config.api.query.delete(query) + await config.api.query.get(query._id!, { status: 404 }) + + const queries = await config.api.query.fetch() + expect(queries).not.toContainEqual(query) + + expect(events.query.deleted).toHaveBeenCalledTimes(1) + expect(events.query.deleted).toHaveBeenCalledWith(datasource, query) + }) + }) + + describe("read", () => { + it("should be able to list queries", async () => { + const query = await createQuery({ + fields: { + sql: client(tableName).select("*").toString(), + }, + }) + + const queries = await config.api.query.fetch() + expect(queries).toContainEqual(query) }) - const result = await config.api.query.execute(query._id!, { - parameters: { - foo: "baz", - }, + it("should strip sensitive fields for prod apps", async () => { + const query = await createQuery({ + fields: { + sql: client(tableName).select("*").toString(), + }, + }) + + await config.api.application.publish(config.getAppId()) + const prodQuery = await config.api.query.getProd(query._id!) + + expect(prodQuery._id).toEqual(query._id) + expect(prodQuery.fields).toBeUndefined() + expect(prodQuery.parameters).toBeUndefined() + expect(prodQuery.schema).toBeDefined() }) - expect(result.data).toEqual([ - { - created: true, - }, - ]) - - const rows = await client(tableName).where({ name: "baz" }).select() - expect(rows).toHaveLength(1) - for (const row of rows) { - expect(row).toMatchObject({ name: "baz" }) - } - }) - - it("should not allow handlebars as parameters", async () => { - const query = await createQuery({ - fields: { - sql: client(tableName).insert({ name: "{{ foo }}" }).toString(), - }, - parameters: [ - { - name: "foo", - default: "bar", - }, - ], - queryVerb: "create", - }) - - await config.api.query.execute( - query._id!, - { - parameters: { - foo: "{{ 'test' }}", - }, - }, - { - status: 400, - body: { - message: - "Parameter 'foo' input contains a handlebars binding - this is not allowed.", - }, - } - ) - }) - - // Oracle doesn't automatically coerce strings into dates. 
- !isOracle && - it.each(["2021-02-05T12:01:00.000Z", "2021-02-05"])( - "should coerce %s into a date", - async datetimeStr => { - const date = new Date(datetimeStr) + isPostgres && + it("should be able to handle a JSON aggregate with newlines", async () => { + const jsonStatement = `COALESCE(json_build_object('name', name),'{"name":{}}'::json)` const query = await createQuery({ fields: { sql: client(tableName) - .insert({ - name: "foo", - birthday: client.raw("{{ birthday }}"), - }) + .select([ + "*", + client.raw( + `${jsonStatement} as json,\n${jsonStatement} as json2` + ), + ]) .toString(), }, - parameters: [ - { - name: "birthday", - default: "", - }, - ], - queryVerb: "create", }) + const res = await config.api.query.execute( + query._id!, + {}, + { + status: 200, + } + ) + expect(res).toBeDefined() + }) + }) + }) - const result = await config.api.query.execute(query._id!, { - parameters: { birthday: datetimeStr }, - }) + describe("preview", () => { + it("should be able to preview a query", async () => { + const request: QueryPreview = { + datasourceId: datasource._id!, + queryVerb: "read", + fields: { + sql: client(tableName).where({ id: 1 }).toString(), + }, + parameters: [], + transformer: "return data", + name: datasource.name!, + schema: {}, + readable: true, + } + const response = await config.api.query.preview(request) + expect(response.schema).toEqual({ + birthday: { + name: "birthday", + type: "string", + }, + id: { + name: "id", + type: "number", + }, + name: { + name: "name", + type: "string", + }, + number: { + name: "number", + type: "string", + }, + }) + expect(response.rows).toEqual([ + { + birthday: null, + id: 1, + name: "one", + number: null, + }, + ]) + expect(events.query.previewed).toHaveBeenCalledTimes(1) + }) - expect(result.data).toEqual([{ created: true }]) + it("should update schema when column type changes from number to string", async () => { + const tableName = "schema_change_test" + await client.schema.dropTableIfExists(tableName) - const rows = await client(tableName) - .where({ birthday: datetimeStr }) - .select() - expect(rows).toHaveLength(1) + await client.schema.createTable(tableName, table => { + table.increments("id").primary() + table.string("name") + table.integer("data") + }) - for (const row of rows) { - expect(new Date(row.birthday)).toEqual(date) - } - } + await client(tableName).insert({ + name: "test", + data: 123, + }) + + const firstPreview = await config.api.query.preview({ + datasourceId: datasource._id!, + name: "Test Query", + queryVerb: "read", + fields: { + sql: client(tableName).select("*").toString(), + }, + parameters: [], + transformer: "return data", + schema: {}, + readable: true, + }) + + expect(firstPreview.schema).toEqual( + expect.objectContaining({ + data: { type: "number", name: "data" }, + }) ) - it.each(["2021,02,05", "202205-1500"])( - "should not coerce %s as a date", - async notDateStr => { + await client(tableName).delete() + await client.schema.alterTable(tableName, table => { + table.string("data").alter() + }) + + await client(tableName).insert({ + name: "test", + data: "string value", + }) + + const secondPreview = await config.api.query.preview({ + datasourceId: datasource._id!, + name: "Test Query", + queryVerb: "read", + fields: { + sql: client(tableName).select("*").toString(), + }, + parameters: [], + transformer: "return data", + schema: firstPreview.schema, + readable: true, + }) + + expect(secondPreview.schema).toEqual( + expect.objectContaining({ + data: { type: "string", name: "data" }, + }) + ) + 
}) + + it("should work with static variables", async () => { + const datasource = await config.api.datasource.create({ + ...rawDatasource, + config: { + ...rawDatasource.config, + staticVariables: { + foo: "bar", + }, + }, + }) + + const request: QueryPreview = { + datasourceId: datasource._id!, + queryVerb: "read", + fields: { + sql: `SELECT '{{ foo }}' AS foo ${isOracle ? "FROM dual" : ""}`, + }, + parameters: [], + transformer: "return data", + name: datasource.name!, + schema: {}, + readable: true, + } + + const response = await config.api.query.preview(request) + + let key = isOracle ? "FOO" : "foo" + expect(response.schema).toEqual({ + [key]: { + name: key, + type: "string", + }, + }) + + expect(response.rows).toEqual([ + { + [key]: "bar", + }, + ]) + }) + + it("should work with dynamic variables", async () => { + const datasource = await config.api.datasource.create(rawDatasource) + + const basedOnQuery = await createQuery({ + datasourceId: datasource._id!, + fields: { + sql: client(tableName).select("name").where({ id: 1 }).toString(), + }, + }) + + await config.api.datasource.update({ + ...datasource, + config: { + ...datasource.config, + dynamicVariables: [ + { + queryId: basedOnQuery._id!, + name: "foo", + value: "{{ data[0].name }}", + }, + ], + }, + }) + + const preview = await config.api.query.preview({ + datasourceId: datasource._id!, + queryVerb: "read", + fields: { + sql: `SELECT '{{ foo }}' AS foo ${isOracle ? "FROM dual" : ""}`, + }, + parameters: [], + transformer: "return data", + name: datasource.name!, + schema: {}, + readable: true, + }) + + let key = isOracle ? "FOO" : "foo" + expect(preview.schema).toEqual({ + [key]: { + name: key, + type: "string", + }, + }) + + expect(preview.rows).toEqual([ + { + [key]: "one", + }, + ]) + }) + + it("should handle the dynamic base query being deleted", async () => { + const datasource = await config.api.datasource.create(rawDatasource) + + const basedOnQuery = await createQuery({ + datasourceId: datasource._id!, + fields: { + sql: client(tableName).select("name").where({ id: 1 }).toString(), + }, + }) + + await config.api.datasource.update({ + ...datasource, + config: { + ...datasource.config, + dynamicVariables: [ + { + queryId: basedOnQuery._id!, + name: "foo", + value: "{{ data[0].name }}", + }, + ], + }, + }) + + await config.api.query.delete(basedOnQuery) + + const preview = await config.api.query.preview({ + datasourceId: datasource._id!, + queryVerb: "read", + fields: { + sql: `SELECT '{{ foo }}' AS foo ${isOracle ? "FROM dual" : ""}`, + }, + parameters: [], + transformer: "return data", + name: datasource.name!, + schema: {}, + readable: true, + }) + + let key = isOracle ? "FOO" : "foo" + expect(preview.schema).toEqual({ + [key]: { + name: key, + type: "string", + }, + }) + + expect(preview.rows).toEqual([{ [key]: isMSSQL ? 
"" : null }]) + }) + }) + + describe("query verbs", () => { + describe("create", () => { + it("should be able to insert with bindings", async () => { const query = await createQuery({ fields: { - sql: client(tableName) - .insert({ name: client.raw("{{ name }}") }) - .toString(), + sql: client(tableName).insert({ name: "{{ foo }}" }).toString(), }, parameters: [ { - name: "name", - default: "", + name: "foo", + default: "bar", }, ], queryVerb: "create", @@ -598,274 +494,442 @@ datasourceDescribe( const result = await config.api.query.execute(query._id!, { parameters: { - name: notDateStr, + foo: "baz", }, }) - expect(result.data).toEqual([{ created: true }]) + expect(result.data).toEqual([ + { + created: true, + }, + ]) - const rows = await client(tableName) - .where({ name: notDateStr }) - .select() + const rows = await client(tableName).where({ name: "baz" }).select() expect(rows).toHaveLength(1) - } - ) - }) - - describe("read", () => { - it("should execute a query", async () => { - const query = await createQuery({ - fields: { - sql: client(tableName).select("*").orderBy("id").toString(), - }, + for (const row of rows) { + expect(row).toMatchObject({ name: "baz" }) + } }) - const result = await config.api.query.execute(query._id!) + it("should not allow handlebars as parameters", async () => { + const query = await createQuery({ + fields: { + sql: client(tableName).insert({ name: "{{ foo }}" }).toString(), + }, + parameters: [ + { + name: "foo", + default: "bar", + }, + ], + queryVerb: "create", + }) - expect(result.data).toEqual([ - { - id: 1, - name: "one", - birthday: null, - number: null, - }, - { - id: 2, - name: "two", - birthday: null, - number: null, - }, - { - id: 3, - name: "three", - birthday: null, - number: null, - }, - { - id: 4, - name: "four", - birthday: null, - number: null, - }, - { - id: 5, - name: "five", - birthday: null, - number: null, - }, - ]) + await config.api.query.execute( + query._id!, + { + parameters: { + foo: "{{ 'test' }}", + }, + }, + { + status: 400, + body: { + message: + "Parameter 'foo' input contains a handlebars binding - this is not allowed.", + }, + } + ) + }) + + // Oracle doesn't automatically coerce strings into dates. 
+ !isOracle && + it.each(["2021-02-05T12:01:00.000Z", "2021-02-05"])( + "should coerce %s into a date", + async datetimeStr => { + const date = new Date(datetimeStr) + const query = await createQuery({ + fields: { + sql: client(tableName) + .insert({ + name: "foo", + birthday: client.raw("{{ birthday }}"), + }) + .toString(), + }, + parameters: [ + { + name: "birthday", + default: "", + }, + ], + queryVerb: "create", + }) + + const result = await config.api.query.execute(query._id!, { + parameters: { birthday: datetimeStr }, + }) + + expect(result.data).toEqual([{ created: true }]) + + const rows = await client(tableName) + .where({ birthday: datetimeStr }) + .select() + expect(rows).toHaveLength(1) + + for (const row of rows) { + expect(new Date(row.birthday)).toEqual(date) + } + } + ) + + it.each(["2021,02,05", "202205-1500"])( + "should not coerce %s as a date", + async notDateStr => { + const query = await createQuery({ + fields: { + sql: client(tableName) + .insert({ name: client.raw("{{ name }}") }) + .toString(), + }, + parameters: [ + { + name: "name", + default: "", + }, + ], + queryVerb: "create", + }) + + const result = await config.api.query.execute(query._id!, { + parameters: { + name: notDateStr, + }, + }) + + expect(result.data).toEqual([{ created: true }]) + + const rows = await client(tableName) + .where({ name: notDateStr }) + .select() + expect(rows).toHaveLength(1) + } + ) }) - it("should be able to transform a query", async () => { - const query = await createQuery({ - fields: { - sql: client(tableName).where({ id: 1 }).select("*").toString(), - }, - transformer: ` + describe("read", () => { + it("should execute a query", async () => { + const query = await createQuery({ + fields: { + sql: client(tableName).select("*").orderBy("id").toString(), + }, + }) + + const result = await config.api.query.execute(query._id!) + + expect(result.data).toEqual([ + { + id: 1, + name: "one", + birthday: null, + number: null, + }, + { + id: 2, + name: "two", + birthday: null, + number: null, + }, + { + id: 3, + name: "three", + birthday: null, + number: null, + }, + { + id: 4, + name: "four", + birthday: null, + number: null, + }, + { + id: 5, + name: "five", + birthday: null, + number: null, + }, + ]) + }) + + it("should be able to transform a query", async () => { + const query = await createQuery({ + fields: { + sql: client(tableName).where({ id: 1 }).select("*").toString(), + }, + transformer: ` data[0].id = data[0].id + 1; return data; `, + }) + + const result = await config.api.query.execute(query._id!) + + expect(result.data).toEqual([ + { + id: 2, + name: "one", + birthday: null, + number: null, + }, + ]) }) - const result = await config.api.query.execute(query._id!) 
+ it("should coerce numeric bindings", async () => { + const query = await createQuery({ + fields: { + sql: client(tableName) + .where({ id: client.raw("{{ id }}") }) + .select("*") + .toString(), + }, + parameters: [ + { + name: "id", + default: "", + }, + ], + }) - expect(result.data).toEqual([ - { - id: 2, - name: "one", - birthday: null, - number: null, - }, - ]) + const result = await config.api.query.execute(query._id!, { + parameters: { + id: "1", + }, + }) + + expect(result.data).toEqual([ + { + id: 1, + name: "one", + birthday: null, + number: null, + }, + ]) + }) }) - it("should coerce numeric bindings", async () => { + describe("update", () => { + it("should be able to update rows", async () => { + const query = await createQuery({ + fields: { + sql: client(tableName) + .update({ name: client.raw("{{ name }}") }) + .where({ id: client.raw("{{ id }}") }) + .toString(), + }, + parameters: [ + { + name: "id", + default: "", + }, + { + name: "name", + default: "updated", + }, + ], + queryVerb: "update", + }) + + await config.api.query.execute(query._id!, { + parameters: { + id: "1", + name: "foo", + }, + }) + + const rows = await client(tableName).where({ id: 1 }).select() + expect(rows).toEqual([ + { id: 1, name: "foo", birthday: null, number: null }, + ]) + }) + + it("should be able to execute an update that updates no rows", async () => { + const query = await createQuery({ + fields: { + sql: client(tableName) + .update({ name: "updated" }) + .where({ id: 100 }) + .toString(), + }, + queryVerb: "update", + }) + + await config.api.query.execute(query._id!) + + const rows = await client(tableName).select() + for (const row of rows) { + expect(row.name).not.toEqual("updated") + } + }) + + it("should be able to execute a delete that deletes no rows", async () => { + const query = await createQuery({ + fields: { + sql: client(tableName).where({ id: 100 }).delete().toString(), + }, + queryVerb: "delete", + }) + + await config.api.query.execute(query._id!) 
+ + const rows = await client(tableName).select() + expect(rows).toHaveLength(5) + }) + }) + + describe("delete", () => { + it("should be able to delete rows", async () => { + const query = await createQuery({ + fields: { + sql: client(tableName) + .where({ id: client.raw("{{ id }}") }) + .delete() + .toString(), + }, + parameters: [ + { + name: "id", + default: "", + }, + ], + queryVerb: "delete", + }) + + await config.api.query.execute(query._id!, { + parameters: { + id: "1", + }, + }) + + const rows = await client(tableName).where({ id: 1 }).select() + expect(rows).toHaveLength(0) + }) + }) + }) + + describe("query through datasource", () => { + it("should be able to query the datasource", async () => { + const datasource = await config.api.datasource.create(rawDatasource) + + const entityId = tableName + await config.api.datasource.update({ + ...datasource, + entities: { + [entityId]: { + name: entityId, + schema: {}, + type: "table", + primary: ["id"], + sourceId: datasource._id!, + sourceType: TableSourceType.EXTERNAL, + }, + }, + }) + + const res = await config.api.datasource.query({ + endpoint: { + datasourceId: datasource._id!, + operation: Operation.READ, + entityId, + }, + resource: { + fields: ["id", "name"], + }, + filters: { + string: { + name: "two", + }, + }, + }) + expect(res).toHaveLength(1) + expect(res[0]).toEqual({ + id: 2, + name: "two", + // the use of table.* introduces the possibility of nulls being returned + birthday: null, + number: null, + }) + }) + + // this parameter really only impacts SQL queries + describe("confirm nullDefaultSupport", () => { + let queryParams: Partial + beforeAll(async () => { + queryParams = { + fields: { + sql: client(tableName) + .insert({ + name: client.raw("{{ bindingName }}"), + number: client.raw("{{ bindingNumber }}"), + }) + .toString(), + }, + parameters: [ + { + name: "bindingName", + default: "", + }, + { + name: "bindingNumber", + default: "", + }, + ], + queryVerb: "create", + } + }) + + it("should error for old queries", async () => { + const query = await createQuery(queryParams) + await config.api.query.save({ ...query, nullDefaultSupport: false }) + let error: string | undefined + try { + await config.api.query.execute(query._id!, { + parameters: { + bindingName: "testing", + }, + }) + } catch (err: any) { + error = err.message + } + if (isMSSQL || isOracle) { + expect(error).toBeUndefined() + } else { + expect(error).toBeDefined() + expect(error).toContain("integer") + } + }) + + it("should not error for new queries", async () => { + const query = await createQuery(queryParams) + const results = await config.api.query.execute(query._id!, { + parameters: { + bindingName: "testing", + }, + }) + expect(results).toEqual({ data: [{ created: true }] }) + }) + }) + }) + + describe("edge cases", () => { + it("should find rows with a binding containing a slash", async () => { + const slashValue = "1/10" + await client(tableName).insert([{ name: slashValue }]) + const query = await createQuery({ fields: { sql: client(tableName) - .where({ id: client.raw("{{ id }}") }) .select("*") - .toString(), - }, - parameters: [ - { - name: "id", - default: "", - }, - ], - }) - - const result = await config.api.query.execute(query._id!, { - parameters: { - id: "1", - }, - }) - - expect(result.data).toEqual([ - { - id: 1, - name: "one", - birthday: null, - number: null, - }, - ]) - }) - }) - - describe("update", () => { - it("should be able to update rows", async () => { - const query = await createQuery({ - fields: { - sql: client(tableName) 
- .update({ name: client.raw("{{ name }}") }) - .where({ id: client.raw("{{ id }}") }) - .toString(), - }, - parameters: [ - { - name: "id", - default: "", - }, - { - name: "name", - default: "updated", - }, - ], - queryVerb: "update", - }) - - await config.api.query.execute(query._id!, { - parameters: { - id: "1", - name: "foo", - }, - }) - - const rows = await client(tableName).where({ id: 1 }).select() - expect(rows).toEqual([ - { id: 1, name: "foo", birthday: null, number: null }, - ]) - }) - - it("should be able to execute an update that updates no rows", async () => { - const query = await createQuery({ - fields: { - sql: client(tableName) - .update({ name: "updated" }) - .where({ id: 100 }) - .toString(), - }, - queryVerb: "update", - }) - - await config.api.query.execute(query._id!) - - const rows = await client(tableName).select() - for (const row of rows) { - expect(row.name).not.toEqual("updated") - } - }) - - it("should be able to execute a delete that deletes no rows", async () => { - const query = await createQuery({ - fields: { - sql: client(tableName).where({ id: 100 }).delete().toString(), - }, - queryVerb: "delete", - }) - - await config.api.query.execute(query._id!) - - const rows = await client(tableName).select() - expect(rows).toHaveLength(5) - }) - }) - - describe("delete", () => { - it("should be able to delete rows", async () => { - const query = await createQuery({ - fields: { - sql: client(tableName) - .where({ id: client.raw("{{ id }}") }) - .delete() - .toString(), - }, - parameters: [ - { - name: "id", - default: "", - }, - ], - queryVerb: "delete", - }) - - await config.api.query.execute(query._id!, { - parameters: { - id: "1", - }, - }) - - const rows = await client(tableName).where({ id: 1 }).select() - expect(rows).toHaveLength(0) - }) - }) - }) - - describe("query through datasource", () => { - it("should be able to query the datasource", async () => { - const datasource = await config.api.datasource.create(rawDatasource) - - const entityId = tableName - await config.api.datasource.update({ - ...datasource, - entities: { - [entityId]: { - name: entityId, - schema: {}, - type: "table", - primary: ["id"], - sourceId: datasource._id!, - sourceType: TableSourceType.EXTERNAL, - }, - }, - }) - - const res = await config.api.datasource.query({ - endpoint: { - datasourceId: datasource._id!, - operation: Operation.READ, - entityId, - }, - resource: { - fields: ["id", "name"], - }, - filters: { - string: { - name: "two", - }, - }, - }) - expect(res).toHaveLength(1) - expect(res[0]).toEqual({ - id: 2, - name: "two", - // the use of table.* introduces the possibility of nulls being returned - birthday: null, - number: null, - }) - }) - - // this parameter really only impacts SQL queries - describe("confirm nullDefaultSupport", () => { - let queryParams: Partial - beforeAll(async () => { - queryParams = { - fields: { - sql: client(tableName) - .insert({ - name: client.raw("{{ bindingName }}"), - number: client.raw("{{ bindingNumber }}"), - }) + .where("name", "=", client.raw("{{ bindingName }}")) .toString(), }, parameters: [ @@ -873,76 +937,18 @@ datasourceDescribe( name: "bindingName", default: "", }, - { - name: "bindingNumber", - default: "", - }, ], - queryVerb: "create", - } - }) - - it("should error for old queries", async () => { - const query = await createQuery(queryParams) - await config.api.query.save({ ...query, nullDefaultSupport: false }) - let error: string | undefined - try { - await config.api.query.execute(query._id!, { - parameters: { - 
bindingName: "testing", - }, - }) - } catch (err: any) { - error = err.message - } - if (isMSSQL || isOracle) { - expect(error).toBeUndefined() - } else { - expect(error).toBeDefined() - expect(error).toContain("integer") - } - }) - - it("should not error for new queries", async () => { - const query = await createQuery(queryParams) + queryVerb: "read", + }) const results = await config.api.query.execute(query._id!, { parameters: { - bindingName: "testing", + bindingName: slashValue, }, }) - expect(results).toEqual({ data: [{ created: true }] }) + expect(results).toBeDefined() + expect(results.data.length).toEqual(1) }) }) - }) - - describe("edge cases", () => { - it("should find rows with a binding containing a slash", async () => { - const slashValue = "1/10" - await client(tableName).insert([{ name: slashValue }]) - - const query = await createQuery({ - fields: { - sql: client(tableName) - .select("*") - .where("name", "=", client.raw("{{ bindingName }}")) - .toString(), - }, - parameters: [ - { - name: "bindingName", - default: "", - }, - ], - queryVerb: "read", - }) - const results = await config.api.query.execute(query._id!, { - parameters: { - bindingName: slashValue, - }, - }) - expect(results).toBeDefined() - expect(results.data.length).toEqual(1) - }) - }) - } -) + } + ) +} diff --git a/packages/server/src/api/routes/tests/queries/mongodb.spec.ts b/packages/server/src/api/routes/tests/queries/mongodb.spec.ts index 44d1553f9b..a37957fe7e 100644 --- a/packages/server/src/api/routes/tests/queries/mongodb.spec.ts +++ b/packages/server/src/api/routes/tests/queries/mongodb.spec.ts @@ -9,465 +9,698 @@ import { generator } from "@budibase/backend-core/tests" const expectValidId = expect.stringMatching(/^\w{24}$/) const expectValidBsonObjectId = expect.any(BSON.ObjectId) -datasourceDescribe( - { name: "/queries", only: [DatabaseName.MONGODB] }, - ({ config, dsProvider }) => { - let collection: string - let datasource: Datasource +const descriptions = datasourceDescribe({ only: [DatabaseName.MONGODB] }) - async function createQuery(query: Partial): Promise { - const defaultQuery: Query = { - datasourceId: datasource._id!, - name: "New Query", - parameters: [], - fields: {}, - schema: {}, - queryVerb: "read", - transformer: "return data", - readable: true, - } - const combinedQuery = { ...defaultQuery, ...query } - if ( - combinedQuery.fields && - combinedQuery.fields.extra && - !combinedQuery.fields.extra.collection - ) { - combinedQuery.fields.extra.collection = collection - } - return await config.api.query.save(combinedQuery) - } +if (descriptions.length) { + describe.each(descriptions)( + "/queries ($dbName)", + ({ config, dsProvider }) => { + let collection: string + let datasource: Datasource - async function withClient( - callback: (client: MongoClient) => Promise - ): Promise { - const client = new MongoClient(datasource.config!.connectionString) - await client.connect() - try { - return await callback(client) - } finally { - await client.close() - } - } - - async function withDb(callback: (db: Db) => Promise): Promise { - return await withClient(async client => { - return await callback(client.db(datasource.config!.db)) - }) - } - - async function withCollection( - callback: (collection: Collection) => Promise - ): Promise { - return await withDb(async db => { - return await callback(db.collection(collection)) - }) - } - - beforeAll(async () => { - const ds = await dsProvider() - datasource = ds.datasource! 
- }) - - beforeEach(async () => { - collection = generator.guid() - await withCollection(async collection => { - await collection.insertMany([ - { name: "one" }, - { name: "two" }, - { name: "three" }, - { name: "four" }, - { name: "five" }, - ]) - }) - }) - - afterEach(async () => { - await withCollection(collection => collection.drop()) - }) - - describe("preview", () => { - it("should generate a nested schema with an empty array", async () => { - const name = generator.guid() - await withCollection( - async collection => await collection.insertOne({ name, nested: [] }) - ) - - const preview = await config.api.query.preview({ + async function createQuery(query: Partial): Promise { + const defaultQuery: Query = { + datasourceId: datasource._id!, name: "New Query", - datasourceId: datasource._id!, - fields: { - json: { - name: { $eq: name }, - }, - extra: { - collection, - actionType: "findOne", - }, - }, + parameters: [], + fields: {}, schema: {}, queryVerb: "read", - parameters: [], transformer: "return data", readable: true, - }) - - expect(preview).toEqual({ - nestedSchemaFields: {}, - rows: [{ _id: expect.any(String), name, nested: [] }], - schema: { - _id: { - type: "string", - name: "_id", - }, - name: { - type: "string", - name: "name", - }, - nested: { - type: "array", - name: "nested", - }, - }, - }) - }) - - it("should update schema when structure changes from object to array", async () => { - const name = generator.guid() - - await withCollection(async collection => { - await collection.insertOne({ name, field: { subfield: "value" } }) - }) - - const firstPreview = await config.api.query.preview({ - name: "Test Query", - datasourceId: datasource._id!, - fields: { - json: { name: { $eq: name } }, - extra: { - collection, - actionType: "findOne", - }, - }, - schema: {}, - queryVerb: "read", - parameters: [], - transformer: "return data", - readable: true, - }) - - expect(firstPreview.schema).toEqual( - expect.objectContaining({ - field: { type: "json", name: "field" }, - }) - ) - - await withCollection(async collection => { - await collection.updateOne( - { name }, - { $set: { field: ["value1", "value2"] } } - ) - }) - - const secondPreview = await config.api.query.preview({ - name: "Test Query", - datasourceId: datasource._id!, - fields: { - json: { name: { $eq: name } }, - extra: { - collection, - actionType: "findOne", - }, - }, - schema: firstPreview.schema, - queryVerb: "read", - parameters: [], - transformer: "return data", - readable: true, - }) - - expect(secondPreview.schema).toEqual( - expect.objectContaining({ - field: { type: "array", name: "field" }, - }) - ) - }) - - it("should generate a nested schema based on all of the nested items", async () => { - const name = generator.guid() - const item = { - name, - contacts: [ - { - address: "123 Lane", - }, - { - address: "456 Drive", - }, - { - postcode: "BT1 12N", - lat: 54.59, - long: -5.92, - }, - { - city: "Belfast", - }, - { - address: "789 Avenue", - phoneNumber: "0800-999-5555", - }, - { - name: "Name", - isActive: false, - }, - ], } + const combinedQuery = { ...defaultQuery, ...query } + if ( + combinedQuery.fields && + combinedQuery.fields.extra && + !combinedQuery.fields.extra.collection + ) { + combinedQuery.fields.extra.collection = collection + } + return await config.api.query.save(combinedQuery) + } - await withCollection(collection => collection.insertOne(item)) + async function withClient( + callback: (client: MongoClient) => Promise + ): Promise { + const client = new 
MongoClient(datasource.config!.connectionString) + await client.connect() + try { + return await callback(client) + } finally { + await client.close() + } + } - const preview = await config.api.query.preview({ - name: "New Query", - datasourceId: datasource._id!, - fields: { - json: { - name: { $eq: name }, - }, - extra: { - collection, - actionType: "findOne", - }, - }, - schema: {}, - queryVerb: "read", - parameters: [], - transformer: "return data", - readable: true, + async function withDb(callback: (db: Db) => Promise): Promise { + return await withClient(async client => { + return await callback(client.db(datasource.config!.db)) }) + } - expect(preview).toEqual({ - nestedSchemaFields: { - contacts: { - address: { + async function withCollection( + callback: (collection: Collection) => Promise + ): Promise { + return await withDb(async db => { + return await callback(db.collection(collection)) + }) + } + + beforeAll(async () => { + const ds = await dsProvider() + datasource = ds.datasource! + }) + + beforeEach(async () => { + collection = generator.guid() + await withCollection(async collection => { + await collection.insertMany([ + { name: "one" }, + { name: "two" }, + { name: "three" }, + { name: "four" }, + { name: "five" }, + ]) + }) + }) + + afterEach(async () => { + await withCollection(collection => collection.drop()) + }) + + describe("preview", () => { + it("should generate a nested schema with an empty array", async () => { + const name = generator.guid() + await withCollection( + async collection => await collection.insertOne({ name, nested: [] }) + ) + + const preview = await config.api.query.preview({ + name: "New Query", + datasourceId: datasource._id!, + fields: { + json: { + name: { $eq: name }, + }, + extra: { + collection, + actionType: "findOne", + }, + }, + schema: {}, + queryVerb: "read", + parameters: [], + transformer: "return data", + readable: true, + }) + + expect(preview).toEqual({ + nestedSchemaFields: {}, + rows: [{ _id: expect.any(String), name, nested: [] }], + schema: { + _id: { type: "string", - name: "address", - }, - postcode: { - type: "string", - name: "postcode", - }, - lat: { - type: "number", - name: "lat", - }, - long: { - type: "number", - name: "long", - }, - city: { - type: "string", - name: "city", - }, - phoneNumber: { - type: "string", - name: "phoneNumber", + name: "_id", }, name: { type: "string", name: "name", }, - isActive: { - type: "boolean", - name: "isActive", + nested: { + type: "array", + name: "nested", }, }, - }, - rows: [{ ...item, _id: expect.any(String) }], - schema: { - _id: { type: "string", name: "_id" }, - name: { type: "string", name: "name" }, - contacts: { type: "json", name: "contacts", subtype: "array" }, - }, - }) - }) - }) - - describe("execute", () => { - it("a count query", async () => { - const query = await createQuery({ - fields: { - json: {}, - extra: { - actionType: "count", - }, - }, + }) }) - const result = await config.api.query.execute(query._id!) 
+ it("should update schema when structure changes from object to array", async () => { + const name = generator.guid() - expect(result.data).toEqual([{ value: 5 }]) - }) + await withCollection(async collection => { + await collection.insertOne({ name, field: { subfield: "value" } }) + }) - it("should be able to updateOne by ObjectId", async () => { - const insertResult = await withCollection(c => - c.insertOne({ name: "one" }) - ) - const query = await createQuery({ - fields: { - json: { - filter: { - _id: { $eq: `ObjectId("${insertResult.insertedId}")` }, + const firstPreview = await config.api.query.preview({ + name: "Test Query", + datasourceId: datasource._id!, + fields: { + json: { name: { $eq: name } }, + extra: { + collection, + actionType: "findOne", }, - update: { $set: { name: "newName" } }, }, - extra: { - actionType: "updateOne", + schema: {}, + queryVerb: "read", + parameters: [], + transformer: "return data", + readable: true, + }) + + expect(firstPreview.schema).toEqual( + expect.objectContaining({ + field: { type: "json", name: "field" }, + }) + ) + + await withCollection(async collection => { + await collection.updateOne( + { name }, + { $set: { field: ["value1", "value2"] } } + ) + }) + + const secondPreview = await config.api.query.preview({ + name: "Test Query", + datasourceId: datasource._id!, + fields: { + json: { name: { $eq: name } }, + extra: { + collection, + actionType: "findOne", + }, }, - }, - queryVerb: "update", + schema: firstPreview.schema, + queryVerb: "read", + parameters: [], + transformer: "return data", + readable: true, + }) + + expect(secondPreview.schema).toEqual( + expect.objectContaining({ + field: { type: "array", name: "field" }, + }) + ) }) - const result = await config.api.query.execute(query._id!) + it("should generate a nested schema based on all of the nested items", async () => { + const name = generator.guid() + const item = { + name, + contacts: [ + { + address: "123 Lane", + }, + { + address: "456 Drive", + }, + { + postcode: "BT1 12N", + lat: 54.59, + long: -5.92, + }, + { + city: "Belfast", + }, + { + address: "789 Avenue", + phoneNumber: "0800-999-5555", + }, + { + name: "Name", + isActive: false, + }, + ], + } - expect(result.data).toEqual([ - { - acknowledged: true, - matchedCount: 1, - modifiedCount: 1, - upsertedCount: 0, - upsertedId: null, - }, - ]) + await withCollection(collection => collection.insertOne(item)) - await withCollection(async collection => { - const doc = await collection.findOne({ name: { $eq: "newName" } }) - expect(doc).toEqual({ - _id: insertResult.insertedId, - name: "newName", + const preview = await config.api.query.preview({ + name: "New Query", + datasourceId: datasource._id!, + fields: { + json: { + name: { $eq: name }, + }, + extra: { + collection, + actionType: "findOne", + }, + }, + schema: {}, + queryVerb: "read", + parameters: [], + transformer: "return data", + readable: true, + }) + + expect(preview).toEqual({ + nestedSchemaFields: { + contacts: { + address: { + type: "string", + name: "address", + }, + postcode: { + type: "string", + name: "postcode", + }, + lat: { + type: "number", + name: "lat", + }, + long: { + type: "number", + name: "long", + }, + city: { + type: "string", + name: "city", + }, + phoneNumber: { + type: "string", + name: "phoneNumber", + }, + name: { + type: "string", + name: "name", + }, + isActive: { + type: "boolean", + name: "isActive", + }, + }, + }, + rows: [{ ...item, _id: expect.any(String) }], + schema: { + _id: { type: "string", name: "_id" }, + name: { type: 
"string", name: "name" }, + contacts: { type: "json", name: "contacts", subtype: "array" }, + }, }) }) }) - it("a count query with a transformer", async () => { - const query = await createQuery({ - fields: { - json: {}, - extra: { - actionType: "count", + describe("execute", () => { + it("a count query", async () => { + const query = await createQuery({ + fields: { + json: {}, + extra: { + actionType: "count", + }, }, - }, - transformer: "return data + 1", + }) + + const result = await config.api.query.execute(query._id!) + + expect(result.data).toEqual([{ value: 5 }]) }) - const result = await config.api.query.execute(query._id!) - - expect(result.data).toEqual([{ value: 6 }]) - }) - - it("a find query", async () => { - const query = await createQuery({ - fields: { - json: {}, - extra: { - actionType: "find", + it("should be able to updateOne by ObjectId", async () => { + const insertResult = await withCollection(c => + c.insertOne({ name: "one" }) + ) + const query = await createQuery({ + fields: { + json: { + filter: { + _id: { $eq: `ObjectId("${insertResult.insertedId}")` }, + }, + update: { $set: { name: "newName" } }, + }, + extra: { + actionType: "updateOne", + }, }, - }, + queryVerb: "update", + }) + + const result = await config.api.query.execute(query._id!) + + expect(result.data).toEqual([ + { + acknowledged: true, + matchedCount: 1, + modifiedCount: 1, + upsertedCount: 0, + upsertedId: null, + }, + ]) + + await withCollection(async collection => { + const doc = await collection.findOne({ name: { $eq: "newName" } }) + expect(doc).toEqual({ + _id: insertResult.insertedId, + name: "newName", + }) + }) }) - const result = await config.api.query.execute(query._id!) - - expect(result.data).toEqual([ - { _id: expectValidId, name: "one" }, - { _id: expectValidId, name: "two" }, - { _id: expectValidId, name: "three" }, - { _id: expectValidId, name: "four" }, - { _id: expectValidId, name: "five" }, - ]) - }) - - it("a findOne query", async () => { - const query = await createQuery({ - fields: { - json: {}, - extra: { - actionType: "findOne", + it("a count query with a transformer", async () => { + const query = await createQuery({ + fields: { + json: {}, + extra: { + actionType: "count", + }, }, - }, + transformer: "return data + 1", + }) + + const result = await config.api.query.execute(query._id!) + + expect(result.data).toEqual([{ value: 6 }]) }) - const result = await config.api.query.execute(query._id!) - - expect(result.data).toEqual([{ _id: expectValidId, name: "one" }]) - }) - - it("a findOneAndUpdate query", async () => { - const query = await createQuery({ - fields: { - json: { - filter: { name: { $eq: "one" } }, - update: { $set: { name: "newName" } }, + it("a find query", async () => { + const query = await createQuery({ + fields: { + json: {}, + extra: { + actionType: "find", + }, }, - extra: { - actionType: "findOneAndUpdate", - }, - }, + }) + + const result = await config.api.query.execute(query._id!) + + expect(result.data).toEqual([ + { _id: expectValidId, name: "one" }, + { _id: expectValidId, name: "two" }, + { _id: expectValidId, name: "three" }, + { _id: expectValidId, name: "four" }, + { _id: expectValidId, name: "five" }, + ]) }) - const result = await config.api.query.execute(query._id!) 
+ it("a findOne query", async () => { + const query = await createQuery({ + fields: { + json: {}, + extra: { + actionType: "findOne", + }, + }, + }) - expect(result.data).toEqual([ - { - lastErrorObject: { n: 1, updatedExisting: true }, - ok: 1, - value: { _id: expectValidId, name: "one" }, - }, - ]) + const result = await config.api.query.execute(query._id!) - await withCollection(async collection => { - expect(await collection.countDocuments()).toBe(5) + expect(result.data).toEqual([{ _id: expectValidId, name: "one" }]) + }) - const doc = await collection.findOne({ name: { $eq: "newName" } }) - expect(doc).toEqual({ - _id: expectValidBsonObjectId, - name: "newName", + it("a findOneAndUpdate query", async () => { + const query = await createQuery({ + fields: { + json: { + filter: { name: { $eq: "one" } }, + update: { $set: { name: "newName" } }, + }, + extra: { + actionType: "findOneAndUpdate", + }, + }, + }) + + const result = await config.api.query.execute(query._id!) + + expect(result.data).toEqual([ + { + lastErrorObject: { n: 1, updatedExisting: true }, + ok: 1, + value: { _id: expectValidId, name: "one" }, + }, + ]) + + await withCollection(async collection => { + expect(await collection.countDocuments()).toBe(5) + + const doc = await collection.findOne({ name: { $eq: "newName" } }) + expect(doc).toEqual({ + _id: expectValidBsonObjectId, + name: "newName", + }) + }) + }) + + it("a distinct query", async () => { + const query = await createQuery({ + fields: { + json: "name", + extra: { + actionType: "distinct", + }, + }, + }) + + const result = await config.api.query.execute(query._id!) + const values = result.data.map(o => o.value).sort() + expect(values).toEqual(["five", "four", "one", "three", "two"]) + }) + + it("a create query with parameters", async () => { + const query = await createQuery({ + fields: { + json: { foo: "{{ foo }}" }, + extra: { + actionType: "insertOne", + }, + }, + queryVerb: "create", + parameters: [ + { + name: "foo", + default: "default", + }, + ], + }) + + const result = await config.api.query.execute(query._id!, { + parameters: { foo: "bar" }, + }) + + expect(result.data).toEqual([ + { + acknowledged: true, + insertedId: expectValidId, + }, + ]) + + await withCollection(async collection => { + const doc = await collection.findOne({ foo: { $eq: "bar" } }) + expect(doc).toEqual({ + _id: expectValidBsonObjectId, + foo: "bar", + }) + }) + }) + + it("a delete query with parameters", async () => { + const query = await createQuery({ + fields: { + json: { name: { $eq: "{{ name }}" } }, + extra: { + actionType: "deleteOne", + }, + }, + queryVerb: "delete", + parameters: [ + { + name: "name", + default: "", + }, + ], + }) + + const result = await config.api.query.execute(query._id!, { + parameters: { name: "one" }, + }) + + expect(result.data).toEqual([ + { + acknowledged: true, + deletedCount: 1, + }, + ]) + + await withCollection(async collection => { + const doc = await collection.findOne({ name: { $eq: "one" } }) + expect(doc).toBeNull() + }) + }) + + it("an update query with parameters", async () => { + const query = await createQuery({ + fields: { + json: { + filter: { name: { $eq: "{{ name }}" } }, + update: { $set: { name: "{{ newName }}" } }, + }, + extra: { + actionType: "updateOne", + }, + }, + queryVerb: "update", + parameters: [ + { + name: "name", + default: "", + }, + { + name: "newName", + default: "", + }, + ], + }) + + const result = await config.api.query.execute(query._id!, { + parameters: { name: "one", newName: "newOne" }, + }) + + 
expect(result.data).toEqual([ + { + acknowledged: true, + matchedCount: 1, + modifiedCount: 1, + upsertedCount: 0, + upsertedId: null, + }, + ]) + + await withCollection(async collection => { + const doc = await collection.findOne({ name: { $eq: "newOne" } }) + expect(doc).toEqual({ + _id: expectValidBsonObjectId, + name: "newOne", + }) + + const oldDoc = await collection.findOne({ name: { $eq: "one" } }) + expect(oldDoc).toBeNull() + }) + }) + + it("should be able to delete all records", async () => { + const query = await createQuery({ + fields: { + json: {}, + extra: { + actionType: "deleteMany", + }, + }, + queryVerb: "delete", + }) + + const result = await config.api.query.execute(query._id!) + + expect(result.data).toEqual([ + { + acknowledged: true, + deletedCount: 5, + }, + ]) + + await withCollection(async collection => { + const docs = await collection.find().toArray() + expect(docs).toHaveLength(0) + }) + }) + + it("should be able to update all documents", async () => { + const query = await createQuery({ + fields: { + json: { + filter: {}, + update: { $set: { name: "newName" } }, + }, + extra: { + actionType: "updateMany", + }, + }, + queryVerb: "update", + }) + + const result = await config.api.query.execute(query._id!) + + expect(result.data).toEqual([ + { + acknowledged: true, + matchedCount: 5, + modifiedCount: 5, + upsertedCount: 0, + upsertedId: null, + }, + ]) + + await withCollection(async collection => { + const docs = await collection.find().toArray() + expect(docs).toHaveLength(5) + for (const doc of docs) { + expect(doc).toEqual({ + _id: expectValidBsonObjectId, + name: "newName", + }) + } }) }) }) - it("a distinct query", async () => { - const query = await createQuery({ - fields: { - json: "name", - extra: { - actionType: "distinct", - }, - }, - }) - - const result = await config.api.query.execute(query._id!) - const values = result.data.map(o => o.value).sort() - expect(values).toEqual(["five", "four", "one", "three", "two"]) + it("should throw an error if the incorrect actionType is specified", async () => { + const verbs = ["read", "create", "update", "delete"] + for (const verb of verbs) { + const query = await createQuery({ + fields: { json: {}, extra: { actionType: "invalid" } }, + queryVerb: verb, + }) + await config.api.query.execute(query._id!, undefined, { status: 400 }) + } }) - it("a create query with parameters", async () => { + it("should ignore extra brackets in query", async () => { const query = await createQuery({ fields: { - json: { foo: "{{ foo }}" }, + json: { foo: "te}st" }, extra: { actionType: "insertOne", }, }, queryVerb: "create", - parameters: [ + }) + + const result = await config.api.query.execute(query._id!) 
+ expect(result.data).toEqual([ + { + acknowledged: true, + insertedId: expectValidId, + }, + ]) + + await withCollection(async collection => { + const doc = await collection.findOne({ foo: { $eq: "te}st" } }) + expect(doc).toEqual({ + _id: expectValidBsonObjectId, + foo: "te}st", + }) + }) + }) + + it("should be able to save deeply nested data", async () => { + const data = { + foo: "bar", + data: [ + { cid: 1 }, + { cid: 2 }, { - name: "foo", - default: "default", + nested: { + name: "test", + ary: [1, 2, 3], + aryOfObjects: [{ a: 1 }, { b: 2 }], + }, }, ], + } + const query = await createQuery({ + fields: { + json: data, + extra: { + actionType: "insertOne", + }, + }, + queryVerb: "create", }) - const result = await config.api.query.execute(query._id!, { - parameters: { foo: "bar" }, - }) - + const result = await config.api.query.execute(query._id!) expect(result.data).toEqual([ { acknowledged: true, @@ -479,239 +712,10 @@ datasourceDescribe( const doc = await collection.findOne({ foo: { $eq: "bar" } }) expect(doc).toEqual({ _id: expectValidBsonObjectId, - foo: "bar", + ...data, }) }) }) - - it("a delete query with parameters", async () => { - const query = await createQuery({ - fields: { - json: { name: { $eq: "{{ name }}" } }, - extra: { - actionType: "deleteOne", - }, - }, - queryVerb: "delete", - parameters: [ - { - name: "name", - default: "", - }, - ], - }) - - const result = await config.api.query.execute(query._id!, { - parameters: { name: "one" }, - }) - - expect(result.data).toEqual([ - { - acknowledged: true, - deletedCount: 1, - }, - ]) - - await withCollection(async collection => { - const doc = await collection.findOne({ name: { $eq: "one" } }) - expect(doc).toBeNull() - }) - }) - - it("an update query with parameters", async () => { - const query = await createQuery({ - fields: { - json: { - filter: { name: { $eq: "{{ name }}" } }, - update: { $set: { name: "{{ newName }}" } }, - }, - extra: { - actionType: "updateOne", - }, - }, - queryVerb: "update", - parameters: [ - { - name: "name", - default: "", - }, - { - name: "newName", - default: "", - }, - ], - }) - - const result = await config.api.query.execute(query._id!, { - parameters: { name: "one", newName: "newOne" }, - }) - - expect(result.data).toEqual([ - { - acknowledged: true, - matchedCount: 1, - modifiedCount: 1, - upsertedCount: 0, - upsertedId: null, - }, - ]) - - await withCollection(async collection => { - const doc = await collection.findOne({ name: { $eq: "newOne" } }) - expect(doc).toEqual({ - _id: expectValidBsonObjectId, - name: "newOne", - }) - - const oldDoc = await collection.findOne({ name: { $eq: "one" } }) - expect(oldDoc).toBeNull() - }) - }) - - it("should be able to delete all records", async () => { - const query = await createQuery({ - fields: { - json: {}, - extra: { - actionType: "deleteMany", - }, - }, - queryVerb: "delete", - }) - - const result = await config.api.query.execute(query._id!) - - expect(result.data).toEqual([ - { - acknowledged: true, - deletedCount: 5, - }, - ]) - - await withCollection(async collection => { - const docs = await collection.find().toArray() - expect(docs).toHaveLength(0) - }) - }) - - it("should be able to update all documents", async () => { - const query = await createQuery({ - fields: { - json: { - filter: {}, - update: { $set: { name: "newName" } }, - }, - extra: { - actionType: "updateMany", - }, - }, - queryVerb: "update", - }) - - const result = await config.api.query.execute(query._id!) 
- - expect(result.data).toEqual([ - { - acknowledged: true, - matchedCount: 5, - modifiedCount: 5, - upsertedCount: 0, - upsertedId: null, - }, - ]) - - await withCollection(async collection => { - const docs = await collection.find().toArray() - expect(docs).toHaveLength(5) - for (const doc of docs) { - expect(doc).toEqual({ - _id: expectValidBsonObjectId, - name: "newName", - }) - } - }) - }) - }) - - it("should throw an error if the incorrect actionType is specified", async () => { - const verbs = ["read", "create", "update", "delete"] - for (const verb of verbs) { - const query = await createQuery({ - fields: { json: {}, extra: { actionType: "invalid" } }, - queryVerb: verb, - }) - await config.api.query.execute(query._id!, undefined, { status: 400 }) - } - }) - - it("should ignore extra brackets in query", async () => { - const query = await createQuery({ - fields: { - json: { foo: "te}st" }, - extra: { - actionType: "insertOne", - }, - }, - queryVerb: "create", - }) - - const result = await config.api.query.execute(query._id!) - expect(result.data).toEqual([ - { - acknowledged: true, - insertedId: expectValidId, - }, - ]) - - await withCollection(async collection => { - const doc = await collection.findOne({ foo: { $eq: "te}st" } }) - expect(doc).toEqual({ - _id: expectValidBsonObjectId, - foo: "te}st", - }) - }) - }) - - it("should be able to save deeply nested data", async () => { - const data = { - foo: "bar", - data: [ - { cid: 1 }, - { cid: 2 }, - { - nested: { - name: "test", - ary: [1, 2, 3], - aryOfObjects: [{ a: 1 }, { b: 2 }], - }, - }, - ], - } - const query = await createQuery({ - fields: { - json: data, - extra: { - actionType: "insertOne", - }, - }, - queryVerb: "create", - }) - - const result = await config.api.query.execute(query._id!) - expect(result.data).toEqual([ - { - acknowledged: true, - insertedId: expectValidId, - }, - ]) - - await withCollection(async collection => { - const doc = await collection.findOne({ foo: { $eq: "bar" } }) - expect(doc).toEqual({ - _id: expectValidBsonObjectId, - ...data, - }) - }) - }) - } -) + } + ) +} diff --git a/packages/server/src/api/routes/tests/rowAction.spec.ts b/packages/server/src/api/routes/tests/rowAction.spec.ts index 58d7509798..dabcd11b73 100644 --- a/packages/server/src/api/routes/tests/rowAction.spec.ts +++ b/packages/server/src/api/routes/tests/rowAction.spec.ts @@ -977,63 +977,69 @@ describe("/rowsActions", () => { }) }) -datasourceDescribe( - { name: "row actions (%s)", only: [DatabaseName.SQS, DatabaseName.POSTGRES] }, - ({ config, dsProvider, isInternal }) => { - let datasource: Datasource | undefined +const descriptions = datasourceDescribe({ + exclude: [DatabaseName.SQS, DatabaseName.POSTGRES], +}) - beforeAll(async () => { - const ds = await dsProvider() - datasource = ds.datasource - }) +if (descriptions.length) { + describe.each(descriptions)( + "row actions ($dbName)", + ({ config, dsProvider, isInternal }) => { + let datasource: Datasource | undefined - async function getTable(): Promise { - if (isInternal) { - await config.api.application.addSampleData(config.getAppId()) - const tables = await config.api.table.fetch() - return tables.find(t => t.sourceId === DEFAULT_BB_DATASOURCE_ID)! - } else { - const table = await config.api.table.save( - setup.structures.tableForDatasource(datasource!) 
- ) - return table - } - } + beforeAll(async () => { + const ds = await dsProvider() + datasource = ds.datasource + }) - it("should delete all the row actions (and automations) for its tables when a datasource is deleted", async () => { - async function getRowActionsFromDb(tableId: string) { - return await context.doInAppContext(config.getAppId(), async () => { - const db = context.getAppDB() - const tableDoc = await db.tryGet( - generateRowActionsID(tableId) + async function getTable(): Promise
{ + if (isInternal) { + await config.api.application.addSampleData(config.getAppId()) + const tables = await config.api.table.fetch() + return tables.find(t => t.sourceId === DEFAULT_BB_DATASOURCE_ID)! + } else { + const table = await config.api.table.save( + setup.structures.tableForDatasource(datasource!) ) - return tableDoc - }) + return table + } } - const table = await getTable() - const tableId = table._id! + it("should delete all the row actions (and automations) for its tables when a datasource is deleted", async () => { + async function getRowActionsFromDb(tableId: string) { + return await context.doInAppContext(config.getAppId(), async () => { + const db = context.getAppDB() + const tableDoc = await db.tryGet( + generateRowActionsID(tableId) + ) + return tableDoc + }) + } - await config.api.rowAction.save(tableId, { - name: generator.guid(), + const table = await getTable() + const tableId = table._id! + + await config.api.rowAction.save(tableId, { + name: generator.guid(), + }) + await config.api.rowAction.save(tableId, { + name: generator.guid(), + }) + + const { actions } = (await getRowActionsFromDb(tableId))! + expect(Object.entries(actions)).toHaveLength(2) + + const { automations } = await config.api.automation.fetch() + expect(automations).toHaveLength(2) + + const datasource = await config.api.datasource.get(table.sourceId) + await config.api.datasource.delete(datasource) + + const automationsResp = await config.api.automation.fetch() + expect(automationsResp.automations).toHaveLength(0) + + expect(await getRowActionsFromDb(tableId)).toBeUndefined() }) - await config.api.rowAction.save(tableId, { - name: generator.guid(), - }) - - const { actions } = (await getRowActionsFromDb(tableId))! - expect(Object.entries(actions)).toHaveLength(2) - - const { automations } = await config.api.automation.fetch() - expect(automations).toHaveLength(2) - - const datasource = await config.api.datasource.get(table.sourceId) - await config.api.datasource.delete(datasource) - - const automationsResp = await config.api.automation.fetch() - expect(automationsResp.automations).toHaveLength(0) - - expect(await getRowActionsFromDb(tableId)).toBeUndefined() - }) - } -) + } + ) +} diff --git a/packages/server/src/api/routes/tests/search.spec.ts b/packages/server/src/api/routes/tests/search.spec.ts index fe13e5311f..5384444067 100644 --- a/packages/server/src/api/routes/tests/search.spec.ts +++ b/packages/server/src/api/routes/tests/search.spec.ts @@ -59,3676 +59,3781 @@ jest.mock("@budibase/pro", () => ({ }, })) -datasourceDescribe( - { - name: "search (%s)", - exclude: [DatabaseName.MONGODB], - }, - ({ config, dsProvider, isInternal, isOracle, isSql }) => { - let datasource: Datasource | undefined - let client: Knex | undefined - let tableOrViewId: string - let rows: Row[] +const descriptions = datasourceDescribe({ exclude: [DatabaseName.MONGODB] }) - async function basicRelationshipTables(type: RelationshipType) { - const relatedTable = await createTable({ - name: { name: "name", type: FieldType.STRING }, - }) - const tableId = await createTable({ - name: { name: "name", type: FieldType.STRING }, - //@ts-ignore - API accepts this structure, will build out rest of definition - productCat: { - type: FieldType.LINK, - relationshipType: type, - name: "productCat", - fieldName: "product", - tableId: relatedTable, - constraints: { - type: "array", +if (descriptions.length) { + describe.each(descriptions)( + "search ($dbName)", + ({ config, dsProvider, isInternal, isOracle, isSql }) => { + let 
datasource: Datasource | undefined + let client: Knex | undefined + let tableOrViewId: string + let rows: Row[] + + async function basicRelationshipTables(type: RelationshipType) { + const relatedTable = await createTable({ + name: { name: "name", type: FieldType.STRING }, + }) + const tableId = await createTable({ + name: { name: "name", type: FieldType.STRING }, + //@ts-ignore - API accepts this structure, will build out rest of definition + productCat: { + type: FieldType.LINK, + relationshipType: type, + name: "productCat", + fieldName: "product", + tableId: relatedTable, + constraints: { + type: "array", + }, }, - }, - }) - return { - relatedTable: await config.api.table.get(relatedTable), - tableId, + }) + return { + relatedTable: await config.api.table.get(relatedTable), + tableId, + } } - } - beforeAll(async () => { - const ds = await dsProvider() - datasource = ds.datasource - client = ds.client + beforeAll(async () => { + const ds = await dsProvider() + datasource = ds.datasource + client = ds.client - config.app = await config.api.application.update(config.getAppId(), { - snippets: [ - { - name: "WeeksAgo", - code: ` + config.app = await config.api.application.update(config.getAppId(), { + snippets: [ + { + name: "WeeksAgo", + code: ` return function (weeks) { const currentTime = new Date(${Date.now()}); currentTime.setDate(currentTime.getDate()-(7 * (weeks || 1))); return currentTime.toISOString(); } `, - }, - ], - }) - }) - - async function createTable(schema?: TableSchema) { - const table = await config.api.table.save( - tableForDatasource(datasource, { schema }) - ) - return table._id! - } - - async function createView(tableId: string, schema?: ViewV2Schema) { - const view = await config.api.viewV2.create({ - tableId: tableId, - name: generator.guid(), - schema, - }) - return view.id - } - - async function createRows(arr: Record[]) { - // Shuffling to avoid false positives given a fixed order - for (const row of _.shuffle(arr)) { - await config.api.row.save(tableOrViewId, row) - } - rows = await config.api.row.fetch(tableOrViewId) - } - - async function getTable(tableOrViewId: string): Promise
{ - if (docIds.isViewId(tableOrViewId)) { - const view = await config.api.viewV2.get(tableOrViewId) - return await config.api.table.get(view.tableId) - } else { - return await config.api.table.get(tableOrViewId) - } - } - - async function assertTableExists(nameOrTable: string | Table) { - const name = - typeof nameOrTable === "string" ? nameOrTable : nameOrTable.name - expect(await client!.schema.hasTable(name)).toBeTrue() - } - - async function assertTableNumRows( - nameOrTable: string | Table, - numRows: number - ) { - const name = - typeof nameOrTable === "string" ? nameOrTable : nameOrTable.name - const row = await client!.from(name).count() - const count = parseInt(Object.values(row[0])[0] as string) - expect(count).toEqual(numRows) - } - - describe.each([true, false])("in-memory: %s", isInMemory => { - // We only run the in-memory tests during the SQS (isInternal) run - if (isInMemory && !isInternal) { - return - } - - type CreateFn = (schema?: TableSchema) => Promise - let tableOrView: [string, CreateFn][] = [["table", createTable]] - - if (!isInMemory) { - tableOrView.push([ - "view", - async (schema?: TableSchema) => { - const tableId = await createTable(schema) - const viewId = await createView( - tableId, - Object.keys(schema || {}).reduce( - (viewSchema, fieldName) => { - const field = schema![fieldName] - viewSchema[fieldName] = { - visible: field.visible ?? true, - readonly: false, - } - return viewSchema - }, - {} - ) - ) - return viewId - }, - ]) - } - - describe.each(tableOrView)("from %s", (sourceType, createTableOrView) => { - const isView = sourceType === "view" - - class SearchAssertion { - constructor(private readonly query: SearchRowRequest) {} - - private async performSearch(): Promise> { - if (isInMemory) { - return dataFilters.search(_.cloneDeep(rows), { - ...this.query, - }) - } else { - return config.api.row.search(tableOrViewId, this.query) - } - } - - // We originally used _.isMatch to compare rows, but found that when - // comparing arrays it would return true if the source array was a subset of - // the target array. This would sometimes create false matches. This - // function is a more strict version of _.isMatch that only returns true if - // the source array is an exact match of the target. - // - // _.isMatch("100", "1") also returns true which is not what we want. - private isMatch>( - expected: T, - found: T - ) { - if (!expected) { - throw new Error("Expected is undefined") - } - if (!found) { - return false - } - - for (const key of Object.keys(expected)) { - if (Array.isArray(expected[key])) { - if (!Array.isArray(found[key])) { - return false - } - if (expected[key].length !== found[key].length) { - return false - } - if (!_.isMatch(found[key], expected[key])) { - return false - } - } else if (typeof expected[key] === "object") { - if (!this.isMatch(expected[key], found[key])) { - return false - } - } else { - if (expected[key] !== found[key]) { - return false - } - } - } - return true - } - - // This function exists to ensure that the same row is not matched twice. - // When a row gets matched, we make sure to remove it from the list of rows - // we're matching against. - private popRow( - expectedRow: T, - foundRows: T[] - ): NonNullable { - const row = foundRows.find(row => this.isMatch(expectedRow, row)) - if (!row) { - const fields = Object.keys(expectedRow) - // To make the error message more readable, we only include the fields - // that are present in the expected row. 
- const searchedObjects = foundRows.map(row => _.pick(row, fields)) - throw new Error( - `Failed to find row:\n\n${JSON.stringify( - expectedRow, - null, - 2 - )}\n\nin\n\n${JSON.stringify(searchedObjects, null, 2)}` - ) - } - - foundRows.splice(foundRows.indexOf(row), 1) - return row - } - - // Asserts that the query returns rows matching exactly the set of rows - // passed in. The order of the rows matters. Rows returned in an order - // different to the one passed in will cause the assertion to fail. Extra - // rows returned by the query will also cause the assertion to fail. - async toMatchExactly(expectedRows: any[]) { - const response = await this.performSearch() - const cloned = cloneDeep(response) - const foundRows = response.rows - - // eslint-disable-next-line jest/no-standalone-expect - expect(foundRows).toHaveLength(expectedRows.length) - // eslint-disable-next-line jest/no-standalone-expect - expect([...foundRows]).toEqual( - expectedRows.map((expectedRow: any) => - expect.objectContaining(this.popRow(expectedRow, foundRows)) - ) - ) - return cloned - } - - // Asserts that the query returns rows matching exactly the set of rows - // passed in. The order of the rows is not important, but extra rows will - // cause the assertion to fail. - async toContainExactly(expectedRows: any[]) { - const response = await this.performSearch() - const cloned = cloneDeep(response) - const foundRows = response.rows - - // eslint-disable-next-line jest/no-standalone-expect - expect(foundRows).toHaveLength(expectedRows.length) - // eslint-disable-next-line jest/no-standalone-expect - expect([...foundRows]).toEqual( - expect.arrayContaining( - expectedRows.map((expectedRow: any) => - expect.objectContaining(this.popRow(expectedRow, foundRows)) - ) - ) - ) - return cloned - } - - // Asserts that the query returns some property values - this cannot be used - // to check row values, however this shouldn't be important for checking properties - // typing for this has to be any, Jest doesn't expose types for matchers like expect.any(...) - async toMatch(properties: Record) { - const response = await this.performSearch() - const cloned = cloneDeep(response) - const keys = Object.keys(properties) as Array< - keyof SearchResponse - > - for (let key of keys) { - // eslint-disable-next-line jest/no-standalone-expect - expect(response[key]).toBeDefined() - if (properties[key]) { - // eslint-disable-next-line jest/no-standalone-expect - expect(response[key]).toEqual(properties[key]) - } - } - return cloned - } - - // Asserts that the query doesn't return a property, e.g. pagination parameters. - async toNotHaveProperty(properties: (keyof SearchResponse)[]) { - const response = await this.performSearch() - const cloned = cloneDeep(response) - for (let property of properties) { - // eslint-disable-next-line jest/no-standalone-expect - expect(response[property]).toBeUndefined() - } - return cloned - } - - // Asserts that the query returns rows matching the set of rows passed in. - // The order of the rows is not important. Extra rows will not cause the - // assertion to fail. 
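The assertion helpers above build on Jest's asymmetric matchers, and the "exact versus subset" distinction spelled out in their comments maps directly onto them. The sketch below is self-contained and independent of the helper class; it only illustrates that distinction with plain Jest and is not part of the patch.

import { describe, expect, it } from "@jest/globals"

describe("exact vs. subset matching", () => {
  const found = [{ name: "foo" }, { name: "bar" }]

  it("toEqual against a plain array is order- and length-sensitive", () => {
    // Mirrors toMatchExactly: same rows, same order, no extras allowed.
    expect(found).toEqual([
      expect.objectContaining({ name: "foo" }),
      expect.objectContaining({ name: "bar" }),
    ])
  })

  it("arrayContaining tolerates extra rows and any order", () => {
    // Mirrors toContain: the expected rows must appear, extras are fine.
    expect(found).toEqual(
      expect.arrayContaining([expect.objectContaining({ name: "bar" })])
    )
  })
})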
- async toContain(expectedRows: any[]) { - const response = await this.performSearch() - const cloned = cloneDeep(response) - const foundRows = response.rows - - // eslint-disable-next-line jest/no-standalone-expect - expect([...foundRows]).toEqual( - expect.arrayContaining( - expectedRows.map((expectedRow: any) => - expect.objectContaining(this.popRow(expectedRow, foundRows)) - ) - ) - ) - return cloned - } - - async toFindNothing() { - await this.toContainExactly([]) - } - - async toHaveLength(length: number) { - const { rows: foundRows } = await this.performSearch() - - // eslint-disable-next-line jest/no-standalone-expect - expect(foundRows).toHaveLength(length) - } - } - - function expectSearch(query: SearchRowRequest) { - return new SearchAssertion(query) - } - - function expectQuery(query: SearchFilters) { - return expectSearch({ query }) - } - - describe("boolean", () => { - beforeAll(async () => { - tableOrViewId = await createTableOrView({ - isTrue: { name: "isTrue", type: FieldType.BOOLEAN }, - }) - await createRows([{ isTrue: true }, { isTrue: false }]) - }) - - describe("equal", () => { - it("successfully finds true row", async () => { - await expectQuery({ equal: { isTrue: true } }).toMatchExactly([ - { isTrue: true }, - ]) - }) - - it("successfully finds false row", async () => { - await expectQuery({ equal: { isTrue: false } }).toMatchExactly([ - { isTrue: false }, - ]) - }) - }) - - describe("notEqual", () => { - it("successfully finds false row", async () => { - await expectQuery({ - notEqual: { isTrue: true }, - }).toContainExactly([{ isTrue: false }]) - }) - - it("successfully finds true row", async () => { - await expectQuery({ - notEqual: { isTrue: false }, - }).toContainExactly([{ isTrue: true }]) - }) - }) - - describe("oneOf", () => { - it("successfully finds true row", async () => { - await expectQuery({ oneOf: { isTrue: [true] } }).toContainExactly( - [{ isTrue: true }] - ) - }) - - it("successfully finds false row", async () => { - await expectQuery({ - oneOf: { isTrue: [false] }, - }).toContainExactly([{ isTrue: false }]) - }) - }) - - describe("sort", () => { - it("sorts ascending", async () => { - await expectSearch({ - query: {}, - sort: "isTrue", - sortOrder: SortOrder.ASCENDING, - }).toMatchExactly([{ isTrue: false }, { isTrue: true }]) - }) - - it("sorts descending", async () => { - await expectSearch({ - query: {}, - sort: "isTrue", - sortOrder: SortOrder.DESCENDING, - }).toMatchExactly([{ isTrue: true }, { isTrue: false }]) - }) - }) + }, + ], }) + }) - !isInMemory && - describe("bindings", () => { - let globalUsers: any = [] + async function createTable(schema?: TableSchema) { + const table = await config.api.table.save( + tableForDatasource(datasource, { schema }) + ) + return table._id! + } - const serverTime = new Date() + async function createView(tableId: string, schema?: ViewV2Schema) { + const view = await config.api.viewV2.create({ + tableId: tableId, + name: generator.guid(), + schema, + }) + return view.id + } - // In MariaDB and MySQL we only store dates to second precision, so we need - // to remove milliseconds from the server time to ensure searches work as - // expected. 
- serverTime.setMilliseconds(0) + async function createRows(arr: Record[]) { + // Shuffling to avoid false positives given a fixed order + for (const row of _.shuffle(arr)) { + await config.api.row.save(tableOrViewId, row) + } + rows = await config.api.row.fetch(tableOrViewId) + } - const future = new Date( - serverTime.getTime() + 1000 * 60 * 60 * 24 * 30 - ) + async function getTable(tableOrViewId: string): Promise
{ + if (docIds.isViewId(tableOrViewId)) { + const view = await config.api.viewV2.get(tableOrViewId) + return await config.api.table.get(view.tableId) + } else { + return await config.api.table.get(tableOrViewId) + } + } - const rows = (currentUser: User) => { - return [ - { name: "foo", appointment: "1982-01-05T00:00:00.000Z" }, - { name: "bar", appointment: "1995-05-06T00:00:00.000Z" }, - { - name: currentUser.firstName, - appointment: future.toISOString(), - }, - { name: "serverDate", appointment: serverTime.toISOString() }, - { - name: "single user, session user", - single_user: currentUser, - }, - { - name: "single user", - single_user: globalUsers[0], - }, - { - name: "deprecated single user, session user", - deprecated_single_user: [currentUser], - }, - { - name: "deprecated single user", - deprecated_single_user: [globalUsers[0]], - }, - { - name: "multi user", - multi_user: globalUsers, - }, - { - name: "multi user with session user", - multi_user: [...globalUsers, currentUser], - }, - { - name: "deprecated multi user", - deprecated_multi_user: globalUsers, - }, - { - name: "deprecated multi user with session user", - deprecated_multi_user: [...globalUsers, currentUser], - }, - ] + async function assertTableExists(nameOrTable: string | Table) { + const name = + typeof nameOrTable === "string" ? nameOrTable : nameOrTable.name + expect(await client!.schema.hasTable(name)).toBeTrue() + } + + async function assertTableNumRows( + nameOrTable: string | Table, + numRows: number + ) { + const name = + typeof nameOrTable === "string" ? nameOrTable : nameOrTable.name + const row = await client!.from(name).count() + const count = parseInt(Object.values(row[0])[0] as string) + expect(count).toEqual(numRows) + } + + describe.each([true, false])("in-memory: %s", isInMemory => { + // We only run the in-memory tests during the SQS (isInternal) run + if (isInMemory && !isInternal) { + return + } + + type CreateFn = (schema?: TableSchema) => Promise + let tableOrView: [string, CreateFn][] = [["table", createTable]] + + if (!isInMemory) { + tableOrView.push([ + "view", + async (schema?: TableSchema) => { + const tableId = await createTable(schema) + const viewId = await createView( + tableId, + Object.keys(schema || {}).reduce( + (viewSchema, fieldName) => { + const field = schema![fieldName] + viewSchema[fieldName] = { + visible: field.visible ?? true, + readonly: false, + } + return viewSchema + }, + {} + ) + ) + return viewId + }, + ]) + } + + describe.each(tableOrView)( + "from %s", + (sourceType, createTableOrView) => { + const isView = sourceType === "view" + + class SearchAssertion { + constructor(private readonly query: SearchRowRequest) {} + + private async performSearch(): Promise> { + if (isInMemory) { + return dataFilters.search(_.cloneDeep(rows), { + ...this.query, + }) + } else { + return config.api.row.search(tableOrViewId, this.query) + } + } + + // We originally used _.isMatch to compare rows, but found that when + // comparing arrays it would return true if the source array was a subset of + // the target array. This would sometimes create false matches. This + // function is a more strict version of _.isMatch that only returns true if + // the source array is an exact match of the target. + // + // _.isMatch("100", "1") also returns true which is not what we want. 
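The looseness described in the comment above is easy to reproduce on its own. Here is a minimal standalone sketch (not part of the patch, assuming the usual default lodash import) of the two failure modes it mentions:

import _ from "lodash"

// Partial array comparison: a subset of the target array still "matches",
// which is exactly the false positive the stricter isMatch below guards against.
console.log(_.isMatch({ numbers: ["one", "two"] }, { numbers: ["one"] })) // true

// The string case called out above: lodash only compares the keys present on
// the source value, so "100" matched against "1" also passes.
console.log(_.isMatch("100" as any, "1" as any)) // true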
+ private isMatch>( + expected: T, + found: T + ) { + if (!expected) { + throw new Error("Expected is undefined") + } + if (!found) { + return false + } + + for (const key of Object.keys(expected)) { + if (Array.isArray(expected[key])) { + if (!Array.isArray(found[key])) { + return false + } + if (expected[key].length !== found[key].length) { + return false + } + if (!_.isMatch(found[key], expected[key])) { + return false + } + } else if (typeof expected[key] === "object") { + if (!this.isMatch(expected[key], found[key])) { + return false + } + } else { + if (expected[key] !== found[key]) { + return false + } + } + } + return true + } + + // This function exists to ensure that the same row is not matched twice. + // When a row gets matched, we make sure to remove it from the list of rows + // we're matching against. + private popRow( + expectedRow: T, + foundRows: T[] + ): NonNullable { + const row = foundRows.find(row => + this.isMatch(expectedRow, row) + ) + if (!row) { + const fields = Object.keys(expectedRow) + // To make the error message more readable, we only include the fields + // that are present in the expected row. + const searchedObjects = foundRows.map(row => + _.pick(row, fields) + ) + throw new Error( + `Failed to find row:\n\n${JSON.stringify( + expectedRow, + null, + 2 + )}\n\nin\n\n${JSON.stringify(searchedObjects, null, 2)}` + ) + } + + foundRows.splice(foundRows.indexOf(row), 1) + return row + } + + // Asserts that the query returns rows matching exactly the set of rows + // passed in. The order of the rows matters. Rows returned in an order + // different to the one passed in will cause the assertion to fail. Extra + // rows returned by the query will also cause the assertion to fail. + async toMatchExactly(expectedRows: any[]) { + const response = await this.performSearch() + const cloned = cloneDeep(response) + const foundRows = response.rows + + // eslint-disable-next-line jest/no-standalone-expect + expect(foundRows).toHaveLength(expectedRows.length) + // eslint-disable-next-line jest/no-standalone-expect + expect([...foundRows]).toEqual( + expectedRows.map((expectedRow: any) => + expect.objectContaining(this.popRow(expectedRow, foundRows)) + ) + ) + return cloned + } + + // Asserts that the query returns rows matching exactly the set of rows + // passed in. The order of the rows is not important, but extra rows will + // cause the assertion to fail. + async toContainExactly(expectedRows: any[]) { + const response = await this.performSearch() + const cloned = cloneDeep(response) + const foundRows = response.rows + + // eslint-disable-next-line jest/no-standalone-expect + expect(foundRows).toHaveLength(expectedRows.length) + // eslint-disable-next-line jest/no-standalone-expect + expect([...foundRows]).toEqual( + expect.arrayContaining( + expectedRows.map((expectedRow: any) => + expect.objectContaining( + this.popRow(expectedRow, foundRows) + ) + ) + ) + ) + return cloned + } + + // Asserts that the query returns some property values - this cannot be used + // to check row values, however this shouldn't be important for checking properties + // typing for this has to be any, Jest doesn't expose types for matchers like expect.any(...) 
+ async toMatch(properties: Record) { + const response = await this.performSearch() + const cloned = cloneDeep(response) + const keys = Object.keys(properties) as Array< + keyof SearchResponse + > + for (let key of keys) { + // eslint-disable-next-line jest/no-standalone-expect + expect(response[key]).toBeDefined() + if (properties[key]) { + // eslint-disable-next-line jest/no-standalone-expect + expect(response[key]).toEqual(properties[key]) + } + } + return cloned + } + + // Asserts that the query doesn't return a property, e.g. pagination parameters. + async toNotHaveProperty( + properties: (keyof SearchResponse)[] + ) { + const response = await this.performSearch() + const cloned = cloneDeep(response) + for (let property of properties) { + // eslint-disable-next-line jest/no-standalone-expect + expect(response[property]).toBeUndefined() + } + return cloned + } + + // Asserts that the query returns rows matching the set of rows passed in. + // The order of the rows is not important. Extra rows will not cause the + // assertion to fail. + async toContain(expectedRows: any[]) { + const response = await this.performSearch() + const cloned = cloneDeep(response) + const foundRows = response.rows + + // eslint-disable-next-line jest/no-standalone-expect + expect([...foundRows]).toEqual( + expect.arrayContaining( + expectedRows.map((expectedRow: any) => + expect.objectContaining( + this.popRow(expectedRow, foundRows) + ) + ) + ) + ) + return cloned + } + + async toFindNothing() { + await this.toContainExactly([]) + } + + async toHaveLength(length: number) { + const { rows: foundRows } = await this.performSearch() + + // eslint-disable-next-line jest/no-standalone-expect + expect(foundRows).toHaveLength(length) + } } - beforeAll(async () => { - // Set up some global users - globalUsers = await Promise.all( - Array(2) - .fill(0) - .map(async () => { - const globalUser = await config.globalUser() - const userMedataId = globalUser._id - ? dbCore.generateUserMetadataID(globalUser._id) - : null - return { - _id: globalUser._id, - _meta: userMedataId, - } - }) - ) + function expectSearch(query: SearchRowRequest) { + return new SearchAssertion(query) + } - tableOrViewId = await createTableOrView({ - name: { name: "name", type: FieldType.STRING }, - appointment: { name: "appointment", type: FieldType.DATETIME }, - single_user: { - name: "single_user", - type: FieldType.BB_REFERENCE_SINGLE, - subtype: BBReferenceFieldSubType.USER, - }, - deprecated_single_user: { - name: "deprecated_single_user", - type: FieldType.BB_REFERENCE, - subtype: BBReferenceFieldSubType.USER, - }, - multi_user: { - name: "multi_user", - type: FieldType.BB_REFERENCE, - subtype: BBReferenceFieldSubType.USER, - constraints: { - type: "array", - }, - }, - deprecated_multi_user: { - name: "deprecated_multi_user", - type: FieldType.BB_REFERENCE, - subtype: BBReferenceFieldSubType.USERS, - constraints: { - type: "array", - }, - }, - }) - await createRows(rows(config.getUser())) - }) + function expectQuery(query: SearchFilters) { + return expectSearch({ query }) + } - // !! 
Current User is auto generated per run - it("should return all rows matching the session user firstname", async () => { - await expectQuery({ - equal: { name: "{{ [user].firstName }}" }, - }).toContainExactly([ - { - name: config.getUser().firstName, - appointment: future.toISOString(), - }, - ]) - }) - - it("should return all rows matching the session user firstname when logical operator used", async () => { - await expectQuery({ - $and: { - conditions: [{ equal: { name: "{{ [user].firstName }}" } }], - }, - }).toContainExactly([ - { - name: config.getUser().firstName, - appointment: future.toISOString(), - }, - ]) - }) - - it("should parse the date binding and return all rows after the resolved value", async () => { - await tk.withFreeze(serverTime, async () => { - await expectQuery({ - range: { - appointment: { - low: "{{ [now] }}", - high: "9999-00-00T00:00:00.000Z", - }, - }, - }).toContainExactly([ - { - name: config.getUser().firstName, - appointment: future.toISOString(), - }, - { name: "serverDate", appointment: serverTime.toISOString() }, - ]) - }) - }) - - it("should parse the date binding and return all rows before the resolved value", async () => { - await expectQuery({ - range: { - appointment: { - low: "0000-00-00T00:00:00.000Z", - high: "{{ [now] }}", - }, - }, - }).toContainExactly([ - { name: "foo", appointment: "1982-01-05T00:00:00.000Z" }, - { name: "bar", appointment: "1995-05-06T00:00:00.000Z" }, - { name: "serverDate", appointment: serverTime.toISOString() }, - ]) - }) - - it("should parse the encoded js snippet. Return rows with appointments up to 1 week in the past", async () => { - const jsBinding = "return snippets.WeeksAgo();" - const encodedBinding = encodeJSBinding(jsBinding) - - await expectQuery({ - range: { - appointment: { - low: "0000-00-00T00:00:00.000Z", - high: encodedBinding, - }, - }, - }).toContainExactly([ - { name: "foo", appointment: "1982-01-05T00:00:00.000Z" }, - { name: "bar", appointment: "1995-05-06T00:00:00.000Z" }, - ]) - }) - - it("should parse the encoded js binding. 
Return rows with appointments 2 weeks in the past", async () => { - const jsBinding = `const currentTime = new Date(${Date.now()})\ncurrentTime.setDate(currentTime.getDate()-14);\nreturn currentTime.toISOString();` - const encodedBinding = encodeJSBinding(jsBinding) - - await expectQuery({ - range: { - appointment: { - low: "0000-00-00T00:00:00.000Z", - high: encodedBinding, - }, - }, - }).toContainExactly([ - { name: "foo", appointment: "1982-01-05T00:00:00.000Z" }, - { name: "bar", appointment: "1995-05-06T00:00:00.000Z" }, - ]) - }) - - it("should match a single user row by the session user id", async () => { - await expectQuery({ - equal: { single_user: "{{ [user]._id }}" }, - }).toContainExactly([ - { - name: "single user, session user", - single_user: { _id: config.getUser()._id }, - }, - ]) - }) - - it("should match a deprecated single user row by the session user id", async () => { - await expectQuery({ - equal: { deprecated_single_user: "{{ [user]._id }}" }, - }).toContainExactly([ - { - name: "deprecated single user, session user", - deprecated_single_user: [{ _id: config.getUser()._id }], - }, - ]) - }) - - it("should match the session user id in a multi user field", async () => { - const allUsers = [...globalUsers, config.getUser()].map( - (user: any) => { - return { _id: user._id } - } - ) - - await expectQuery({ - contains: { multi_user: ["{{ [user]._id }}"] }, - }).toContainExactly([ - { - name: "multi user with session user", - multi_user: allUsers, - }, - ]) - }) - - it("should match the session user id in a deprecated multi user field", async () => { - const allUsers = [...globalUsers, config.getUser()].map( - (user: any) => { - return { _id: user._id } - } - ) - - await expectQuery({ - contains: { deprecated_multi_user: ["{{ [user]._id }}"] }, - }).toContainExactly([ - { - name: "deprecated multi user with session user", - deprecated_multi_user: allUsers, - }, - ]) - }) - - it("should not match the session user id in a multi user field", async () => { - await expectQuery({ - notContains: { multi_user: ["{{ [user]._id }}"] }, - notEmpty: { multi_user: true }, - }).toContainExactly([ - { - name: "multi user", - multi_user: globalUsers.map((user: any) => { - return { _id: user._id } - }), - }, - ]) - }) - - it("should not match the session user id in a deprecated multi user field", async () => { - await expectQuery({ - notContains: { deprecated_multi_user: ["{{ [user]._id }}"] }, - notEmpty: { deprecated_multi_user: true }, - }).toContainExactly([ - { - name: "deprecated multi user", - deprecated_multi_user: globalUsers.map((user: any) => { - return { _id: user._id } - }), - }, - ]) - }) - - it("should match the session user id and a user table row id using helpers, user binding and a static user id.", async () => { - await expectQuery({ - oneOf: { - single_user: [ - "{{ default [user]._id '_empty_' }}", - globalUsers[0]._id, - ], - }, - }).toContainExactly([ - { - name: "single user, session user", - single_user: { _id: config.getUser()._id }, - }, - { - name: "single user", - single_user: { _id: globalUsers[0]._id }, - }, - ]) - }) - - it("should match the session user id and a user table row id using helpers, user binding and a static user id. 
(deprecated single user)", async () => { - await expectQuery({ - oneOf: { - deprecated_single_user: [ - "{{ default [user]._id '_empty_' }}", - globalUsers[0]._id, - ], - }, - }).toContainExactly([ - { - name: "deprecated single user, session user", - deprecated_single_user: [{ _id: config.getUser()._id }], - }, - { - name: "deprecated single user", - deprecated_single_user: [{ _id: globalUsers[0]._id }], - }, - ]) - }) - - it("should resolve 'default' helper to '_empty_' when binding resolves to nothing", async () => { - await expectQuery({ - oneOf: { - single_user: [ - "{{ default [user]._idx '_empty_' }}", - globalUsers[0]._id, - ], - }, - }).toContainExactly([ - { - name: "single user", - single_user: { _id: globalUsers[0]._id }, - }, - ]) - }) - - it("should resolve 'default' helper to '_empty_' when binding resolves to nothing (deprecated single user)", async () => { - await expectQuery({ - oneOf: { - deprecated_single_user: [ - "{{ default [user]._idx '_empty_' }}", - globalUsers[0]._id, - ], - }, - }).toContainExactly([ - { - name: "deprecated single user", - deprecated_single_user: [{ _id: globalUsers[0]._id }], - }, - ]) - }) - }) - - const stringTypes = [FieldType.STRING, FieldType.LONGFORM] as const - describe.each(stringTypes)("%s", type => { - beforeAll(async () => { - tableOrViewId = await createTableOrView({ - name: { name: "name", type }, - }) - await createRows([{ name: "foo" }, { name: "bar" }]) - }) - - describe("misc", () => { - it("should return all if no query is passed", async () => { - await expectSearch({} as RowSearchParams).toContainExactly([ - { name: "foo" }, - { name: "bar" }, - ]) - }) - - it("should return all if empty query is passed", async () => { - await expectQuery({}).toContainExactly([ - { name: "foo" }, - { name: "bar" }, - ]) - }) - - it("should return all if onEmptyFilter is RETURN_ALL", async () => { - await expectQuery({ - onEmptyFilter: EmptyFilterOption.RETURN_ALL, - }).toContainExactly([{ name: "foo" }, { name: "bar" }]) - }) - - // onEmptyFilter cannot be sent to view searches - !isView && - it("should return nothing if onEmptyFilter is RETURN_NONE", async () => { - await expectQuery({ - onEmptyFilter: EmptyFilterOption.RETURN_NONE, - }).toFindNothing() + describe("boolean", () => { + beforeAll(async () => { + tableOrViewId = await createTableOrView({ + isTrue: { name: "isTrue", type: FieldType.BOOLEAN }, + }) + await createRows([{ isTrue: true }, { isTrue: false }]) }) - it("should respect limit", async () => { - await expectSearch({ - limit: 1, - paginate: true, - query: {}, - }).toHaveLength(1) - }) - }) - - describe("equal", () => { - it("successfully finds a row", async () => { - await expectQuery({ equal: { name: "foo" } }).toContainExactly([ - { name: "foo" }, - ]) - }) - - it("fails to find nonexistent row", async () => { - await expectQuery({ equal: { name: "none" } }).toFindNothing() - }) - - it("works as an or condition", async () => { - await expectQuery({ - allOr: true, - equal: { name: "foo" }, - oneOf: { name: ["bar"] }, - }).toContainExactly([{ name: "foo" }, { name: "bar" }]) - }) - - it("can have multiple values for same column", async () => { - await expectQuery({ - allOr: true, - equal: { "1:name": "foo", "2:name": "bar" }, - }).toContainExactly([{ name: "foo" }, { name: "bar" }]) - }) - }) - - describe("notEqual", () => { - it("successfully finds a row", async () => { - await expectQuery({ notEqual: { name: "foo" } }).toContainExactly( - [{ name: "bar" }] - ) - }) - - it("fails to find nonexistent row", async () => { - 
await expectQuery({ notEqual: { name: "bar" } }).toContainExactly( - [{ name: "foo" }] - ) - }) - }) - - describe("oneOf", () => { - it("successfully finds a row", async () => { - await expectQuery({ oneOf: { name: ["foo"] } }).toContainExactly([ - { name: "foo" }, - ]) - }) - - it("fails to find nonexistent row", async () => { - await expectQuery({ oneOf: { name: ["none"] } }).toFindNothing() - }) - - it("can have multiple values for same column", async () => { - await expectQuery({ - oneOf: { - name: ["foo", "bar"], - }, - }).toContainExactly([{ name: "foo" }, { name: "bar" }]) - }) - - it("splits comma separated strings", async () => { - await expectQuery({ - oneOf: { - // @ts-ignore - name: "foo,bar", - }, - }).toContainExactly([{ name: "foo" }, { name: "bar" }]) - }) - - it("trims whitespace", async () => { - await expectQuery({ - oneOf: { - // @ts-ignore - name: "foo, bar", - }, - }).toContainExactly([{ name: "foo" }, { name: "bar" }]) - }) - - it("empty arrays returns all when onEmptyFilter is set to return 'all'", async () => { - await expectQuery({ - onEmptyFilter: EmptyFilterOption.RETURN_ALL, - oneOf: { name: [] }, - }).toContainExactly([{ name: "foo" }, { name: "bar" }]) - }) - - // onEmptyFilter cannot be sent to view searches - !isView && - it("empty arrays returns all when onEmptyFilter is set to return 'none'", async () => { - await expectQuery({ - onEmptyFilter: EmptyFilterOption.RETURN_NONE, - oneOf: { name: [] }, - }).toContainExactly([]) - }) - }) - - describe("fuzzy", () => { - it("successfully finds a row", async () => { - await expectQuery({ fuzzy: { name: "oo" } }).toContainExactly([ - { name: "foo" }, - ]) - }) - - it("fails to find nonexistent row", async () => { - await expectQuery({ fuzzy: { name: "none" } }).toFindNothing() - }) - }) - - describe("string", () => { - it("successfully finds a row", async () => { - await expectQuery({ string: { name: "fo" } }).toContainExactly([ - { name: "foo" }, - ]) - }) - - it("fails to find nonexistent row", async () => { - await expectQuery({ string: { name: "none" } }).toFindNothing() - }) - - it("is case-insensitive", async () => { - await expectQuery({ string: { name: "FO" } }).toContainExactly([ - { name: "foo" }, - ]) - }) - }) - - describe("range", () => { - it("successfully finds multiple rows", async () => { - await expectQuery({ - range: { name: { low: "a", high: "z" } }, - }).toContainExactly([{ name: "bar" }, { name: "foo" }]) - }) - - it("successfully finds a row with a high bound", async () => { - await expectQuery({ - range: { name: { low: "a", high: "c" } }, - }).toContainExactly([{ name: "bar" }]) - }) - - it("successfully finds a row with a low bound", async () => { - await expectQuery({ - range: { name: { low: "f", high: "z" } }, - }).toContainExactly([{ name: "foo" }]) - }) - - it("successfully finds no rows", async () => { - await expectQuery({ - range: { name: { low: "g", high: "h" } }, - }).toFindNothing() - }) - - it("ignores low if it's an empty object", async () => { - await expectQuery({ - // @ts-ignore - range: { name: { low: {}, high: "z" } }, - }).toContainExactly([{ name: "foo" }, { name: "bar" }]) - }) - - it("ignores high if it's an empty object", async () => { - await expectQuery({ - // @ts-ignore - range: { name: { low: "a", high: {} } }, - }).toContainExactly([{ name: "foo" }, { name: "bar" }]) - }) - }) - - describe("empty", () => { - it("finds no empty rows", async () => { - await expectQuery({ empty: { name: null } }).toFindNothing() - }) - - it("should not be affected by when filter 
empty behaviour", async () => { - await expectQuery({ - empty: { name: null }, - onEmptyFilter: EmptyFilterOption.RETURN_ALL, - }).toFindNothing() - }) - }) - - describe("notEmpty", () => { - it("finds all non-empty rows", async () => { - await expectQuery({ notEmpty: { name: null } }).toContainExactly([ - { name: "foo" }, - { name: "bar" }, - ]) - }) - - it("should not be affected by when filter empty behaviour", async () => { - await expectQuery({ - notEmpty: { name: null }, - onEmptyFilter: EmptyFilterOption.RETURN_NONE, - }).toContainExactly([{ name: "foo" }, { name: "bar" }]) - }) - }) - - describe("sort", () => { - it("sorts ascending", async () => { - await expectSearch({ - query: {}, - sort: "name", - sortOrder: SortOrder.ASCENDING, - }).toMatchExactly([{ name: "bar" }, { name: "foo" }]) - }) - - it("sorts descending", async () => { - await expectSearch({ - query: {}, - sort: "name", - sortOrder: SortOrder.DESCENDING, - }).toMatchExactly([{ name: "foo" }, { name: "bar" }]) - }) - - describe("sortType STRING", () => { - it("sorts ascending", async () => { - await expectSearch({ - query: {}, - sort: "name", - sortType: SortType.STRING, - sortOrder: SortOrder.ASCENDING, - }).toMatchExactly([{ name: "bar" }, { name: "foo" }]) - }) - - it("sorts descending", async () => { - await expectSearch({ - query: {}, - sort: "name", - sortType: SortType.STRING, - sortOrder: SortOrder.DESCENDING, - }).toMatchExactly([{ name: "foo" }, { name: "bar" }]) - }) - }) - - !isInternal && - !isInMemory && - // This test was added because we automatically add in a sort by the - // primary key, and we used to do this unconditionally which caused - // problems because it was possible for the primary key to appear twice - // in the resulting SQL ORDER BY clause, resulting in an SQL error. - // We now check first to make sure that the primary key isn't already - // in the sort before adding it. - describe("sort on primary key", () => { - beforeAll(async () => { - const tableName = structures.uuid().substring(0, 10) - await client!.schema.createTable(tableName, t => { - t.string("name").primary() - }) - const resp = await config.api.datasource.fetchSchema({ - datasourceId: datasource!._id!, - }) - - tableOrViewId = resp.datasource.entities![tableName]._id! 
- - await createRows([{ name: "foo" }, { name: "bar" }]) + describe("equal", () => { + it("successfully finds true row", async () => { + await expectQuery({ equal: { isTrue: true } }).toMatchExactly( + [{ isTrue: true }] + ) }) - it("should be able to sort by a primary key column ascending", async () => - expectSearch({ - query: {}, - sort: "name", - sortOrder: SortOrder.ASCENDING, - }).toMatchExactly([{ name: "bar" }, { name: "foo" }])) - - it("should be able to sort by a primary key column descending", async () => - expectSearch({ - query: {}, - sort: "name", - sortOrder: SortOrder.DESCENDING, - }).toMatchExactly([{ name: "foo" }, { name: "bar" }])) - }) - }) - }) - - describe("numbers", () => { - beforeAll(async () => { - tableOrViewId = await createTableOrView({ - age: { name: "age", type: FieldType.NUMBER }, - }) - await createRows([{ age: 1 }, { age: 10 }]) - }) - - describe("equal", () => { - it("successfully finds a row", async () => { - await expectQuery({ equal: { age: 1 } }).toContainExactly([ - { age: 1 }, - ]) - }) - - it("fails to find nonexistent row", async () => { - await expectQuery({ equal: { age: 2 } }).toFindNothing() - }) - }) - - describe("notEqual", () => { - it("successfully finds a row", async () => { - await expectQuery({ notEqual: { age: 1 } }).toContainExactly([ - { age: 10 }, - ]) - }) - - it("fails to find nonexistent row", async () => { - await expectQuery({ notEqual: { age: 10 } }).toContainExactly([ - { age: 1 }, - ]) - }) - }) - - describe("oneOf", () => { - it("successfully finds a row", async () => { - await expectQuery({ oneOf: { age: [1] } }).toContainExactly([ - { age: 1 }, - ]) - }) - - it("fails to find nonexistent row", async () => { - await expectQuery({ oneOf: { age: [2] } }).toFindNothing() - }) - - it("can convert from a string", async () => { - await expectQuery({ - oneOf: { - // @ts-ignore - age: "1", - }, - }).toContainExactly([{ age: 1 }]) - }) - - it("can find multiple values for same column", async () => { - await expectQuery({ - oneOf: { - // @ts-ignore - age: "1,10", - }, - }).toContainExactly([{ age: 1 }, { age: 10 }]) - }) - }) - - describe("range", () => { - it("successfully finds a row", async () => { - await expectQuery({ - range: { age: { low: 1, high: 5 } }, - }).toContainExactly([{ age: 1 }]) - }) - - it("successfully finds multiple rows", async () => { - await expectQuery({ - range: { age: { low: 1, high: 10 } }, - }).toContainExactly([{ age: 1 }, { age: 10 }]) - }) - - it("successfully finds a row with a high bound", async () => { - await expectQuery({ - range: { age: { low: 5, high: 10 } }, - }).toContainExactly([{ age: 10 }]) - }) - - it("successfully finds no rows", async () => { - await expectQuery({ - range: { age: { low: 5, high: 9 } }, - }).toFindNothing() - }) - - it("greater than equal to", async () => { - await expectQuery({ - range: { - age: { low: 10, high: Number.MAX_SAFE_INTEGER }, - }, - }).toContainExactly([{ age: 10 }]) - }) - - it("greater than", async () => { - await expectQuery({ - range: { - age: { low: 5, high: Number.MAX_SAFE_INTEGER }, - }, - }).toContainExactly([{ age: 10 }]) - }) - - it("less than equal to", async () => { - await expectQuery({ - range: { - age: { high: 1, low: Number.MIN_SAFE_INTEGER }, - }, - }).toContainExactly([{ age: 1 }]) - }) - - it("less than", async () => { - await expectQuery({ - range: { - age: { high: 5, low: Number.MIN_SAFE_INTEGER }, - }, - }).toContainExactly([{ age: 1 }]) - }) - }) - - describe("sort", () => { - it("sorts ascending", async () => { - await 
expectSearch({ - query: {}, - sort: "age", - sortOrder: SortOrder.ASCENDING, - }).toMatchExactly([{ age: 1 }, { age: 10 }]) - }) - - it("sorts descending", async () => { - await expectSearch({ - query: {}, - sort: "age", - sortOrder: SortOrder.DESCENDING, - }).toMatchExactly([{ age: 10 }, { age: 1 }]) - }) - }) - - describe("sortType NUMBER", () => { - it("sorts ascending", async () => { - await expectSearch({ - query: {}, - sort: "age", - sortType: SortType.NUMBER, - sortOrder: SortOrder.ASCENDING, - }).toMatchExactly([{ age: 1 }, { age: 10 }]) - }) - - it("sorts descending", async () => { - await expectSearch({ - query: {}, - sort: "age", - sortType: SortType.NUMBER, - sortOrder: SortOrder.DESCENDING, - }).toMatchExactly([{ age: 10 }, { age: 1 }]) - }) - }) - }) - - describe("dates", () => { - const JAN_1ST = "2020-01-01T00:00:00.000Z" - const JAN_2ND = "2020-01-02T00:00:00.000Z" - const JAN_5TH = "2020-01-05T00:00:00.000Z" - const JAN_9TH = "2020-01-09T00:00:00.000Z" - const JAN_10TH = "2020-01-10T00:00:00.000Z" - - beforeAll(async () => { - tableOrViewId = await createTableOrView({ - dob: { name: "dob", type: FieldType.DATETIME }, - }) - - await createRows([{ dob: JAN_1ST }, { dob: JAN_10TH }]) - }) - - describe("equal", () => { - it("successfully finds a row", async () => { - await expectQuery({ equal: { dob: JAN_1ST } }).toContainExactly([ - { dob: JAN_1ST }, - ]) - }) - - it("fails to find nonexistent row", async () => { - await expectQuery({ equal: { dob: JAN_2ND } }).toFindNothing() - }) - }) - - describe("notEqual", () => { - it("successfully finds a row", async () => { - await expectQuery({ - notEqual: { dob: JAN_1ST }, - }).toContainExactly([{ dob: JAN_10TH }]) - }) - - it("fails to find nonexistent row", async () => { - await expectQuery({ - notEqual: { dob: JAN_10TH }, - }).toContainExactly([{ dob: JAN_1ST }]) - }) - }) - - describe("oneOf", () => { - it("successfully finds a row", async () => { - await expectQuery({ oneOf: { dob: [JAN_1ST] } }).toContainExactly( - [{ dob: JAN_1ST }] - ) - }) - - it("fails to find nonexistent row", async () => { - await expectQuery({ oneOf: { dob: [JAN_2ND] } }).toFindNothing() - }) - }) - - describe("range", () => { - it("successfully finds a row", async () => { - await expectQuery({ - range: { dob: { low: JAN_1ST, high: JAN_5TH } }, - }).toContainExactly([{ dob: JAN_1ST }]) - }) - - it("successfully finds multiple rows", async () => { - await expectQuery({ - range: { dob: { low: JAN_1ST, high: JAN_10TH } }, - }).toContainExactly([{ dob: JAN_1ST }, { dob: JAN_10TH }]) - }) - - it("successfully finds a row with a high bound", async () => { - await expectQuery({ - range: { dob: { low: JAN_5TH, high: JAN_10TH } }, - }).toContainExactly([{ dob: JAN_10TH }]) - }) - - it("successfully finds no rows", async () => { - await expectQuery({ - range: { dob: { low: JAN_5TH, high: JAN_9TH } }, - }).toFindNothing() - }) - - it("greater than equal to", async () => { - await expectQuery({ - range: { - dob: { low: JAN_10TH, high: MAX_VALID_DATE.toISOString() }, - }, - }).toContainExactly([{ dob: JAN_10TH }]) - }) - - it("greater than", async () => { - await expectQuery({ - range: { - dob: { low: JAN_5TH, high: MAX_VALID_DATE.toISOString() }, - }, - }).toContainExactly([{ dob: JAN_10TH }]) - }) - - it("less than equal to", async () => { - await expectQuery({ - range: { - dob: { high: JAN_1ST, low: MIN_VALID_DATE.toISOString() }, - }, - }).toContainExactly([{ dob: JAN_1ST }]) - }) - - it("less than", async () => { - await expectQuery({ - range: { - dob: { high: 
JAN_5TH, low: MIN_VALID_DATE.toISOString() }, - }, - }).toContainExactly([{ dob: JAN_1ST }]) - }) - }) - - describe("sort", () => { - it("sorts ascending", async () => { - await expectSearch({ - query: {}, - sort: "dob", - sortOrder: SortOrder.ASCENDING, - }).toMatchExactly([{ dob: JAN_1ST }, { dob: JAN_10TH }]) - }) - - it("sorts descending", async () => { - await expectSearch({ - query: {}, - sort: "dob", - sortOrder: SortOrder.DESCENDING, - }).toMatchExactly([{ dob: JAN_10TH }, { dob: JAN_1ST }]) - }) - - describe("sortType STRING", () => { - it("sorts ascending", async () => { - await expectSearch({ - query: {}, - sort: "dob", - sortType: SortType.STRING, - sortOrder: SortOrder.ASCENDING, - }).toMatchExactly([{ dob: JAN_1ST }, { dob: JAN_10TH }]) + it("successfully finds false row", async () => { + await expectQuery({ + equal: { isTrue: false }, + }).toMatchExactly([{ isTrue: false }]) + }) }) - it("sorts descending", async () => { - await expectSearch({ - query: {}, - sort: "dob", - sortType: SortType.STRING, - sortOrder: SortOrder.DESCENDING, - }).toMatchExactly([{ dob: JAN_10TH }, { dob: JAN_1ST }]) - }) - }) - }) - }) + describe("notEqual", () => { + it("successfully finds false row", async () => { + await expectQuery({ + notEqual: { isTrue: true }, + }).toContainExactly([{ isTrue: false }]) + }) - !isInternal && - describe("datetime - time only", () => { - const T_1000 = "10:00:00" - const T_1045 = "10:45:00" - const T_1200 = "12:00:00" - const T_1530 = "15:30:00" - const T_0000 = "00:00:00" - - const UNEXISTING_TIME = "10:01:00" - - const NULL_TIME__ID = `null_time__id` - - beforeAll(async () => { - tableOrViewId = await createTableOrView({ - timeid: { name: "timeid", type: FieldType.STRING }, - time: { - name: "time", - type: FieldType.DATETIME, - timeOnly: true, - }, + it("successfully finds true row", async () => { + await expectQuery({ + notEqual: { isTrue: false }, + }).toContainExactly([{ isTrue: true }]) + }) }) - await createRows([ - { timeid: NULL_TIME__ID, time: null }, - { time: T_1000 }, - { time: T_1045 }, - { time: T_1200 }, - { time: T_1530 }, - { time: T_0000 }, - ]) - }) + describe("oneOf", () => { + it("successfully finds true row", async () => { + await expectQuery({ + oneOf: { isTrue: [true] }, + }).toContainExactly([{ isTrue: true }]) + }) - describe("equal", () => { - it("successfully finds a row", async () => { - await expectQuery({ equal: { time: T_1000 } }).toContainExactly( - [{ time: "10:00:00" }] - ) + it("successfully finds false row", async () => { + await expectQuery({ + oneOf: { isTrue: [false] }, + }).toContainExactly([{ isTrue: false }]) + }) }) - it("fails to find nonexistent row", async () => { - await expectQuery({ - equal: { time: UNEXISTING_TIME }, - }).toFindNothing() - }) - }) - - describe("notEqual", () => { - it("successfully finds a row", async () => { - await expectQuery({ - notEqual: { time: T_1000 }, - }).toContainExactly([ - { timeid: NULL_TIME__ID }, - { time: "10:45:00" }, - { time: "12:00:00" }, - { time: "15:30:00" }, - { time: "00:00:00" }, - ]) - }) - - it("return all when requesting non-existing", async () => { - await expectQuery({ - notEqual: { time: UNEXISTING_TIME }, - }).toContainExactly([ - { timeid: NULL_TIME__ID }, - { time: "10:00:00" }, - { time: "10:45:00" }, - { time: "12:00:00" }, - { time: "15:30:00" }, - { time: "00:00:00" }, - ]) - }) - }) - - describe("oneOf", () => { - it("successfully finds a row", async () => { - await expectQuery({ - oneOf: { time: [T_1000] }, - }).toContainExactly([{ time: "10:00:00" }]) 
- }) - - it("fails to find nonexistent row", async () => { - await expectQuery({ - oneOf: { time: [UNEXISTING_TIME] }, - }).toFindNothing() - }) - }) - - describe("range", () => { - it("successfully finds a row", async () => { - await expectQuery({ - range: { time: { low: T_1045, high: T_1045 } }, - }).toContainExactly([{ time: "10:45:00" }]) - }) - - it("successfully finds multiple rows", async () => { - await expectQuery({ - range: { time: { low: T_1045, high: T_1530 } }, - }).toContainExactly([ - { time: "10:45:00" }, - { time: "12:00:00" }, - { time: "15:30:00" }, - ]) - }) - - it("successfully finds no rows", async () => { - await expectQuery({ - range: { - time: { low: UNEXISTING_TIME, high: UNEXISTING_TIME }, - }, - }).toFindNothing() - }) - }) - - describe("sort", () => { - it("sorts ascending", async () => { - await expectSearch({ - query: {}, - sort: "time", - sortOrder: SortOrder.ASCENDING, - }).toMatchExactly([ - { timeid: NULL_TIME__ID }, - { time: "00:00:00" }, - { time: "10:00:00" }, - { time: "10:45:00" }, - { time: "12:00:00" }, - { time: "15:30:00" }, - ]) - }) - - it("sorts descending", async () => { - await expectSearch({ - query: {}, - sort: "time", - sortOrder: SortOrder.DESCENDING, - }).toMatchExactly([ - { time: "15:30:00" }, - { time: "12:00:00" }, - { time: "10:45:00" }, - { time: "10:00:00" }, - { time: "00:00:00" }, - { timeid: NULL_TIME__ID }, - ]) - }) - - describe("sortType STRING", () => { + describe("sort", () => { it("sorts ascending", async () => { await expectSearch({ query: {}, - sort: "time", - sortType: SortType.STRING, + sort: "isTrue", sortOrder: SortOrder.ASCENDING, - }).toMatchExactly([ - { timeid: NULL_TIME__ID }, - { time: "00:00:00" }, - { time: "10:00:00" }, - { time: "10:45:00" }, - { time: "12:00:00" }, - { time: "15:30:00" }, - ]) + }).toMatchExactly([{ isTrue: false }, { isTrue: true }]) }) it("sorts descending", async () => { await expectSearch({ query: {}, - sort: "time", - sortType: SortType.STRING, + sort: "isTrue", sortOrder: SortOrder.DESCENDING, - }).toMatchExactly([ - { time: "15:30:00" }, - { time: "12:00:00" }, - { time: "10:45:00" }, - { time: "10:00:00" }, - { time: "00:00:00" }, - { timeid: NULL_TIME__ID }, - ]) + }).toMatchExactly([{ isTrue: true }, { isTrue: false }]) }) }) }) - }) - isInternal && - !isInMemory && - describe("AI Column", () => { - const UNEXISTING_AI_COLUMN = "Real LLM Response" + !isInMemory && + describe("bindings", () => { + let globalUsers: any = [] - beforeAll(async () => { - mocks.licenses.useBudibaseAI() - mocks.licenses.useAICustomConfigs() + const serverTime = new Date() - tableOrViewId = await createTableOrView({ - product: { name: "product", type: FieldType.STRING }, - ai: { - name: "AI", - type: FieldType.AI, - operation: AIOperationEnum.PROMPT, - prompt: "Translate '{{ product }}' into German", - }, - }) + // In MariaDB and MySQL we only store dates to second precision, so we need + // to remove milliseconds from the server time to ensure searches work as + // expected. 
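The truncation mentioned in the comment above is worth spelling out once, since every date assertion in this block depends on it. The following is only a small sketch of the general JavaScript/SQL behaviour involved, nothing specific to the patch:

// The server-side timestamp normally carries milliseconds...
const exampleTime = new Date()

// ...but a MariaDB/MySQL DATETIME column without fractional seconds only keeps
// whole seconds, so the stored row comes back without them. Zeroing the
// milliseconds up front keeps the value used in search bindings identical to
// the value the database returns.
exampleTime.setMilliseconds(0)
console.log(exampleTime.toISOString().endsWith(".000Z")) // true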
+ serverTime.setMilliseconds(0) - await createRows([ - { product: "Big Mac" }, - { product: "McCrispy" }, - ]) - }) - - describe("equal", () => { - it("successfully finds rows based on AI column", async () => { - await expectQuery({ - equal: { ai: "Mock LLM Response" }, - }).toContainExactly([ - { product: "Big Mac" }, - { product: "McCrispy" }, - ]) - }) - - it("fails to find nonexistent row", async () => { - await expectQuery({ - equal: { ai: UNEXISTING_AI_COLUMN }, - }).toFindNothing() - }) - }) - - describe("notEqual", () => { - it("Returns nothing when searching notEqual on the mock AI response", async () => { - await expectQuery({ - notEqual: { ai: "Mock LLM Response" }, - }).toContainExactly([]) - }) - - it("return all when requesting non-existing response", async () => { - await expectQuery({ - notEqual: { ai: "Real LLM Response" }, - }).toContainExactly([ - { product: "Big Mac" }, - { product: "McCrispy" }, - ]) - }) - }) - - describe("oneOf", () => { - it("successfully finds a row", async () => { - await expectQuery({ - oneOf: { ai: ["Mock LLM Response", "Other LLM Response"] }, - }).toContainExactly([ - { product: "Big Mac" }, - { product: "McCrispy" }, - ]) - }) - - it("fails to find nonexistent row", async () => { - await expectQuery({ - oneOf: { ai: ["Whopper"] }, - }).toFindNothing() - }) - }) - }) - - describe("arrays", () => { - beforeAll(async () => { - tableOrViewId = await createTableOrView({ - numbers: { - name: "numbers", - type: FieldType.ARRAY, - constraints: { - type: JsonFieldSubType.ARRAY, - inclusion: ["one", "two", "three"], - }, - }, - }) - await createRows([ - { numbers: ["one", "two"] }, - { numbers: ["three"] }, - ]) - }) - - describe("contains", () => { - it("successfully finds a row", async () => { - await expectQuery({ - contains: { numbers: ["one"] }, - }).toContainExactly([{ numbers: ["one", "two"] }]) - }) - - it("fails to find nonexistent row", async () => { - await expectQuery({ - contains: { numbers: ["none"] }, - }).toFindNothing() - }) - - it("fails to find row containing all", async () => { - await expectQuery({ - contains: { numbers: ["one", "two", "three"] }, - }).toFindNothing() - }) - - it("finds all with empty list", async () => { - await expectQuery({ contains: { numbers: [] } }).toContainExactly( - [{ numbers: ["one", "two"] }, { numbers: ["three"] }] - ) - }) - }) - - describe("notContains", () => { - it("successfully finds a row", async () => { - await expectQuery({ - notContains: { numbers: ["one"] }, - }).toContainExactly([{ numbers: ["three"] }]) - }) - - it("fails to find nonexistent row", async () => { - await expectQuery({ - notContains: { numbers: ["one", "two", "three"] }, - }).toContainExactly([ - { numbers: ["one", "two"] }, - { numbers: ["three"] }, - ]) - }) - - // Not sure if this is correct behaviour but changing it would be a - // breaking change. 
- it("finds all with empty list", async () => { - await expectQuery({ - notContains: { numbers: [] }, - }).toContainExactly([ - { numbers: ["one", "two"] }, - { numbers: ["three"] }, - ]) - }) - }) - - describe("containsAny", () => { - it("successfully finds rows", async () => { - await expectQuery({ - containsAny: { numbers: ["one", "two", "three"] }, - }).toContainExactly([ - { numbers: ["one", "two"] }, - { numbers: ["three"] }, - ]) - }) - - it("fails to find nonexistent row", async () => { - await expectQuery({ - containsAny: { numbers: ["none"] }, - }).toFindNothing() - }) - - it("finds all with empty list", async () => { - await expectQuery({ - containsAny: { numbers: [] }, - }).toContainExactly([ - { numbers: ["one", "two"] }, - { numbers: ["three"] }, - ]) - }) - }) - }) - - describe("bigints", () => { - const SMALL = "1" - const MEDIUM = "10000000" - - // Our bigints are int64s in most datasources. - let BIG = "9223372036854775807" - - beforeAll(async () => { - tableOrViewId = await createTableOrView({ - num: { name: "num", type: FieldType.BIGINT }, - }) - await createRows([{ num: SMALL }, { num: MEDIUM }, { num: BIG }]) - }) - - describe("equal", () => { - it("successfully finds a row", async () => { - await expectQuery({ equal: { num: SMALL } }).toContainExactly([ - { num: SMALL }, - ]) - }) - - it("successfully finds a big value", async () => { - await expectQuery({ equal: { num: BIG } }).toContainExactly([ - { num: BIG }, - ]) - }) - - it("fails to find nonexistent row", async () => { - await expectQuery({ equal: { num: "2" } }).toFindNothing() - }) - }) - - describe("notEqual", () => { - it("successfully finds a row", async () => { - await expectQuery({ notEqual: { num: SMALL } }).toContainExactly([ - { num: MEDIUM }, - { num: BIG }, - ]) - }) - - it("fails to find nonexistent row", async () => { - await expectQuery({ notEqual: { num: 10 } }).toContainExactly([ - { num: SMALL }, - { num: MEDIUM }, - { num: BIG }, - ]) - }) - }) - - describe("oneOf", () => { - it("successfully finds a row", async () => { - await expectQuery({ oneOf: { num: [SMALL] } }).toContainExactly([ - { num: SMALL }, - ]) - }) - - it("successfully finds all rows", async () => { - await expectQuery({ - oneOf: { num: [SMALL, MEDIUM, BIG] }, - }).toContainExactly([ - { num: SMALL }, - { num: MEDIUM }, - { num: BIG }, - ]) - }) - - it("fails to find nonexistent row", async () => { - await expectQuery({ oneOf: { num: [2] } }).toFindNothing() - }) - }) - - describe("range", () => { - it("successfully finds a row", async () => { - await expectQuery({ - range: { num: { low: SMALL, high: "5" } }, - }).toContainExactly([{ num: SMALL }]) - }) - - it("successfully finds multiple rows", async () => { - await expectQuery({ - range: { num: { low: SMALL, high: MEDIUM } }, - }).toContainExactly([{ num: SMALL }, { num: MEDIUM }]) - }) - - it("successfully finds a row with a high bound", async () => { - await expectQuery({ - range: { num: { low: MEDIUM, high: BIG } }, - }).toContainExactly([{ num: MEDIUM }, { num: BIG }]) - }) - - it("successfully finds no rows", async () => { - await expectQuery({ - range: { num: { low: "5", high: "5" } }, - }).toFindNothing() - }) - - it("can search using just a low value", async () => { - await expectQuery({ - range: { num: { low: MEDIUM } }, - }).toContainExactly([{ num: MEDIUM }, { num: BIG }]) - }) - - it("can search using just a high value", async () => { - await expectQuery({ - range: { num: { high: MEDIUM } }, - }).toContainExactly([{ num: SMALL }, { num: MEDIUM }]) - }) - }) - }) 
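The bigint cases above keep SMALL, MEDIUM and BIG as strings rather than numbers. A short standalone sketch of why (plain JavaScript behaviour, not something introduced by the patch):

const BIG = "9223372036854775807" // 2^63 - 1, the int64 maximum used above

// Number.MAX_SAFE_INTEGER is far below the int64 maximum...
console.log(Number.MAX_SAFE_INTEGER) // 9007199254740991

// ...so a round trip through a JS number silently changes the value, while the
// string (or a BigInt) preserves it exactly.
console.log(BigInt(Number(BIG)) === BigInt(BIG)) // false
console.log(BigInt(BIG).toString() === BIG) // true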
- - isInternal && - describe("auto", () => { - beforeAll(async () => { - tableOrViewId = await createTableOrView({ - auto: { - name: "auto", - type: FieldType.AUTO, - autocolumn: true, - subtype: AutoFieldSubType.AUTO_ID, - }, - }) - await createRows(new Array(10).fill({})) - }) - - describe("equal", () => { - it("successfully finds a row", async () => { - await expectQuery({ equal: { auto: 1 } }).toContainExactly([ - { auto: 1 }, - ]) - }) - - it("fails to find nonexistent row", async () => { - await expectQuery({ equal: { auto: 0 } }).toFindNothing() - }) - }) - - describe("not equal", () => { - it("successfully finds a row", async () => { - await expectQuery({ notEqual: { auto: 1 } }).toContainExactly([ - { auto: 2 }, - { auto: 3 }, - { auto: 4 }, - { auto: 5 }, - { auto: 6 }, - { auto: 7 }, - { auto: 8 }, - { auto: 9 }, - { auto: 10 }, - ]) - }) - - it("fails to find nonexistent row", async () => { - await expectQuery({ notEqual: { auto: 0 } }).toContainExactly([ - { auto: 1 }, - { auto: 2 }, - { auto: 3 }, - { auto: 4 }, - { auto: 5 }, - { auto: 6 }, - { auto: 7 }, - { auto: 8 }, - { auto: 9 }, - { auto: 10 }, - ]) - }) - }) - - describe("oneOf", () => { - it("successfully finds a row", async () => { - await expectQuery({ oneOf: { auto: [1] } }).toContainExactly([ - { auto: 1 }, - ]) - }) - - it("fails to find nonexistent row", async () => { - await expectQuery({ oneOf: { auto: [0] } }).toFindNothing() - }) - }) - - describe("range", () => { - it("successfully finds a row", async () => { - await expectQuery({ - range: { auto: { low: 1, high: 1 } }, - }).toContainExactly([{ auto: 1 }]) - }) - - it("successfully finds multiple rows", async () => { - await expectQuery({ - range: { auto: { low: 1, high: 2 } }, - }).toContainExactly([{ auto: 1 }, { auto: 2 }]) - }) - - it("successfully finds a row with a high bound", async () => { - await expectQuery({ - range: { auto: { low: 2, high: 2 } }, - }).toContainExactly([{ auto: 2 }]) - }) - - it("successfully finds no rows", async () => { - await expectQuery({ - range: { auto: { low: 0, high: 0 } }, - }).toFindNothing() - }) - - it("can search using just a low value", async () => { - await expectQuery({ - range: { auto: { low: 9 } }, - }).toContainExactly([{ auto: 9 }, { auto: 10 }]) - }) - - it("can search using just a high value", async () => { - await expectQuery({ - range: { auto: { high: 2 } }, - }).toContainExactly([{ auto: 1 }, { auto: 2 }]) - }) - }) - - describe("sort", () => { - it("sorts ascending", async () => { - await expectSearch({ - query: {}, - sort: "auto", - sortOrder: SortOrder.ASCENDING, - sortType: SortType.NUMBER, - }).toMatchExactly([ - { auto: 1 }, - { auto: 2 }, - { auto: 3 }, - { auto: 4 }, - { auto: 5 }, - { auto: 6 }, - { auto: 7 }, - { auto: 8 }, - { auto: 9 }, - { auto: 10 }, - ]) - }) - - it("sorts descending", async () => { - await expectSearch({ - query: {}, - sort: "auto", - sortOrder: SortOrder.DESCENDING, - sortType: SortType.NUMBER, - }).toMatchExactly([ - { auto: 10 }, - { auto: 9 }, - { auto: 8 }, - { auto: 7 }, - { auto: 6 }, - { auto: 5 }, - { auto: 4 }, - { auto: 3 }, - { auto: 2 }, - { auto: 1 }, - ]) - }) - - // This is important for pagination. The order of results must always - // be stable or pagination will break. We don't want the user to need - // to specify an order for pagination to work. 
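The stability requirement in the comment above is commonly met by appending the table's primary key as a final sort key, guarded so it is not added twice when the caller already sorts by it (the duplicate ORDER BY problem noted in the earlier "sort on primary key" block). The Knex sketch below is only an illustration of that idea, not the actual Budibase implementation; stableOrderedQuery, primaryKey and sortColumn are made-up names.

import { Knex } from "knex"

function stableOrderedQuery(
  knex: Knex,
  tableName: string,
  primaryKey: string,
  sortColumn?: string
) {
  let query = knex(tableName).select("*")
  if (sortColumn) {
    query = query.orderBy(sortColumn, "asc")
  }
  // Only append the primary key tiebreaker when it is not already the sort
  // column, so the generated ORDER BY never names the same column twice.
  if (sortColumn !== primaryKey) {
    query = query.orderBy(primaryKey, "asc")
  }
  return query
}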
- it("is stable without a sort specified", async () => { - let { rows: fullRowList } = await config.api.row.search( - tableOrViewId, - { - tableId: tableOrViewId, - query: {}, - } + const future = new Date( + serverTime.getTime() + 1000 * 60 * 60 * 24 * 30 ) - // repeat the search many times to check the first row is always the same - let bookmark: string | number | undefined, - hasNextPage: boolean | undefined = true, - rowCount: number = 0 - do { - const response = await config.api.row.search(tableOrViewId, { - tableId: tableOrViewId, + const rows = (currentUser: User) => { + return [ + { name: "foo", appointment: "1982-01-05T00:00:00.000Z" }, + { name: "bar", appointment: "1995-05-06T00:00:00.000Z" }, + { + name: currentUser.firstName, + appointment: future.toISOString(), + }, + { + name: "serverDate", + appointment: serverTime.toISOString(), + }, + { + name: "single user, session user", + single_user: currentUser, + }, + { + name: "single user", + single_user: globalUsers[0], + }, + { + name: "deprecated single user, session user", + deprecated_single_user: [currentUser], + }, + { + name: "deprecated single user", + deprecated_single_user: [globalUsers[0]], + }, + { + name: "multi user", + multi_user: globalUsers, + }, + { + name: "multi user with session user", + multi_user: [...globalUsers, currentUser], + }, + { + name: "deprecated multi user", + deprecated_multi_user: globalUsers, + }, + { + name: "deprecated multi user with session user", + deprecated_multi_user: [...globalUsers, currentUser], + }, + ] + } + + beforeAll(async () => { + // Set up some global users + globalUsers = await Promise.all( + Array(2) + .fill(0) + .map(async () => { + const globalUser = await config.globalUser() + const userMedataId = globalUser._id + ? dbCore.generateUserMetadataID(globalUser._id) + : null + return { + _id: globalUser._id, + _meta: userMedataId, + } + }) + ) + + tableOrViewId = await createTableOrView({ + name: { name: "name", type: FieldType.STRING }, + appointment: { + name: "appointment", + type: FieldType.DATETIME, + }, + single_user: { + name: "single_user", + type: FieldType.BB_REFERENCE_SINGLE, + subtype: BBReferenceFieldSubType.USER, + }, + deprecated_single_user: { + name: "deprecated_single_user", + type: FieldType.BB_REFERENCE, + subtype: BBReferenceFieldSubType.USER, + }, + multi_user: { + name: "multi_user", + type: FieldType.BB_REFERENCE, + subtype: BBReferenceFieldSubType.USER, + constraints: { + type: "array", + }, + }, + deprecated_multi_user: { + name: "deprecated_multi_user", + type: FieldType.BB_REFERENCE, + subtype: BBReferenceFieldSubType.USERS, + constraints: { + type: "array", + }, + }, + }) + await createRows(rows(config.getUser())) + }) + + // !! 
Current User is auto generated per run + it("should return all rows matching the session user firstname", async () => { + await expectQuery({ + equal: { name: "{{ [user].firstName }}" }, + }).toContainExactly([ + { + name: config.getUser().firstName, + appointment: future.toISOString(), + }, + ]) + }) + + it("should return all rows matching the session user firstname when logical operator used", async () => { + await expectQuery({ + $and: { + conditions: [ + { equal: { name: "{{ [user].firstName }}" } }, + ], + }, + }).toContainExactly([ + { + name: config.getUser().firstName, + appointment: future.toISOString(), + }, + ]) + }) + + it("should parse the date binding and return all rows after the resolved value", async () => { + await tk.withFreeze(serverTime, async () => { + await expectQuery({ + range: { + appointment: { + low: "{{ [now] }}", + high: "9999-00-00T00:00:00.000Z", + }, + }, + }).toContainExactly([ + { + name: config.getUser().firstName, + appointment: future.toISOString(), + }, + { + name: "serverDate", + appointment: serverTime.toISOString(), + }, + ]) + }) + }) + + it("should parse the date binding and return all rows before the resolved value", async () => { + await expectQuery({ + range: { + appointment: { + low: "0000-00-00T00:00:00.000Z", + high: "{{ [now] }}", + }, + }, + }).toContainExactly([ + { name: "foo", appointment: "1982-01-05T00:00:00.000Z" }, + { name: "bar", appointment: "1995-05-06T00:00:00.000Z" }, + { + name: "serverDate", + appointment: serverTime.toISOString(), + }, + ]) + }) + + it("should parse the encoded js snippet. Return rows with appointments up to 1 week in the past", async () => { + const jsBinding = "return snippets.WeeksAgo();" + const encodedBinding = encodeJSBinding(jsBinding) + + await expectQuery({ + range: { + appointment: { + low: "0000-00-00T00:00:00.000Z", + high: encodedBinding, + }, + }, + }).toContainExactly([ + { name: "foo", appointment: "1982-01-05T00:00:00.000Z" }, + { name: "bar", appointment: "1995-05-06T00:00:00.000Z" }, + ]) + }) + + it("should parse the encoded js binding. 
Return rows with appointments 2 weeks in the past", async () => { + const jsBinding = `const currentTime = new Date(${Date.now()})\ncurrentTime.setDate(currentTime.getDate()-14);\nreturn currentTime.toISOString();` + const encodedBinding = encodeJSBinding(jsBinding) + + await expectQuery({ + range: { + appointment: { + low: "0000-00-00T00:00:00.000Z", + high: encodedBinding, + }, + }, + }).toContainExactly([ + { name: "foo", appointment: "1982-01-05T00:00:00.000Z" }, + { name: "bar", appointment: "1995-05-06T00:00:00.000Z" }, + ]) + }) + + it("should match a single user row by the session user id", async () => { + await expectQuery({ + equal: { single_user: "{{ [user]._id }}" }, + }).toContainExactly([ + { + name: "single user, session user", + single_user: { _id: config.getUser()._id }, + }, + ]) + }) + + it("should match a deprecated single user row by the session user id", async () => { + await expectQuery({ + equal: { deprecated_single_user: "{{ [user]._id }}" }, + }).toContainExactly([ + { + name: "deprecated single user, session user", + deprecated_single_user: [{ _id: config.getUser()._id }], + }, + ]) + }) + + it("should match the session user id in a multi user field", async () => { + const allUsers = [...globalUsers, config.getUser()].map( + (user: any) => { + return { _id: user._id } + } + ) + + await expectQuery({ + contains: { multi_user: ["{{ [user]._id }}"] }, + }).toContainExactly([ + { + name: "multi user with session user", + multi_user: allUsers, + }, + ]) + }) + + it("should match the session user id in a deprecated multi user field", async () => { + const allUsers = [...globalUsers, config.getUser()].map( + (user: any) => { + return { _id: user._id } + } + ) + + await expectQuery({ + contains: { deprecated_multi_user: ["{{ [user]._id }}"] }, + }).toContainExactly([ + { + name: "deprecated multi user with session user", + deprecated_multi_user: allUsers, + }, + ]) + }) + + it("should not match the session user id in a multi user field", async () => { + await expectQuery({ + notContains: { multi_user: ["{{ [user]._id }}"] }, + notEmpty: { multi_user: true }, + }).toContainExactly([ + { + name: "multi user", + multi_user: globalUsers.map((user: any) => { + return { _id: user._id } + }), + }, + ]) + }) + + it("should not match the session user id in a deprecated multi user field", async () => { + await expectQuery({ + notContains: { + deprecated_multi_user: ["{{ [user]._id }}"], + }, + notEmpty: { deprecated_multi_user: true }, + }).toContainExactly([ + { + name: "deprecated multi user", + deprecated_multi_user: globalUsers.map((user: any) => { + return { _id: user._id } + }), + }, + ]) + }) + + it("should match the session user id and a user table row id using helpers, user binding and a static user id.", async () => { + await expectQuery({ + oneOf: { + single_user: [ + "{{ default [user]._id '_empty_' }}", + globalUsers[0]._id, + ], + }, + }).toContainExactly([ + { + name: "single user, session user", + single_user: { _id: config.getUser()._id }, + }, + { + name: "single user", + single_user: { _id: globalUsers[0]._id }, + }, + ]) + }) + + it("should match the session user id and a user table row id using helpers, user binding and a static user id. 
(deprecated single user)", async () => { + await expectQuery({ + oneOf: { + deprecated_single_user: [ + "{{ default [user]._id '_empty_' }}", + globalUsers[0]._id, + ], + }, + }).toContainExactly([ + { + name: "deprecated single user, session user", + deprecated_single_user: [{ _id: config.getUser()._id }], + }, + { + name: "deprecated single user", + deprecated_single_user: [{ _id: globalUsers[0]._id }], + }, + ]) + }) + + it("should resolve 'default' helper to '_empty_' when binding resolves to nothing", async () => { + await expectQuery({ + oneOf: { + single_user: [ + "{{ default [user]._idx '_empty_' }}", + globalUsers[0]._id, + ], + }, + }).toContainExactly([ + { + name: "single user", + single_user: { _id: globalUsers[0]._id }, + }, + ]) + }) + + it("should resolve 'default' helper to '_empty_' when binding resolves to nothing (deprecated single user)", async () => { + await expectQuery({ + oneOf: { + deprecated_single_user: [ + "{{ default [user]._idx '_empty_' }}", + globalUsers[0]._id, + ], + }, + }).toContainExactly([ + { + name: "deprecated single user", + deprecated_single_user: [{ _id: globalUsers[0]._id }], + }, + ]) + }) + }) + + const stringTypes = [FieldType.STRING, FieldType.LONGFORM] as const + describe.each(stringTypes)("%s", type => { + beforeAll(async () => { + tableOrViewId = await createTableOrView({ + name: { name: "name", type }, + }) + await createRows([{ name: "foo" }, { name: "bar" }]) + }) + + describe("misc", () => { + it("should return all if no query is passed", async () => { + await expectSearch({} as RowSearchParams).toContainExactly([ + { name: "foo" }, + { name: "bar" }, + ]) + }) + + it("should return all if empty query is passed", async () => { + await expectQuery({}).toContainExactly([ + { name: "foo" }, + { name: "bar" }, + ]) + }) + + it("should return all if onEmptyFilter is RETURN_ALL", async () => { + await expectQuery({ + onEmptyFilter: EmptyFilterOption.RETURN_ALL, + }).toContainExactly([{ name: "foo" }, { name: "bar" }]) + }) + + // onEmptyFilter cannot be sent to view searches + !isView && + it("should return nothing if onEmptyFilter is RETURN_NONE", async () => { + await expectQuery({ + onEmptyFilter: EmptyFilterOption.RETURN_NONE, + }).toFindNothing() + }) + + it("should respect limit", async () => { + await expectSearch({ limit: 1, paginate: true, query: {}, - bookmark, - }) - bookmark = response.bookmark - hasNextPage = response.hasNextPage - expect(response.rows.length).toEqual(1) - const foundRow = response.rows[0] - expect(foundRow).toEqual(fullRowList[rowCount++]) - } while (hasNextPage) + }).toHaveLength(1) + }) }) - }) - describe("pagination", () => { - it("should paginate through all rows", async () => { - // @ts-ignore - let bookmark: string | number = undefined - let rows: Row[] = [] + describe("equal", () => { + it("successfully finds a row", async () => { + await expectQuery({ + equal: { name: "foo" }, + }).toContainExactly([{ name: "foo" }]) + }) - // eslint-disable-next-line no-constant-condition - while (true) { - const response = await config.api.row.search(tableOrViewId, { - tableId: tableOrViewId, - limit: 3, + it("fails to find nonexistent row", async () => { + await expectQuery({ equal: { name: "none" } }).toFindNothing() + }) + + it("works as an or condition", async () => { + await expectQuery({ + allOr: true, + equal: { name: "foo" }, + oneOf: { name: ["bar"] }, + }).toContainExactly([{ name: "foo" }, { name: "bar" }]) + }) + + it("can have multiple values for same column", async () => { + await expectQuery({ + 
allOr: true, + equal: { "1:name": "foo", "2:name": "bar" }, + }).toContainExactly([{ name: "foo" }, { name: "bar" }]) + }) + }) + + describe("notEqual", () => { + it("successfully finds a row", async () => { + await expectQuery({ + notEqual: { name: "foo" }, + }).toContainExactly([{ name: "bar" }]) + }) + + it("fails to find nonexistent row", async () => { + await expectQuery({ + notEqual: { name: "bar" }, + }).toContainExactly([{ name: "foo" }]) + }) + }) + + describe("oneOf", () => { + it("successfully finds a row", async () => { + await expectQuery({ + oneOf: { name: ["foo"] }, + }).toContainExactly([{ name: "foo" }]) + }) + + it("fails to find nonexistent row", async () => { + await expectQuery({ + oneOf: { name: ["none"] }, + }).toFindNothing() + }) + + it("can have multiple values for same column", async () => { + await expectQuery({ + oneOf: { + name: ["foo", "bar"], + }, + }).toContainExactly([{ name: "foo" }, { name: "bar" }]) + }) + + it("splits comma separated strings", async () => { + await expectQuery({ + oneOf: { + // @ts-ignore + name: "foo,bar", + }, + }).toContainExactly([{ name: "foo" }, { name: "bar" }]) + }) + + it("trims whitespace", async () => { + await expectQuery({ + oneOf: { + // @ts-ignore + name: "foo, bar", + }, + }).toContainExactly([{ name: "foo" }, { name: "bar" }]) + }) + + it("empty arrays returns all when onEmptyFilter is set to return 'all'", async () => { + await expectQuery({ + onEmptyFilter: EmptyFilterOption.RETURN_ALL, + oneOf: { name: [] }, + }).toContainExactly([{ name: "foo" }, { name: "bar" }]) + }) + + // onEmptyFilter cannot be sent to view searches + !isView && + it("empty arrays returns all when onEmptyFilter is set to return 'none'", async () => { + await expectQuery({ + onEmptyFilter: EmptyFilterOption.RETURN_NONE, + oneOf: { name: [] }, + }).toContainExactly([]) + }) + }) + + describe("fuzzy", () => { + it("successfully finds a row", async () => { + await expectQuery({ fuzzy: { name: "oo" } }).toContainExactly( + [{ name: "foo" }] + ) + }) + + it("fails to find nonexistent row", async () => { + await expectQuery({ fuzzy: { name: "none" } }).toFindNothing() + }) + }) + + describe("string", () => { + it("successfully finds a row", async () => { + await expectQuery({ + string: { name: "fo" }, + }).toContainExactly([{ name: "foo" }]) + }) + + it("fails to find nonexistent row", async () => { + await expectQuery({ + string: { name: "none" }, + }).toFindNothing() + }) + + it("is case-insensitive", async () => { + await expectQuery({ + string: { name: "FO" }, + }).toContainExactly([{ name: "foo" }]) + }) + }) + + describe("range", () => { + it("successfully finds multiple rows", async () => { + await expectQuery({ + range: { name: { low: "a", high: "z" } }, + }).toContainExactly([{ name: "bar" }, { name: "foo" }]) + }) + + it("successfully finds a row with a high bound", async () => { + await expectQuery({ + range: { name: { low: "a", high: "c" } }, + }).toContainExactly([{ name: "bar" }]) + }) + + it("successfully finds a row with a low bound", async () => { + await expectQuery({ + range: { name: { low: "f", high: "z" } }, + }).toContainExactly([{ name: "foo" }]) + }) + + it("successfully finds no rows", async () => { + await expectQuery({ + range: { name: { low: "g", high: "h" } }, + }).toFindNothing() + }) + + it("ignores low if it's an empty object", async () => { + await expectQuery({ + // @ts-ignore + range: { name: { low: {}, high: "z" } }, + }).toContainExactly([{ name: "foo" }, { name: "bar" }]) + }) + + it("ignores high if it's an 
empty object", async () => { + await expectQuery({ + // @ts-ignore + range: { name: { low: "a", high: {} } }, + }).toContainExactly([{ name: "foo" }, { name: "bar" }]) + }) + }) + + describe("empty", () => { + it("finds no empty rows", async () => { + await expectQuery({ empty: { name: null } }).toFindNothing() + }) + + it("should not be affected by when filter empty behaviour", async () => { + await expectQuery({ + empty: { name: null }, + onEmptyFilter: EmptyFilterOption.RETURN_ALL, + }).toFindNothing() + }) + }) + + describe("notEmpty", () => { + it("finds all non-empty rows", async () => { + await expectQuery({ + notEmpty: { name: null }, + }).toContainExactly([{ name: "foo" }, { name: "bar" }]) + }) + + it("should not be affected by when filter empty behaviour", async () => { + await expectQuery({ + notEmpty: { name: null }, + onEmptyFilter: EmptyFilterOption.RETURN_NONE, + }).toContainExactly([{ name: "foo" }, { name: "bar" }]) + }) + }) + + describe("sort", () => { + it("sorts ascending", async () => { + await expectSearch({ query: {}, - bookmark, - paginate: true, + sort: "name", + sortOrder: SortOrder.ASCENDING, + }).toMatchExactly([{ name: "bar" }, { name: "foo" }]) + }) + + it("sorts descending", async () => { + await expectSearch({ + query: {}, + sort: "name", + sortOrder: SortOrder.DESCENDING, + }).toMatchExactly([{ name: "foo" }, { name: "bar" }]) + }) + + describe("sortType STRING", () => { + it("sorts ascending", async () => { + await expectSearch({ + query: {}, + sort: "name", + sortType: SortType.STRING, + sortOrder: SortOrder.ASCENDING, + }).toMatchExactly([{ name: "bar" }, { name: "foo" }]) }) - rows.push(...response.rows) + it("sorts descending", async () => { + await expectSearch({ + query: {}, + sort: "name", + sortType: SortType.STRING, + sortOrder: SortOrder.DESCENDING, + }).toMatchExactly([{ name: "foo" }, { name: "bar" }]) + }) + }) - if (!response.bookmark || !response.hasNextPage) { - break - } - bookmark = response.bookmark - } + !isInternal && + !isInMemory && + // This test was added because we automatically add in a sort by the + // primary key, and we used to do this unconditionally which caused + // problems because it was possible for the primary key to appear twice + // in the resulting SQL ORDER BY clause, resulting in an SQL error. + // We now check first to make sure that the primary key isn't already + // in the sort before adding it. + describe("sort on primary key", () => { + beforeAll(async () => { + const tableName = structures.uuid().substring(0, 10) + await client!.schema.createTable(tableName, t => { + t.string("name").primary() + }) + const resp = await config.api.datasource.fetchSchema({ + datasourceId: datasource!._id!, + }) - const autoValues = rows - .map(row => row.auto) - .sort((a, b) => a - b) - expect(autoValues).toEqual([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]) + tableOrViewId = resp.datasource.entities![tableName]._id! 
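+              // the refreshed datasource schema now includes the table created above; use its generated ID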
+ + await createRows([{ name: "foo" }, { name: "bar" }]) + }) + + it("should be able to sort by a primary key column ascending", async () => + expectSearch({ + query: {}, + sort: "name", + sortOrder: SortOrder.ASCENDING, + }).toMatchExactly([{ name: "bar" }, { name: "foo" }])) + + it("should be able to sort by a primary key column descending", async () => + expectSearch({ + query: {}, + sort: "name", + sortOrder: SortOrder.DESCENDING, + }).toMatchExactly([{ name: "foo" }, { name: "bar" }])) + }) }) }) - }) - describe("field name 1:name", () => { - beforeAll(async () => { - tableOrViewId = await createTableOrView({ - "1:name": { name: "1:name", type: FieldType.STRING }, - }) - await createRows([{ "1:name": "bar" }, { "1:name": "foo" }]) - }) - - it("successfully finds a row", async () => { - await expectQuery({ - equal: { "1:1:name": "bar" }, - }).toContainExactly([{ "1:name": "bar" }]) - }) - - it("fails to find nonexistent row", async () => { - await expectQuery({ equal: { "1:1:name": "none" } }).toFindNothing() - }) - }) - - isSql && - describe("related formulas", () => { - beforeAll(async () => { - const arrayTable = await createTable({ - name: { name: "name", type: FieldType.STRING }, - array: { - name: "array", - type: FieldType.ARRAY, - constraints: { - type: JsonFieldSubType.ARRAY, - inclusion: ["option 1", "option 2"], - }, - }, + describe("numbers", () => { + beforeAll(async () => { + tableOrViewId = await createTableOrView({ + age: { name: "age", type: FieldType.NUMBER }, + }) + await createRows([{ age: 1 }, { age: 10 }]) }) - tableOrViewId = await createTableOrView({ - relationship: { - type: FieldType.LINK, - relationshipType: RelationshipType.MANY_TO_ONE, - name: "relationship", - fieldName: "relate", - tableId: arrayTable, - constraints: { - type: "array", - }, - }, - formula: { - type: FieldType.FORMULA, - name: "formula", - formula: encodeJSBinding( - `let array = [];$("relationship").forEach(rel => array = array.concat(rel.array));return array.sort().join(",")` - ), - }, - }) - const arrayRows = await Promise.all([ - config.api.row.save(arrayTable, { - name: "foo", - array: ["option 1"], - }), - config.api.row.save(arrayTable, { - name: "bar", - array: ["option 2"], - }), - ]) - await Promise.all([ - config.api.row.save(tableOrViewId, { - relationship: [arrayRows[0]._id, arrayRows[1]._id], - }), - ]) - }) - it("formula is correct with relationship arrays", async () => { - await expectQuery({}).toContain([ - { formula: "option 1,option 2" }, - ]) - }) - }) - - describe("user", () => { - let user1: User - let user2: User - - beforeAll(async () => { - user1 = await config.createUser({ _id: `us_${utils.newid()}` }) - user2 = await config.createUser({ _id: `us_${utils.newid()}` }) - - tableOrViewId = await createTableOrView({ - user: { - name: "user", - type: FieldType.BB_REFERENCE_SINGLE, - subtype: BBReferenceFieldSubType.USER, - }, - }) - - await createRows([{ user: user1 }, { user: user2 }, { user: null }]) - }) - - describe("equal", () => { - it("successfully finds a row", async () => { - await expectQuery({ - equal: { user: user1._id }, - }).toContainExactly([{ user: { _id: user1._id } }]) - }) - - it("fails to find nonexistent row", async () => { - await expectQuery({ equal: { user: "us_none" } }).toFindNothing() - }) - }) - - describe("notEqual", () => { - it("successfully finds a row", async () => { - await expectQuery({ - notEqual: { user: user1._id }, - }).toContainExactly([{ user: { _id: user2._id } }, {}]) - }) - - it("fails to find nonexistent row", async () => { 
- await expectQuery({ - notEqual: { user: "us_none" }, - }).toContainExactly([ - { user: { _id: user1._id } }, - { user: { _id: user2._id } }, - {}, - ]) - }) - }) - - describe("oneOf", () => { - it("successfully finds a row", async () => { - await expectQuery({ - oneOf: { user: [user1._id] }, - }).toContainExactly([{ user: { _id: user1._id } }]) - }) - - it("fails to find nonexistent row", async () => { - await expectQuery({ - oneOf: { user: ["us_none"] }, - }).toFindNothing() - }) - }) - - describe("empty", () => { - it("finds empty rows", async () => { - await expectQuery({ empty: { user: null } }).toContainExactly([ - {}, - ]) - }) - }) - - describe("notEmpty", () => { - it("finds non-empty rows", async () => { - await expectQuery({ notEmpty: { user: null } }).toContainExactly([ - { user: { _id: user1._id } }, - { user: { _id: user2._id } }, - ]) - }) - }) - }) - - describe("multi user", () => { - let user1: User - let user2: User - - beforeAll(async () => { - user1 = await config.createUser({ _id: `us_${utils.newid()}` }) - user2 = await config.createUser({ _id: `us_${utils.newid()}` }) - - tableOrViewId = await createTableOrView({ - users: { - name: "users", - type: FieldType.BB_REFERENCE, - subtype: BBReferenceFieldSubType.USER, - constraints: { type: "array" }, - }, - number: { - name: "number", - type: FieldType.NUMBER, - }, - }) - - await createRows([ - { number: 1, users: [user1] }, - { number: 2, users: [user2] }, - { number: 3, users: [user1, user2] }, - { number: 4, users: [] }, - ]) - }) - - describe("contains", () => { - it("successfully finds a row", async () => { - await expectQuery({ - contains: { users: [user1._id] }, - }).toContainExactly([ - { users: [{ _id: user1._id }] }, - { users: [{ _id: user1._id }, { _id: user2._id }] }, - ]) - }) - - it("successfully finds a row searching with a string", async () => { - await expectQuery({ - // @ts-expect-error this test specifically goes against the type to - // test that we coerce the string to an array. 
- contains: { "1:users": user1._id }, - }).toContainExactly([ - { users: [{ _id: user1._id }] }, - { users: [{ _id: user1._id }, { _id: user2._id }] }, - ]) - }) - - it("fails to find nonexistent row", async () => { - await expectQuery({ - contains: { users: ["us_none"] }, - }).toFindNothing() - }) - }) - - describe("notContains", () => { - it("successfully finds a row", async () => { - await expectQuery({ - notContains: { users: [user1._id] }, - }).toContainExactly([{ users: [{ _id: user2._id }] }, {}]) - }) - - it("fails to find nonexistent row", async () => { - await expectQuery({ - notContains: { users: ["us_none"] }, - }).toContainExactly([ - { users: [{ _id: user1._id }] }, - { users: [{ _id: user2._id }] }, - { users: [{ _id: user1._id }, { _id: user2._id }] }, - {}, - ]) - }) - }) - - describe("containsAny", () => { - it("successfully finds rows", async () => { - await expectQuery({ - containsAny: { users: [user1._id, user2._id] }, - }).toContainExactly([ - { users: [{ _id: user1._id }] }, - { users: [{ _id: user2._id }] }, - { users: [{ _id: user1._id }, { _id: user2._id }] }, - ]) - }) - - it("fails to find nonexistent row", async () => { - await expectQuery({ - containsAny: { users: ["us_none"] }, - }).toFindNothing() - }) - }) - - describe("multi-column equals", () => { - it("successfully finds a row", async () => { - await expectQuery({ - equal: { number: 1 }, - contains: { users: [user1._id] }, - }).toContainExactly([{ users: [{ _id: user1._id }], number: 1 }]) - }) - - it("fails to find nonexistent row", async () => { - await expectQuery({ - equal: { number: 2 }, - contains: { users: [user1._id] }, - }).toFindNothing() - }) - }) - }) - - // It also can't work for in-memory searching because the related table name - // isn't available. 
- !isInMemory && - describe.each([ - RelationshipType.ONE_TO_MANY, - RelationshipType.MANY_TO_ONE, - RelationshipType.MANY_TO_MANY, - ])("relations (%s)", relationshipType => { - let productCategoryTable: Table, productCatRows: Row[] - - beforeAll(async () => { - const { relatedTable, tableId } = await basicRelationshipTables( - relationshipType - ) - tableOrViewId = tableId - productCategoryTable = relatedTable - - productCatRows = await Promise.all([ - config.api.row.save(productCategoryTable._id!, { name: "foo" }), - config.api.row.save(productCategoryTable._id!, { name: "bar" }), - ]) - - await Promise.all([ - config.api.row.save(tableOrViewId, { - name: "foo", - productCat: [productCatRows[0]._id], - }), - config.api.row.save(tableOrViewId, { - name: "bar", - productCat: [productCatRows[1]._id], - }), - config.api.row.save(tableOrViewId, { - name: "baz", - productCat: [], - }), - ]) - }) - - it("should be able to filter by relationship using column name", async () => { - await expectQuery({ - equal: { ["productCat.name"]: "foo" }, - }).toContainExactly([ - { name: "foo", productCat: [{ _id: productCatRows[0]._id }] }, - ]) - }) - - it("should be able to filter by relationship using table name", async () => { - await expectQuery({ - equal: { [`${productCategoryTable.name}.name`]: "foo" }, - }).toContainExactly([ - { name: "foo", productCat: [{ _id: productCatRows[0]._id }] }, - ]) - }) - - it("shouldn't return any relationship for last row", async () => { - await expectQuery({ - equal: { ["name"]: "baz" }, - }).toContainExactly([{ name: "baz", productCat: undefined }]) - }) - - describe("logical filters", () => { - const logicalOperators = [LogicalOperator.AND, LogicalOperator.OR] - - describe("$and", () => { - it("should allow single conditions", async () => { - await expectQuery({ - $and: { - conditions: [ - { - equal: { ["productCat.name"]: "foo" }, - }, - ], - }, - }).toContainExactly([ - { - name: "foo", - productCat: [{ _id: productCatRows[0]._id }], - }, + describe("equal", () => { + it("successfully finds a row", async () => { + await expectQuery({ equal: { age: 1 } }).toContainExactly([ + { age: 1 }, ]) }) - it("should allow exclusive conditions", async () => { - await expectQuery({ - $and: { - conditions: [ - { - equal: { ["productCat.name"]: "foo" }, - notEqual: { ["productCat.name"]: "foo" }, - }, - ], - }, - }).toContainExactly([]) + it("fails to find nonexistent row", async () => { + await expectQuery({ equal: { age: 2 } }).toFindNothing() + }) + }) + + describe("notEqual", () => { + it("successfully finds a row", async () => { + await expectQuery({ notEqual: { age: 1 } }).toContainExactly([ + { age: 10 }, + ]) }) - it.each([logicalOperators])( - "should allow nested ands with single conditions (with %s as root)", - async rootOperator => { + it("fails to find nonexistent row", async () => { + await expectQuery({ notEqual: { age: 10 } }).toContainExactly( + [{ age: 1 }] + ) + }) + }) + + describe("oneOf", () => { + it("successfully finds a row", async () => { + await expectQuery({ oneOf: { age: [1] } }).toContainExactly([ + { age: 1 }, + ]) + }) + + it("fails to find nonexistent row", async () => { + await expectQuery({ oneOf: { age: [2] } }).toFindNothing() + }) + + it("can convert from a string", async () => { + await expectQuery({ + oneOf: { + // @ts-ignore + age: "1", + }, + }).toContainExactly([{ age: 1 }]) + }) + + it("can find multiple values for same column", async () => { + await expectQuery({ + oneOf: { + // @ts-ignore + age: "1,10", + }, + }).toContainExactly([{ 
age: 1 }, { age: 10 }]) + }) + }) + + describe("range", () => { + it("successfully finds a row", async () => { + await expectQuery({ + range: { age: { low: 1, high: 5 } }, + }).toContainExactly([{ age: 1 }]) + }) + + it("successfully finds multiple rows", async () => { + await expectQuery({ + range: { age: { low: 1, high: 10 } }, + }).toContainExactly([{ age: 1 }, { age: 10 }]) + }) + + it("successfully finds a row with a high bound", async () => { + await expectQuery({ + range: { age: { low: 5, high: 10 } }, + }).toContainExactly([{ age: 10 }]) + }) + + it("successfully finds no rows", async () => { + await expectQuery({ + range: { age: { low: 5, high: 9 } }, + }).toFindNothing() + }) + + it("greater than equal to", async () => { + await expectQuery({ + range: { + age: { low: 10, high: Number.MAX_SAFE_INTEGER }, + }, + }).toContainExactly([{ age: 10 }]) + }) + + it("greater than", async () => { + await expectQuery({ + range: { + age: { low: 5, high: Number.MAX_SAFE_INTEGER }, + }, + }).toContainExactly([{ age: 10 }]) + }) + + it("less than equal to", async () => { + await expectQuery({ + range: { + age: { high: 1, low: Number.MIN_SAFE_INTEGER }, + }, + }).toContainExactly([{ age: 1 }]) + }) + + it("less than", async () => { + await expectQuery({ + range: { + age: { high: 5, low: Number.MIN_SAFE_INTEGER }, + }, + }).toContainExactly([{ age: 1 }]) + }) + }) + + describe("sort", () => { + it("sorts ascending", async () => { + await expectSearch({ + query: {}, + sort: "age", + sortOrder: SortOrder.ASCENDING, + }).toMatchExactly([{ age: 1 }, { age: 10 }]) + }) + + it("sorts descending", async () => { + await expectSearch({ + query: {}, + sort: "age", + sortOrder: SortOrder.DESCENDING, + }).toMatchExactly([{ age: 10 }, { age: 1 }]) + }) + }) + + describe("sortType NUMBER", () => { + it("sorts ascending", async () => { + await expectSearch({ + query: {}, + sort: "age", + sortType: SortType.NUMBER, + sortOrder: SortOrder.ASCENDING, + }).toMatchExactly([{ age: 1 }, { age: 10 }]) + }) + + it("sorts descending", async () => { + await expectSearch({ + query: {}, + sort: "age", + sortType: SortType.NUMBER, + sortOrder: SortOrder.DESCENDING, + }).toMatchExactly([{ age: 10 }, { age: 1 }]) + }) + }) + }) + + describe("dates", () => { + const JAN_1ST = "2020-01-01T00:00:00.000Z" + const JAN_2ND = "2020-01-02T00:00:00.000Z" + const JAN_5TH = "2020-01-05T00:00:00.000Z" + const JAN_9TH = "2020-01-09T00:00:00.000Z" + const JAN_10TH = "2020-01-10T00:00:00.000Z" + + beforeAll(async () => { + tableOrViewId = await createTableOrView({ + dob: { name: "dob", type: FieldType.DATETIME }, + }) + + await createRows([{ dob: JAN_1ST }, { dob: JAN_10TH }]) + }) + + describe("equal", () => { + it("successfully finds a row", async () => { + await expectQuery({ + equal: { dob: JAN_1ST }, + }).toContainExactly([{ dob: JAN_1ST }]) + }) + + it("fails to find nonexistent row", async () => { + await expectQuery({ equal: { dob: JAN_2ND } }).toFindNothing() + }) + }) + + describe("notEqual", () => { + it("successfully finds a row", async () => { + await expectQuery({ + notEqual: { dob: JAN_1ST }, + }).toContainExactly([{ dob: JAN_10TH }]) + }) + + it("fails to find nonexistent row", async () => { + await expectQuery({ + notEqual: { dob: JAN_10TH }, + }).toContainExactly([{ dob: JAN_1ST }]) + }) + }) + + describe("oneOf", () => { + it("successfully finds a row", async () => { + await expectQuery({ + oneOf: { dob: [JAN_1ST] }, + }).toContainExactly([{ dob: JAN_1ST }]) + }) + + it("fails to find nonexistent row", async () => { + await 
expectQuery({ + oneOf: { dob: [JAN_2ND] }, + }).toFindNothing() + }) + }) + + describe("range", () => { + it("successfully finds a row", async () => { + await expectQuery({ + range: { dob: { low: JAN_1ST, high: JAN_5TH } }, + }).toContainExactly([{ dob: JAN_1ST }]) + }) + + it("successfully finds multiple rows", async () => { + await expectQuery({ + range: { dob: { low: JAN_1ST, high: JAN_10TH } }, + }).toContainExactly([{ dob: JAN_1ST }, { dob: JAN_10TH }]) + }) + + it("successfully finds a row with a high bound", async () => { + await expectQuery({ + range: { dob: { low: JAN_5TH, high: JAN_10TH } }, + }).toContainExactly([{ dob: JAN_10TH }]) + }) + + it("successfully finds no rows", async () => { + await expectQuery({ + range: { dob: { low: JAN_5TH, high: JAN_9TH } }, + }).toFindNothing() + }) + + it("greater than equal to", async () => { + await expectQuery({ + range: { + dob: { + low: JAN_10TH, + high: MAX_VALID_DATE.toISOString(), + }, + }, + }).toContainExactly([{ dob: JAN_10TH }]) + }) + + it("greater than", async () => { + await expectQuery({ + range: { + dob: { low: JAN_5TH, high: MAX_VALID_DATE.toISOString() }, + }, + }).toContainExactly([{ dob: JAN_10TH }]) + }) + + it("less than equal to", async () => { + await expectQuery({ + range: { + dob: { high: JAN_1ST, low: MIN_VALID_DATE.toISOString() }, + }, + }).toContainExactly([{ dob: JAN_1ST }]) + }) + + it("less than", async () => { + await expectQuery({ + range: { + dob: { high: JAN_5TH, low: MIN_VALID_DATE.toISOString() }, + }, + }).toContainExactly([{ dob: JAN_1ST }]) + }) + }) + + describe("sort", () => { + it("sorts ascending", async () => { + await expectSearch({ + query: {}, + sort: "dob", + sortOrder: SortOrder.ASCENDING, + }).toMatchExactly([{ dob: JAN_1ST }, { dob: JAN_10TH }]) + }) + + it("sorts descending", async () => { + await expectSearch({ + query: {}, + sort: "dob", + sortOrder: SortOrder.DESCENDING, + }).toMatchExactly([{ dob: JAN_10TH }, { dob: JAN_1ST }]) + }) + + describe("sortType STRING", () => { + it("sorts ascending", async () => { + await expectSearch({ + query: {}, + sort: "dob", + sortType: SortType.STRING, + sortOrder: SortOrder.ASCENDING, + }).toMatchExactly([{ dob: JAN_1ST }, { dob: JAN_10TH }]) + }) + + it("sorts descending", async () => { + await expectSearch({ + query: {}, + sort: "dob", + sortType: SortType.STRING, + sortOrder: SortOrder.DESCENDING, + }).toMatchExactly([{ dob: JAN_10TH }, { dob: JAN_1ST }]) + }) + }) + }) + }) + + !isInternal && + describe("datetime - time only", () => { + const T_1000 = "10:00:00" + const T_1045 = "10:45:00" + const T_1200 = "12:00:00" + const T_1530 = "15:30:00" + const T_0000 = "00:00:00" + + const UNEXISTING_TIME = "10:01:00" + + const NULL_TIME__ID = `null_time__id` + + beforeAll(async () => { + tableOrViewId = await createTableOrView({ + timeid: { name: "timeid", type: FieldType.STRING }, + time: { + name: "time", + type: FieldType.DATETIME, + timeOnly: true, + }, + }) + + await createRows([ + { timeid: NULL_TIME__ID, time: null }, + { time: T_1000 }, + { time: T_1045 }, + { time: T_1200 }, + { time: T_1530 }, + { time: T_0000 }, + ]) + }) + + describe("equal", () => { + it("successfully finds a row", async () => { await expectQuery({ - [rootOperator]: { - conditions: [ - { - $and: { - conditions: [ - { - equal: { ["productCat.name"]: "foo" }, - }, - ], - }, - }, - ], + equal: { time: T_1000 }, + }).toContainExactly([{ time: "10:00:00" }]) + }) + + it("fails to find nonexistent row", async () => { + await expectQuery({ + equal: { time: UNEXISTING_TIME }, + 
}).toFindNothing() + }) + }) + + describe("notEqual", () => { + it("successfully finds a row", async () => { + await expectQuery({ + notEqual: { time: T_1000 }, + }).toContainExactly([ + { timeid: NULL_TIME__ID }, + { time: "10:45:00" }, + { time: "12:00:00" }, + { time: "15:30:00" }, + { time: "00:00:00" }, + ]) + }) + + it("return all when requesting non-existing", async () => { + await expectQuery({ + notEqual: { time: UNEXISTING_TIME }, + }).toContainExactly([ + { timeid: NULL_TIME__ID }, + { time: "10:00:00" }, + { time: "10:45:00" }, + { time: "12:00:00" }, + { time: "15:30:00" }, + { time: "00:00:00" }, + ]) + }) + }) + + describe("oneOf", () => { + it("successfully finds a row", async () => { + await expectQuery({ + oneOf: { time: [T_1000] }, + }).toContainExactly([{ time: "10:00:00" }]) + }) + + it("fails to find nonexistent row", async () => { + await expectQuery({ + oneOf: { time: [UNEXISTING_TIME] }, + }).toFindNothing() + }) + }) + + describe("range", () => { + it("successfully finds a row", async () => { + await expectQuery({ + range: { time: { low: T_1045, high: T_1045 } }, + }).toContainExactly([{ time: "10:45:00" }]) + }) + + it("successfully finds multiple rows", async () => { + await expectQuery({ + range: { time: { low: T_1045, high: T_1530 } }, + }).toContainExactly([ + { time: "10:45:00" }, + { time: "12:00:00" }, + { time: "15:30:00" }, + ]) + }) + + it("successfully finds no rows", async () => { + await expectQuery({ + range: { + time: { low: UNEXISTING_TIME, high: UNEXISTING_TIME }, + }, + }).toFindNothing() + }) + }) + + describe("sort", () => { + it("sorts ascending", async () => { + await expectSearch({ + query: {}, + sort: "time", + sortOrder: SortOrder.ASCENDING, + }).toMatchExactly([ + { timeid: NULL_TIME__ID }, + { time: "00:00:00" }, + { time: "10:00:00" }, + { time: "10:45:00" }, + { time: "12:00:00" }, + { time: "15:30:00" }, + ]) + }) + + it("sorts descending", async () => { + await expectSearch({ + query: {}, + sort: "time", + sortOrder: SortOrder.DESCENDING, + }).toMatchExactly([ + { time: "15:30:00" }, + { time: "12:00:00" }, + { time: "10:45:00" }, + { time: "10:00:00" }, + { time: "00:00:00" }, + { timeid: NULL_TIME__ID }, + ]) + }) + + describe("sortType STRING", () => { + it("sorts ascending", async () => { + await expectSearch({ + query: {}, + sort: "time", + sortType: SortType.STRING, + sortOrder: SortOrder.ASCENDING, + }).toMatchExactly([ + { timeid: NULL_TIME__ID }, + { time: "00:00:00" }, + { time: "10:00:00" }, + { time: "10:45:00" }, + { time: "12:00:00" }, + { time: "15:30:00" }, + ]) + }) + + it("sorts descending", async () => { + await expectSearch({ + query: {}, + sort: "time", + sortType: SortType.STRING, + sortOrder: SortOrder.DESCENDING, + }).toMatchExactly([ + { time: "15:30:00" }, + { time: "12:00:00" }, + { time: "10:45:00" }, + { time: "10:00:00" }, + { time: "00:00:00" }, + { timeid: NULL_TIME__ID }, + ]) + }) + }) + }) + }) + + isInternal && + !isInMemory && + describe("AI Column", () => { + const UNEXISTING_AI_COLUMN = "Real LLM Response" + + beforeAll(async () => { + mocks.licenses.useBudibaseAI() + mocks.licenses.useAICustomConfigs() + + tableOrViewId = await createTableOrView({ + product: { name: "product", type: FieldType.STRING }, + ai: { + name: "AI", + type: FieldType.AI, + operation: AIOperationEnum.PROMPT, + prompt: "Translate '{{ product }}' into German", + }, + }) + + await createRows([ + { product: "Big Mac" }, + { product: "McCrispy" }, + ]) + }) + + describe("equal", () => { + it("successfully finds rows based on 
AI column", async () => { + await expectQuery({ + equal: { ai: "Mock LLM Response" }, + }).toContainExactly([ + { product: "Big Mac" }, + { product: "McCrispy" }, + ]) + }) + + it("fails to find nonexistent row", async () => { + await expectQuery({ + equal: { ai: UNEXISTING_AI_COLUMN }, + }).toFindNothing() + }) + }) + + describe("notEqual", () => { + it("Returns nothing when searching notEqual on the mock AI response", async () => { + await expectQuery({ + notEqual: { ai: "Mock LLM Response" }, + }).toContainExactly([]) + }) + + it("return all when requesting non-existing response", async () => { + await expectQuery({ + notEqual: { ai: "Real LLM Response" }, + }).toContainExactly([ + { product: "Big Mac" }, + { product: "McCrispy" }, + ]) + }) + }) + + describe("oneOf", () => { + it("successfully finds a row", async () => { + await expectQuery({ + oneOf: { + ai: ["Mock LLM Response", "Other LLM Response"], }, }).toContainExactly([ - { - name: "foo", - productCat: [{ _id: productCatRows[0]._id }], - }, + { product: "Big Mac" }, + { product: "McCrispy" }, ]) - } - ) + }) - it.each([logicalOperators])( - "should allow nested ands with exclusive conditions (with %s as root)", - async rootOperator => { + it("fails to find nonexistent row", async () => { await expectQuery({ - [rootOperator]: { - conditions: [ - { - $and: { - conditions: [ - { - equal: { ["productCat.name"]: "foo" }, - notEqual: { ["productCat.name"]: "foo" }, - }, - ], - }, - }, - ], - }, - }).toContainExactly([]) - } - ) - - it.each([logicalOperators])( - "should allow nested ands with multiple conditions (with %s as root)", - async rootOperator => { - await expectQuery({ - [rootOperator]: { - conditions: [ - { - $and: { - conditions: [ - { - equal: { ["productCat.name"]: "foo" }, - }, - ], - }, - notEqual: { ["productCat.name"]: "foo" }, - }, - ], - }, - }).toContainExactly([]) - } - ) + oneOf: { ai: ["Whopper"] }, + }).toFindNothing() + }) + }) }) - describe("$ors", () => { - it("should allow single conditions", async () => { + describe("arrays", () => { + beforeAll(async () => { + tableOrViewId = await createTableOrView({ + numbers: { + name: "numbers", + type: FieldType.ARRAY, + constraints: { + type: JsonFieldSubType.ARRAY, + inclusion: ["one", "two", "three"], + }, + }, + }) + await createRows([ + { numbers: ["one", "two"] }, + { numbers: ["three"] }, + ]) + }) + + describe("contains", () => { + it("successfully finds a row", async () => { await expectQuery({ - $or: { - conditions: [ - { - equal: { ["productCat.name"]: "foo" }, - }, - ], - }, + contains: { numbers: ["one"] }, + }).toContainExactly([{ numbers: ["one", "two"] }]) + }) + + it("fails to find nonexistent row", async () => { + await expectQuery({ + contains: { numbers: ["none"] }, + }).toFindNothing() + }) + + it("fails to find row containing all", async () => { + await expectQuery({ + contains: { numbers: ["one", "two", "three"] }, + }).toFindNothing() + }) + + it("finds all with empty list", async () => { + await expectQuery({ + contains: { numbers: [] }, }).toContainExactly([ - { - name: "foo", - productCat: [{ _id: productCatRows[0]._id }], - }, + { numbers: ["one", "two"] }, + { numbers: ["three"] }, + ]) + }) + }) + + describe("notContains", () => { + it("successfully finds a row", async () => { + await expectQuery({ + notContains: { numbers: ["one"] }, + }).toContainExactly([{ numbers: ["three"] }]) + }) + + it("fails to find nonexistent row", async () => { + await expectQuery({ + notContains: { numbers: ["one", "two", "three"] }, + 
}).toContainExactly([ + { numbers: ["one", "two"] }, + { numbers: ["three"] }, ]) }) - it("should allow exclusive conditions", async () => { + // Not sure if this is correct behaviour but changing it would be a + // breaking change. + it("finds all with empty list", async () => { await expectQuery({ - $or: { - conditions: [ - { - equal: { ["productCat.name"]: "foo" }, - notEqual: { ["productCat.name"]: "foo" }, - }, - ], - }, + notContains: { numbers: [] }, }).toContainExactly([ - { - name: "foo", - productCat: [{ _id: productCatRows[0]._id }], + { numbers: ["one", "two"] }, + { numbers: ["three"] }, + ]) + }) + }) + + describe("containsAny", () => { + it("successfully finds rows", async () => { + await expectQuery({ + containsAny: { numbers: ["one", "two", "three"] }, + }).toContainExactly([ + { numbers: ["one", "two"] }, + { numbers: ["three"] }, + ]) + }) + + it("fails to find nonexistent row", async () => { + await expectQuery({ + containsAny: { numbers: ["none"] }, + }).toFindNothing() + }) + + it("finds all with empty list", async () => { + await expectQuery({ + containsAny: { numbers: [] }, + }).toContainExactly([ + { numbers: ["one", "two"] }, + { numbers: ["three"] }, + ]) + }) + }) + }) + + describe("bigints", () => { + const SMALL = "1" + const MEDIUM = "10000000" + + // Our bigints are int64s in most datasources. + let BIG = "9223372036854775807" + + beforeAll(async () => { + tableOrViewId = await createTableOrView({ + num: { name: "num", type: FieldType.BIGINT }, + }) + await createRows([ + { num: SMALL }, + { num: MEDIUM }, + { num: BIG }, + ]) + }) + + describe("equal", () => { + it("successfully finds a row", async () => { + await expectQuery({ equal: { num: SMALL } }).toContainExactly( + [{ num: SMALL }] + ) + }) + + it("successfully finds a big value", async () => { + await expectQuery({ equal: { num: BIG } }).toContainExactly([ + { num: BIG }, + ]) + }) + + it("fails to find nonexistent row", async () => { + await expectQuery({ equal: { num: "2" } }).toFindNothing() + }) + }) + + describe("notEqual", () => { + it("successfully finds a row", async () => { + await expectQuery({ + notEqual: { num: SMALL }, + }).toContainExactly([{ num: MEDIUM }, { num: BIG }]) + }) + + it("fails to find nonexistent row", async () => { + await expectQuery({ notEqual: { num: 10 } }).toContainExactly( + [{ num: SMALL }, { num: MEDIUM }, { num: BIG }] + ) + }) + }) + + describe("oneOf", () => { + it("successfully finds a row", async () => { + await expectQuery({ + oneOf: { num: [SMALL] }, + }).toContainExactly([{ num: SMALL }]) + }) + + it("successfully finds all rows", async () => { + await expectQuery({ + oneOf: { num: [SMALL, MEDIUM, BIG] }, + }).toContainExactly([ + { num: SMALL }, + { num: MEDIUM }, + { num: BIG }, + ]) + }) + + it("fails to find nonexistent row", async () => { + await expectQuery({ oneOf: { num: [2] } }).toFindNothing() + }) + }) + + describe("range", () => { + it("successfully finds a row", async () => { + await expectQuery({ + range: { num: { low: SMALL, high: "5" } }, + }).toContainExactly([{ num: SMALL }]) + }) + + it("successfully finds multiple rows", async () => { + await expectQuery({ + range: { num: { low: SMALL, high: MEDIUM } }, + }).toContainExactly([{ num: SMALL }, { num: MEDIUM }]) + }) + + it("successfully finds a row with a high bound", async () => { + await expectQuery({ + range: { num: { low: MEDIUM, high: BIG } }, + }).toContainExactly([{ num: MEDIUM }, { num: BIG }]) + }) + + it("successfully finds no rows", async () => { + await expectQuery({ + range: 
{ num: { low: "5", high: "5" } }, + }).toFindNothing() + }) + + it("can search using just a low value", async () => { + await expectQuery({ + range: { num: { low: MEDIUM } }, + }).toContainExactly([{ num: MEDIUM }, { num: BIG }]) + }) + + it("can search using just a high value", async () => { + await expectQuery({ + range: { num: { high: MEDIUM } }, + }).toContainExactly([{ num: SMALL }, { num: MEDIUM }]) + }) + }) + }) + + isInternal && + describe("auto", () => { + beforeAll(async () => { + tableOrViewId = await createTableOrView({ + auto: { + name: "auto", + type: FieldType.AUTO, + autocolumn: true, + subtype: AutoFieldSubType.AUTO_ID, }, - { + }) + await createRows(new Array(10).fill({})) + }) + + describe("equal", () => { + it("successfully finds a row", async () => { + await expectQuery({ equal: { auto: 1 } }).toContainExactly([ + { auto: 1 }, + ]) + }) + + it("fails to find nonexistent row", async () => { + await expectQuery({ equal: { auto: 0 } }).toFindNothing() + }) + }) + + describe("not equal", () => { + it("successfully finds a row", async () => { + await expectQuery({ + notEqual: { auto: 1 }, + }).toContainExactly([ + { auto: 2 }, + { auto: 3 }, + { auto: 4 }, + { auto: 5 }, + { auto: 6 }, + { auto: 7 }, + { auto: 8 }, + { auto: 9 }, + { auto: 10 }, + ]) + }) + + it("fails to find nonexistent row", async () => { + await expectQuery({ + notEqual: { auto: 0 }, + }).toContainExactly([ + { auto: 1 }, + { auto: 2 }, + { auto: 3 }, + { auto: 4 }, + { auto: 5 }, + { auto: 6 }, + { auto: 7 }, + { auto: 8 }, + { auto: 9 }, + { auto: 10 }, + ]) + }) + }) + + describe("oneOf", () => { + it("successfully finds a row", async () => { + await expectQuery({ + oneOf: { auto: [1] }, + }).toContainExactly([{ auto: 1 }]) + }) + + it("fails to find nonexistent row", async () => { + await expectQuery({ oneOf: { auto: [0] } }).toFindNothing() + }) + }) + + describe("range", () => { + it("successfully finds a row", async () => { + await expectQuery({ + range: { auto: { low: 1, high: 1 } }, + }).toContainExactly([{ auto: 1 }]) + }) + + it("successfully finds multiple rows", async () => { + await expectQuery({ + range: { auto: { low: 1, high: 2 } }, + }).toContainExactly([{ auto: 1 }, { auto: 2 }]) + }) + + it("successfully finds a row with a high bound", async () => { + await expectQuery({ + range: { auto: { low: 2, high: 2 } }, + }).toContainExactly([{ auto: 2 }]) + }) + + it("successfully finds no rows", async () => { + await expectQuery({ + range: { auto: { low: 0, high: 0 } }, + }).toFindNothing() + }) + + it("can search using just a low value", async () => { + await expectQuery({ + range: { auto: { low: 9 } }, + }).toContainExactly([{ auto: 9 }, { auto: 10 }]) + }) + + it("can search using just a high value", async () => { + await expectQuery({ + range: { auto: { high: 2 } }, + }).toContainExactly([{ auto: 1 }, { auto: 2 }]) + }) + }) + + describe("sort", () => { + it("sorts ascending", async () => { + await expectSearch({ + query: {}, + sort: "auto", + sortOrder: SortOrder.ASCENDING, + sortType: SortType.NUMBER, + }).toMatchExactly([ + { auto: 1 }, + { auto: 2 }, + { auto: 3 }, + { auto: 4 }, + { auto: 5 }, + { auto: 6 }, + { auto: 7 }, + { auto: 8 }, + { auto: 9 }, + { auto: 10 }, + ]) + }) + + it("sorts descending", async () => { + await expectSearch({ + query: {}, + sort: "auto", + sortOrder: SortOrder.DESCENDING, + sortType: SortType.NUMBER, + }).toMatchExactly([ + { auto: 10 }, + { auto: 9 }, + { auto: 8 }, + { auto: 7 }, + { auto: 6 }, + { auto: 5 }, + { auto: 4 }, + { auto: 3 }, + { auto: 
2 }, + { auto: 1 }, + ]) + }) + + // This is important for pagination. The order of results must always + // be stable or pagination will break. We don't want the user to need + // to specify an order for pagination to work. + it("is stable without a sort specified", async () => { + let { rows: fullRowList } = await config.api.row.search( + tableOrViewId, + { + tableId: tableOrViewId, + query: {}, + } + ) + + // repeat the search many times to check the first row is always the same + let bookmark: string | number | undefined, + hasNextPage: boolean | undefined = true, + rowCount: number = 0 + do { + const response = await config.api.row.search( + tableOrViewId, + { + tableId: tableOrViewId, + limit: 1, + paginate: true, + query: {}, + bookmark, + } + ) + bookmark = response.bookmark + hasNextPage = response.hasNextPage + expect(response.rows.length).toEqual(1) + const foundRow = response.rows[0] + expect(foundRow).toEqual(fullRowList[rowCount++]) + } while (hasNextPage) + }) + }) + + describe("pagination", () => { + it("should paginate through all rows", async () => { + // @ts-ignore + let bookmark: string | number = undefined + let rows: Row[] = [] + + // eslint-disable-next-line no-constant-condition + while (true) { + const response = await config.api.row.search( + tableOrViewId, + { + tableId: tableOrViewId, + limit: 3, + query: {}, + bookmark, + paginate: true, + } + ) + + rows.push(...response.rows) + + if (!response.bookmark || !response.hasNextPage) { + break + } + bookmark = response.bookmark + } + + const autoValues = rows + .map(row => row.auto) + .sort((a, b) => a - b) + expect(autoValues).toEqual([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]) + }) + }) + }) + + describe("field name 1:name", () => { + beforeAll(async () => { + tableOrViewId = await createTableOrView({ + "1:name": { name: "1:name", type: FieldType.STRING }, + }) + await createRows([{ "1:name": "bar" }, { "1:name": "foo" }]) + }) + + it("successfully finds a row", async () => { + await expectQuery({ + equal: { "1:1:name": "bar" }, + }).toContainExactly([{ "1:name": "bar" }]) + }) + + it("fails to find nonexistent row", async () => { + await expectQuery({ + equal: { "1:1:name": "none" }, + }).toFindNothing() + }) + }) + + isSql && + describe("related formulas", () => { + beforeAll(async () => { + const arrayTable = await createTable({ + name: { name: "name", type: FieldType.STRING }, + array: { + name: "array", + type: FieldType.ARRAY, + constraints: { + type: JsonFieldSubType.ARRAY, + inclusion: ["option 1", "option 2"], + }, + }, + }) + tableOrViewId = await createTableOrView({ + relationship: { + type: FieldType.LINK, + relationshipType: RelationshipType.MANY_TO_ONE, + name: "relationship", + fieldName: "relate", + tableId: arrayTable, + constraints: { + type: "array", + }, + }, + formula: { + type: FieldType.FORMULA, + name: "formula", + formula: encodeJSBinding( + `let array = [];$("relationship").forEach(rel => array = array.concat(rel.array));return array.sort().join(",")` + ), + }, + }) + const arrayRows = await Promise.all([ + config.api.row.save(arrayTable, { + name: "foo", + array: ["option 1"], + }), + config.api.row.save(arrayTable, { name: "bar", - productCat: [{ _id: productCatRows[1]._id }], - }, - { name: "baz", productCat: undefined }, + array: ["option 2"], + }), + ]) + await Promise.all([ + config.api.row.save(tableOrViewId, { + relationship: [arrayRows[0]._id, arrayRows[1]._id], + }), ]) }) - it.each([logicalOperators])( - "should allow nested ors with single conditions (with %s as root)", - async 
rootOperator => { - await expectQuery({ - [rootOperator]: { - conditions: [ - { - $or: { - conditions: [ - { - equal: { ["productCat.name"]: "foo" }, - }, - ], - }, - }, - ], - }, - }).toContainExactly([ - { - name: "foo", - productCat: [{ _id: productCatRows[0]._id }], - }, - ]) - } - ) + it("formula is correct with relationship arrays", async () => { + await expectQuery({}).toContain([ + { formula: "option 1,option 2" }, + ]) + }) + }) - it.each([logicalOperators])( - "should allow nested ors with exclusive conditions (with %s as root)", - async rootOperator => { - await expectQuery({ - [rootOperator]: { - conditions: [ - { - $or: { - conditions: [ - { - equal: { ["productCat.name"]: "foo" }, - notEqual: { ["productCat.name"]: "foo" }, - }, - ], - }, - }, - ], - }, - }).toContainExactly([ - { - name: "foo", - productCat: [{ _id: productCatRows[0]._id }], - }, - { - name: "bar", - productCat: [{ _id: productCatRows[1]._id }], - }, - { name: "baz", productCat: undefined }, - ]) - } - ) + describe("user", () => { + let user1: User + let user2: User - it("should allow nested ors with multiple conditions", async () => { + beforeAll(async () => { + user1 = await config.createUser({ _id: `us_${utils.newid()}` }) + user2 = await config.createUser({ _id: `us_${utils.newid()}` }) + + tableOrViewId = await createTableOrView({ + user: { + name: "user", + type: FieldType.BB_REFERENCE_SINGLE, + subtype: BBReferenceFieldSubType.USER, + }, + }) + + await createRows([ + { user: user1 }, + { user: user2 }, + { user: null }, + ]) + }) + + describe("equal", () => { + it("successfully finds a row", async () => { await expectQuery({ - $or: { - conditions: [ + equal: { user: user1._id }, + }).toContainExactly([{ user: { _id: user1._id } }]) + }) + + it("fails to find nonexistent row", async () => { + await expectQuery({ + equal: { user: "us_none" }, + }).toFindNothing() + }) + }) + + describe("notEqual", () => { + it("successfully finds a row", async () => { + await expectQuery({ + notEqual: { user: user1._id }, + }).toContainExactly([{ user: { _id: user2._id } }, {}]) + }) + + it("fails to find nonexistent row", async () => { + await expectQuery({ + notEqual: { user: "us_none" }, + }).toContainExactly([ + { user: { _id: user1._id } }, + { user: { _id: user2._id } }, + {}, + ]) + }) + }) + + describe("oneOf", () => { + it("successfully finds a row", async () => { + await expectQuery({ + oneOf: { user: [user1._id] }, + }).toContainExactly([{ user: { _id: user1._id } }]) + }) + + it("fails to find nonexistent row", async () => { + await expectQuery({ + oneOf: { user: ["us_none"] }, + }).toFindNothing() + }) + }) + + describe("empty", () => { + it("finds empty rows", async () => { + await expectQuery({ empty: { user: null } }).toContainExactly( + [{}] + ) + }) + }) + + describe("notEmpty", () => { + it("finds non-empty rows", async () => { + await expectQuery({ + notEmpty: { user: null }, + }).toContainExactly([ + { user: { _id: user1._id } }, + { user: { _id: user2._id } }, + ]) + }) + }) + }) + + describe("multi user", () => { + let user1: User + let user2: User + + beforeAll(async () => { + user1 = await config.createUser({ _id: `us_${utils.newid()}` }) + user2 = await config.createUser({ _id: `us_${utils.newid()}` }) + + tableOrViewId = await createTableOrView({ + users: { + name: "users", + type: FieldType.BB_REFERENCE, + subtype: BBReferenceFieldSubType.USER, + constraints: { type: "array" }, + }, + number: { + name: "number", + type: FieldType.NUMBER, + }, + }) + + await createRows([ + { number: 1, users: 
[user1] }, + { number: 2, users: [user2] }, + { number: 3, users: [user1, user2] }, + { number: 4, users: [] }, + ]) + }) + + describe("contains", () => { + it("successfully finds a row", async () => { + await expectQuery({ + contains: { users: [user1._id] }, + }).toContainExactly([ + { users: [{ _id: user1._id }] }, + { users: [{ _id: user1._id }, { _id: user2._id }] }, + ]) + }) + + it("successfully finds a row searching with a string", async () => { + await expectQuery({ + // @ts-expect-error this test specifically goes against the type to + // test that we coerce the string to an array. + contains: { "1:users": user1._id }, + }).toContainExactly([ + { users: [{ _id: user1._id }] }, + { users: [{ _id: user1._id }, { _id: user2._id }] }, + ]) + }) + + it("fails to find nonexistent row", async () => { + await expectQuery({ + contains: { users: ["us_none"] }, + }).toFindNothing() + }) + }) + + describe("notContains", () => { + it("successfully finds a row", async () => { + await expectQuery({ + notContains: { users: [user1._id] }, + }).toContainExactly([{ users: [{ _id: user2._id }] }, {}]) + }) + + it("fails to find nonexistent row", async () => { + await expectQuery({ + notContains: { users: ["us_none"] }, + }).toContainExactly([ + { users: [{ _id: user1._id }] }, + { users: [{ _id: user2._id }] }, + { users: [{ _id: user1._id }, { _id: user2._id }] }, + {}, + ]) + }) + }) + + describe("containsAny", () => { + it("successfully finds rows", async () => { + await expectQuery({ + containsAny: { users: [user1._id, user2._id] }, + }).toContainExactly([ + { users: [{ _id: user1._id }] }, + { users: [{ _id: user2._id }] }, + { users: [{ _id: user1._id }, { _id: user2._id }] }, + ]) + }) + + it("fails to find nonexistent row", async () => { + await expectQuery({ + containsAny: { users: ["us_none"] }, + }).toFindNothing() + }) + }) + + describe("multi-column equals", () => { + it("successfully finds a row", async () => { + await expectQuery({ + equal: { number: 1 }, + contains: { users: [user1._id] }, + }).toContainExactly([ + { users: [{ _id: user1._id }], number: 1 }, + ]) + }) + + it("fails to find nonexistent row", async () => { + await expectQuery({ + equal: { number: 2 }, + contains: { users: [user1._id] }, + }).toFindNothing() + }) + }) + }) + + // It also can't work for in-memory searching because the related table name + // isn't available. 
+ !isInMemory && + describe.each([ + RelationshipType.ONE_TO_MANY, + RelationshipType.MANY_TO_ONE, + RelationshipType.MANY_TO_MANY, + ])("relations (%s)", relationshipType => { + let productCategoryTable: Table, productCatRows: Row[] + + beforeAll(async () => { + const { relatedTable, tableId } = + await basicRelationshipTables(relationshipType) + tableOrViewId = tableId + productCategoryTable = relatedTable + + productCatRows = await Promise.all([ + config.api.row.save(productCategoryTable._id!, { + name: "foo", + }), + config.api.row.save(productCategoryTable._id!, { + name: "bar", + }), + ]) + + await Promise.all([ + config.api.row.save(tableOrViewId, { + name: "foo", + productCat: [productCatRows[0]._id], + }), + config.api.row.save(tableOrViewId, { + name: "bar", + productCat: [productCatRows[1]._id], + }), + config.api.row.save(tableOrViewId, { + name: "baz", + productCat: [], + }), + ]) + }) + + it("should be able to filter by relationship using column name", async () => { + await expectQuery({ + equal: { ["productCat.name"]: "foo" }, + }).toContainExactly([ + { + name: "foo", + productCat: [{ _id: productCatRows[0]._id }], + }, + ]) + }) + + it("should be able to filter by relationship using table name", async () => { + await expectQuery({ + equal: { [`${productCategoryTable.name}.name`]: "foo" }, + }).toContainExactly([ + { + name: "foo", + productCat: [{ _id: productCatRows[0]._id }], + }, + ]) + }) + + it("shouldn't return any relationship for last row", async () => { + await expectQuery({ + equal: { ["name"]: "baz" }, + }).toContainExactly([{ name: "baz", productCat: undefined }]) + }) + + describe("logical filters", () => { + const logicalOperators = [ + LogicalOperator.AND, + LogicalOperator.OR, + ] + + describe("$and", () => { + it("should allow single conditions", async () => { + await expectQuery({ + $and: { + conditions: [ + { + equal: { ["productCat.name"]: "foo" }, + }, + ], + }, + }).toContainExactly([ { - $or: { + name: "foo", + productCat: [{ _id: productCatRows[0]._id }], + }, + ]) + }) + + it("should allow exclusive conditions", async () => { + await expectQuery({ + $and: { + conditions: [ + { + equal: { ["productCat.name"]: "foo" }, + notEqual: { ["productCat.name"]: "foo" }, + }, + ], + }, + }).toContainExactly([]) + }) + + it.each([logicalOperators])( + "should allow nested ands with single conditions (with %s as root)", + async rootOperator => { + await expectQuery({ + [rootOperator]: { conditions: [ { - equal: { ["productCat.name"]: "foo" }, + $and: { + conditions: [ + { + equal: { ["productCat.name"]: "foo" }, + }, + ], + }, }, ], }, - notEqual: { ["productCat.name"]: "foo" }, + }).toContainExactly([ + { + name: "foo", + productCat: [{ _id: productCatRows[0]._id }], + }, + ]) + } + ) + + it.each([logicalOperators])( + "should allow nested ands with exclusive conditions (with %s as root)", + async rootOperator => { + await expectQuery({ + [rootOperator]: { + conditions: [ + { + $and: { + conditions: [ + { + equal: { ["productCat.name"]: "foo" }, + notEqual: { ["productCat.name"]: "foo" }, + }, + ], + }, + }, + ], + }, + }).toContainExactly([]) + } + ) + + it.each([logicalOperators])( + "should allow nested ands with multiple conditions (with %s as root)", + async rootOperator => { + await expectQuery({ + [rootOperator]: { + conditions: [ + { + $and: { + conditions: [ + { + equal: { ["productCat.name"]: "foo" }, + }, + ], + }, + notEqual: { ["productCat.name"]: "foo" }, + }, + ], + }, + }).toContainExactly([]) + } + ) + }) + + describe("$ors", () => { + 
it("should allow single conditions", async () => { + await expectQuery({ + $or: { + conditions: [ + { + equal: { ["productCat.name"]: "foo" }, + }, + ], }, - ], + }).toContainExactly([ + { + name: "foo", + productCat: [{ _id: productCatRows[0]._id }], + }, + ]) + }) + + it("should allow exclusive conditions", async () => { + await expectQuery({ + $or: { + conditions: [ + { + equal: { ["productCat.name"]: "foo" }, + notEqual: { ["productCat.name"]: "foo" }, + }, + ], + }, + }).toContainExactly([ + { + name: "foo", + productCat: [{ _id: productCatRows[0]._id }], + }, + { + name: "bar", + productCat: [{ _id: productCatRows[1]._id }], + }, + { name: "baz", productCat: undefined }, + ]) + }) + + it.each([logicalOperators])( + "should allow nested ors with single conditions (with %s as root)", + async rootOperator => { + await expectQuery({ + [rootOperator]: { + conditions: [ + { + $or: { + conditions: [ + { + equal: { ["productCat.name"]: "foo" }, + }, + ], + }, + }, + ], + }, + }).toContainExactly([ + { + name: "foo", + productCat: [{ _id: productCatRows[0]._id }], + }, + ]) + } + ) + + it.each([logicalOperators])( + "should allow nested ors with exclusive conditions (with %s as root)", + async rootOperator => { + await expectQuery({ + [rootOperator]: { + conditions: [ + { + $or: { + conditions: [ + { + equal: { ["productCat.name"]: "foo" }, + notEqual: { ["productCat.name"]: "foo" }, + }, + ], + }, + }, + ], + }, + }).toContainExactly([ + { + name: "foo", + productCat: [{ _id: productCatRows[0]._id }], + }, + { + name: "bar", + productCat: [{ _id: productCatRows[1]._id }], + }, + { name: "baz", productCat: undefined }, + ]) + } + ) + + it("should allow nested ors with multiple conditions", async () => { + await expectQuery({ + $or: { + conditions: [ + { + $or: { + conditions: [ + { + equal: { ["productCat.name"]: "foo" }, + }, + ], + }, + notEqual: { ["productCat.name"]: "foo" }, + }, + ], + }, + }).toContainExactly([ + { + name: "foo", + productCat: [{ _id: productCatRows[0]._id }], + }, + { + name: "bar", + productCat: [{ _id: productCatRows[1]._id }], + }, + { name: "baz", productCat: undefined }, + ]) + }) + }) + }) + }) + + isSql && + describe.each([ + RelationshipType.MANY_TO_ONE, + RelationshipType.MANY_TO_MANY, + ])("big relations (%s)", relationshipType => { + beforeAll(async () => { + const { relatedTable, tableId } = + await basicRelationshipTables(relationshipType) + tableOrViewId = tableId + const mainRow = await config.api.row.save(tableOrViewId, { + name: "foo", + }) + for (let i = 0; i < 11; i++) { + await config.api.row.save(relatedTable._id!, { + name: i, + product: [mainRow._id!], + }) + } + }) + + it("can only pull 10 related rows", async () => { + await withCoreEnv( + { SQL_MAX_RELATED_ROWS: "10" }, + async () => { + const response = await expectQuery({}).toContain([ + { name: "foo" }, + ]) + expect(response.rows[0].productCat).toBeArrayOfSize(10) + } + ) + }) + + it("can pull max rows when env not set (defaults to 500)", async () => { + const response = await expectQuery({}).toContain([ + { name: "foo" }, + ]) + expect(response.rows[0].productCat).toBeArrayOfSize(11) + }) + }) + + isSql && + describe("relations to same table", () => { + let relatedTable: string, relatedRows: Row[] + + beforeAll(async () => { + relatedTable = await createTable({ + name: { name: "name", type: FieldType.STRING }, + }) + tableOrViewId = await createTableOrView({ + name: { name: "name", type: FieldType.STRING }, + related1: { + type: FieldType.LINK, + name: "related1", + fieldName: "main1", + 
tableId: relatedTable, + relationshipType: RelationshipType.MANY_TO_MANY, + }, + related2: { + type: FieldType.LINK, + name: "related2", + fieldName: "main2", + tableId: relatedTable, + relationshipType: RelationshipType.MANY_TO_MANY, + }, + }) + relatedRows = await Promise.all([ + config.api.row.save(relatedTable, { name: "foo" }), + config.api.row.save(relatedTable, { name: "bar" }), + config.api.row.save(relatedTable, { name: "baz" }), + config.api.row.save(relatedTable, { name: "boo" }), + ]) + await Promise.all([ + config.api.row.save(tableOrViewId, { + name: "test", + related1: [relatedRows[0]._id!], + related2: [relatedRows[1]._id!], + }), + config.api.row.save(tableOrViewId, { + name: "test2", + related1: [relatedRows[2]._id!], + related2: [relatedRows[3]._id!], + }), + config.api.row.save(tableOrViewId, { + name: "test3", + related1: [relatedRows[1]._id], + related2: [relatedRows[2]._id!], + }), + ]) + }) + + it("should be able to relate to same table", async () => { + await expectSearch({ + query: {}, + }).toContainExactly([ + { + name: "test", + related1: [{ _id: relatedRows[0]._id }], + related2: [{ _id: relatedRows[1]._id }], + }, + { + name: "test2", + related1: [{ _id: relatedRows[2]._id }], + related2: [{ _id: relatedRows[3]._id }], + }, + { + name: "test3", + related1: [{ _id: relatedRows[1]._id }], + related2: [{ _id: relatedRows[2]._id }], + }, + ]) + }) + + it("should be able to filter via the first relation field with equal", async () => { + await expectSearch({ + query: { + equal: { + ["related1.name"]: "baz", + }, }, }).toContainExactly([ { - name: "foo", - productCat: [{ _id: productCatRows[0]._id }], + name: "test2", + related1: [{ _id: relatedRows[2]._id }], }, + ]) + }) + + it("should be able to filter via the second relation field with not equal", async () => { + await expectSearch({ + query: { + notEqual: { + ["1:related2.name"]: "foo", + ["2:related2.name"]: "baz", + ["3:related2.name"]: "boo", + }, + }, + }).toContainExactly([ { - name: "bar", - productCat: [{ _id: productCatRows[1]._id }], + name: "test", + }, + ]) + }) + + it("should be able to filter on both fields", async () => { + await expectSearch({ + query: { + notEqual: { + ["related1.name"]: "foo", + ["related2.name"]: "baz", + }, + }, + }).toContainExactly([ + { + name: "test2", }, - { name: "baz", productCat: undefined }, ]) }) }) - }) - }) - isSql && - describe.each([ - RelationshipType.MANY_TO_ONE, - RelationshipType.MANY_TO_MANY, - ])("big relations (%s)", relationshipType => { - beforeAll(async () => { - const { relatedTable, tableId } = await basicRelationshipTables( - relationshipType - ) - tableOrViewId = tableId - const mainRow = await config.api.row.save(tableOrViewId, { - name: "foo", - }) - for (let i = 0; i < 11; i++) { - await config.api.row.save(relatedTable._id!, { - name: i, - product: [mainRow._id!], - }) - } - }) - - it("can only pull 10 related rows", async () => { - await withCoreEnv({ SQL_MAX_RELATED_ROWS: "10" }, async () => { - const response = await expectQuery({}).toContain([ - { name: "foo" }, - ]) - expect(response.rows[0].productCat).toBeArrayOfSize(10) - }) - }) - - it("can pull max rows when env not set (defaults to 500)", async () => { - const response = await expectQuery({}).toContain([ - { name: "foo" }, - ]) - expect(response.rows[0].productCat).toBeArrayOfSize(11) - }) - }) - - isSql && - describe("relations to same table", () => { - let relatedTable: string, relatedRows: Row[] - - beforeAll(async () => { - relatedTable = await createTable({ - name: { name: 
"name", type: FieldType.STRING }, - }) - tableOrViewId = await createTableOrView({ - name: { name: "name", type: FieldType.STRING }, - related1: { - type: FieldType.LINK, - name: "related1", - fieldName: "main1", - tableId: relatedTable, - relationshipType: RelationshipType.MANY_TO_MANY, - }, - related2: { - type: FieldType.LINK, - name: "related2", - fieldName: "main2", - tableId: relatedTable, - relationshipType: RelationshipType.MANY_TO_MANY, - }, - }) - relatedRows = await Promise.all([ - config.api.row.save(relatedTable, { name: "foo" }), - config.api.row.save(relatedTable, { name: "bar" }), - config.api.row.save(relatedTable, { name: "baz" }), - config.api.row.save(relatedTable, { name: "boo" }), - ]) - await Promise.all([ - config.api.row.save(tableOrViewId, { - name: "test", - related1: [relatedRows[0]._id!], - related2: [relatedRows[1]._id!], - }), - config.api.row.save(tableOrViewId, { - name: "test2", - related1: [relatedRows[2]._id!], - related2: [relatedRows[3]._id!], - }), - config.api.row.save(tableOrViewId, { - name: "test3", - related1: [relatedRows[1]._id], - related2: [relatedRows[2]._id!], - }), - ]) - }) - - it("should be able to relate to same table", async () => { - await expectSearch({ - query: {}, - }).toContainExactly([ - { - name: "test", - related1: [{ _id: relatedRows[0]._id }], - related2: [{ _id: relatedRows[1]._id }], - }, - { - name: "test2", - related1: [{ _id: relatedRows[2]._id }], - related2: [{ _id: relatedRows[3]._id }], - }, - { - name: "test3", - related1: [{ _id: relatedRows[1]._id }], - related2: [{ _id: relatedRows[2]._id }], - }, - ]) - }) - - it("should be able to filter via the first relation field with equal", async () => { - await expectSearch({ - query: { - equal: { - ["related1.name"]: "baz", - }, - }, - }).toContainExactly([ - { - name: "test2", - related1: [{ _id: relatedRows[2]._id }], - }, - ]) - }) - - it("should be able to filter via the second relation field with not equal", async () => { - await expectSearch({ - query: { - notEqual: { - ["1:related2.name"]: "foo", - ["2:related2.name"]: "baz", - ["3:related2.name"]: "boo", - }, - }, - }).toContainExactly([ - { - name: "test", - }, - ]) - }) - - it("should be able to filter on both fields", async () => { - await expectSearch({ - query: { - notEqual: { - ["related1.name"]: "foo", - ["related2.name"]: "baz", - }, - }, - }).toContainExactly([ - { - name: "test2", - }, - ]) - }) - }) - - isInternal && - describe("no column error backwards compat", () => { - beforeAll(async () => { - tableOrViewId = await createTableOrView({ - name: { - name: "name", - type: FieldType.STRING, - }, - }) - }) - - it("shouldn't error when column doesn't exist", async () => { - await expectSearch({ - query: { - string: { - "1:something": "a", - }, - }, - }).toMatch({ rows: [] }) - }) - }) - - describe("row counting", () => { - beforeAll(async () => { - tableOrViewId = await createTableOrView({ - name: { - name: "name", - type: FieldType.STRING, - }, - }) - await createRows([{ name: "a" }, { name: "b" }]) - }) - - it("should be able to count rows when option set", async () => { - await expectSearch({ - countRows: true, - query: { - notEmpty: { - name: true, - }, - }, - }).toMatch({ totalRows: 2, rows: expect.any(Array) }) - }) - - it("shouldn't count rows when option is not set", async () => { - await expectSearch({ - countRows: false, - query: { - notEmpty: { - name: true, - }, - }, - }).toNotHaveProperty(["totalRows"]) - }) - }) - - describe("Invalid column definitions", () => { - beforeAll(async () => { - 
// need to create an invalid table - means ignoring typescript - tableOrViewId = await createTableOrView({ - // @ts-ignore - invalid: { - type: FieldType.STRING, - }, - name: { - name: "name", - type: FieldType.STRING, - }, - }) - await createRows([ - { name: "foo", invalid: "id1" }, - { name: "bar", invalid: "id2" }, - ]) - }) - - it("can get rows with all table data", async () => { - await expectSearch({ - query: {}, - }).toContain([ - { name: "foo", invalid: "id1" }, - { name: "bar", invalid: "id2" }, - ]) - }) - }) - - describe.each(["data_name_test", "name_data_test", "name_test_data_"])( - "special (%s) case", - column => { - beforeAll(async () => { - tableOrViewId = await createTableOrView({ - [column]: { - name: column, - type: FieldType.STRING, - }, - }) - await createRows([{ [column]: "a" }, { [column]: "b" }]) - }) - - it("should be able to query a column with data_ in it", async () => { - await expectSearch({ - query: { - equal: { - [`1:${column}`]: "a", - }, - }, - }).toContainExactly([{ [column]: "a" }]) - }) - } - ) - - isInternal && - describe("sample data", () => { - beforeAll(async () => { - await config.api.application.addSampleData(config.appId!) - tableOrViewId = DEFAULT_EMPLOYEE_TABLE_SCHEMA._id! - rows = await config.api.row.fetch(tableOrViewId) - }) - - it("should be able to search sample data", async () => { - await expectSearch({ - query: {}, - }).toContain([ - { - "First Name": "Mandy", - }, - ]) - }) - }) - - describe.each([ - { low: "2024-07-03T00:00:00.000Z", high: "9999-00-00T00:00:00.000Z" }, - { low: "2024-07-03T00:00:00.000Z", high: "9998-00-00T00:00:00.000Z" }, - { low: "0000-00-00T00:00:00.000Z", high: "2024-07-04T00:00:00.000Z" }, - { low: "0001-00-00T00:00:00.000Z", high: "2024-07-04T00:00:00.000Z" }, - ])("date special cases", ({ low, high }) => { - const earlyDate = "2024-07-03T10:00:00.000Z", - laterDate = "2024-07-03T11:00:00.000Z" - beforeAll(async () => { - tableOrViewId = await createTableOrView({ - date: { - name: "date", - type: FieldType.DATETIME, - }, - }) - await createRows([{ date: earlyDate }, { date: laterDate }]) - }) - - it("should be able to handle a date search", async () => { - await expectSearch({ - query: { - range: { - "1:date": { low, high }, - }, - }, - }).toContainExactly([{ date: earlyDate }, { date: laterDate }]) - }) - }) - - describe.each([ - "名前", // Japanese for "name" - "Benutzer-ID", // German for "user ID", includes a hyphen - "numéro", // French for "number", includes an accent - "år", // Swedish for "year", includes a ring above - "naïve", // English word borrowed from French, includes an umlaut - "الاسم", // Arabic for "name" - "оплата", // Russian for "payment" - "पता", // Hindi for "address" - "用戶名", // Chinese for "username" - "çalışma_zamanı", // Turkish for "runtime", includes an underscore and a cedilla - "preço", // Portuguese for "price", includes a cedilla - "사용자명", // Korean for "username" - "usuario_ñoño", // Spanish, uses an underscore and includes "ñ" - "файл", // Bulgarian for "file" - "δεδομένα", // Greek for "data" - "geändert_am", // German for "modified on", includes an umlaut - "ব্যবহারকারীর_নাম", // Bengali for "user name", includes an underscore - "São_Paulo", // Portuguese, includes an underscore and a tilde - "età", // Italian for "age", includes an accent - "ชื่อผู้ใช้", // Thai for "username" - ])("non-ascii column name: %s", name => { - beforeAll(async () => { - tableOrViewId = await createTableOrView({ - [name]: { - name, - type: FieldType.STRING, - }, - }) - await createRows([{ [name]: 
"a" }, { [name]: "b" }]) - }) - - it("should be able to query a column with non-ascii characters", async () => { - await expectSearch({ - query: { - equal: { - [`1:${name}`]: "a", - }, - }, - }).toContainExactly([{ [name]: "a" }]) - }) - }) - - // This is currently not supported in external datasources, it produces SQL - // errors at time of writing. We supported it (potentially by accident) in - // Lucene, though, so we need to make sure it's supported in SQS as well. We - // found real cases in production of column names ending in a space. - isInternal && - describe("space at end of column name", () => { - beforeAll(async () => { - tableOrViewId = await createTableOrView({ - "name ": { - name: "name ", - type: FieldType.STRING, - }, - }) - await createRows([{ ["name "]: "foo" }, { ["name "]: "bar" }]) - }) - - it("should be able to query a column that ends with a space", async () => { - await expectSearch({ - query: { - string: { - "name ": "foo", - }, - }, - }).toContainExactly([{ ["name "]: "foo" }]) - }) - - it("should be able to query a column that ends with a space using numeric notation", async () => { - await expectSearch({ - query: { - string: { - "1:name ": "foo", - }, - }, - }).toContainExactly([{ ["name "]: "foo" }]) - }) - }) - - isInternal && - describe("space at start of column name", () => { - beforeAll(async () => { - tableOrViewId = await createTableOrView({ - " name": { - name: " name", - type: FieldType.STRING, - }, - }) - await createRows([{ [" name"]: "foo" }, { [" name"]: "bar" }]) - }) - - it("should be able to query a column that starts with a space", async () => { - await expectSearch({ - query: { - string: { - " name": "foo", - }, - }, - }).toContainExactly([{ [" name"]: "foo" }]) - }) - - it("should be able to query a column that starts with a space using numeric notation", async () => { - await expectSearch({ - query: { - string: { - "1: name": "foo", - }, - }, - }).toContainExactly([{ [" name"]: "foo" }]) - }) - }) - - isInternal && - !isView && - describe("duplicate columns", () => { - beforeAll(async () => { - tableOrViewId = await createTableOrView({ - name: { - name: "name", - type: FieldType.STRING, - }, - }) - await context.doInAppContext(config.getAppId(), async () => { - const db = context.getAppDB() - const tableDoc = await db.get
(tableOrViewId) - tableDoc.schema.Name = { - name: "Name", - type: FieldType.STRING, - } - try { - // remove the SQLite definitions so that they can be rebuilt as part of the search - const sqliteDoc = await db.get(SQLITE_DESIGN_DOC_ID) - await db.remove(sqliteDoc) - } catch (err) { - // no-op - } - }) - await createRows([{ name: "foo", Name: "bar" }]) - }) - - it("should handle invalid duplicate column names", async () => { - await expectSearch({ - query: {}, - }).toContainExactly([{ name: "foo" }]) - }) - }) - - !isInMemory && - describe("search by _id", () => { - let row: Row - - beforeAll(async () => { - const toRelateTable = await createTable({ - name: { - name: "name", - type: FieldType.STRING, - }, - }) - tableOrViewId = await createTableOrView({ - name: { - name: "name", - type: FieldType.STRING, - }, - rel: { - name: "rel", - type: FieldType.LINK, - relationshipType: RelationshipType.MANY_TO_MANY, - tableId: toRelateTable, - fieldName: "rel", - }, - }) - const [row1, row2] = await Promise.all([ - config.api.row.save(toRelateTable, { name: "tag 1" }), - config.api.row.save(toRelateTable, { name: "tag 2" }), - ]) - row = await config.api.row.save(tableOrViewId, { - name: "product 1", - rel: [row1._id, row2._id], - }) - }) - - it("can filter by the row ID with limit 1", async () => { - await expectSearch({ - query: { - equal: { _id: row._id }, - }, - limit: 1, - }).toContainExactly([row]) - }) - }) - - !isInternal && - describe("search by composite key", () => { - beforeAll(async () => { - const table = await config.api.table.save( - tableForDatasource(datasource, { - schema: { - idColumn1: { - name: "idColumn1", - type: FieldType.NUMBER, - }, - idColumn2: { - name: "idColumn2", - type: FieldType.NUMBER, - }, - }, - primary: ["idColumn1", "idColumn2"], - }) - ) - tableOrViewId = table._id! 
- await createRows([{ idColumn1: 1, idColumn2: 2 }]) - }) - - it("can filter by the row ID with limit 1", async () => { - await expectSearch({ - query: { - equal: { _id: generateRowIdField([1, 2]) }, - }, - limit: 1, - }).toContain([ - { - idColumn1: 1, - idColumn2: 2, - }, - ]) - }) - }) - - isSql && - describe("primaryDisplay", () => { - beforeAll(async () => { - let toRelateTableId = await createTable({ - name: { - name: "name", - type: FieldType.STRING, - }, - }) - tableOrViewId = await createTableOrView({ - name: { - name: "name", - type: FieldType.STRING, - }, - link: { - name: "link", - type: FieldType.LINK, - relationshipType: RelationshipType.MANY_TO_ONE, - tableId: toRelateTableId, - fieldName: "link", - }, - }) - - const toRelateTable = await config.api.table.get(toRelateTableId) - await config.api.table.save({ - ...toRelateTable, - primaryDisplay: "link", - }) - const relatedRows = await Promise.all([ - config.api.row.save(toRelateTable._id!, { name: "related" }), - ]) - await config.api.row.save(tableOrViewId, { - name: "test", - link: relatedRows.map(row => row._id), - }) - }) - - it("should be able to query, primary display on related table shouldn't be used", async () => { - // this test makes sure that if a relationship has been specified as the primary display on a table - // it is ignored and another column is used instead - await expectQuery({}).toContain([ - { name: "test", link: [{ primaryDisplay: "related" }] }, - ]) - }) - }) - - describe("$and", () => { - beforeAll(async () => { - tableOrViewId = await createTableOrView({ - age: { name: "age", type: FieldType.NUMBER }, - name: { name: "name", type: FieldType.STRING }, - }) - await createRows([ - { age: 1, name: "Jane" }, - { age: 10, name: "Jack" }, - { age: 7, name: "Hanna" }, - { age: 8, name: "Jan" }, - ]) - }) - - it("successfully finds a row for one level condition", async () => { - await expectQuery({ - $and: { - conditions: [ - { equal: { age: 10 } }, - { equal: { name: "Jack" } }, - ], - }, - }).toContainExactly([{ age: 10, name: "Jack" }]) - }) - - it("successfully finds a row for one level with multiple conditions", async () => { - await expectQuery({ - $and: { - conditions: [ - { equal: { age: 10 } }, - { equal: { name: "Jack" } }, - ], - }, - }).toContainExactly([{ age: 10, name: "Jack" }]) - }) - - it("successfully finds multiple rows for one level with multiple conditions", async () => { - await expectQuery({ - $and: { - conditions: [ - { range: { age: { low: 1, high: 9 } } }, - { string: { name: "Ja" } }, - ], - }, - }).toContainExactly([ - { age: 1, name: "Jane" }, - { age: 8, name: "Jan" }, - ]) - }) - - it("successfully finds rows for nested filters", async () => { - await expectQuery({ - $and: { - conditions: [ - { - $and: { - conditions: [ - { - range: { age: { low: 1, high: 10 } }, - }, - { string: { name: "Ja" } }, - ], - }, - equal: { name: "Jane" }, - }, - ], - }, - }).toContainExactly([{ age: 1, name: "Jane" }]) - }) - - it("returns nothing when filtering out all data", async () => { - await expectQuery({ - $and: { - conditions: [ - { equal: { age: 7 } }, - { equal: { name: "Jack" } }, - ], - }, - }).toFindNothing() - }) - - !isInMemory && - it("validates conditions that are not objects", async () => { - await expect( - expectQuery({ - $and: { - conditions: [ - { equal: { age: 10 } }, - "invalidCondition" as any, - ], - }, - }).toFindNothing() - ).rejects.toThrow( - 'Invalid body - "query.$and.conditions[1]" must be of type object' - ) - }) - - !isInMemory && - it("validates $and without 
conditions", async () => { - await expect( - expectQuery({ - $and: { - conditions: [ - { equal: { age: 10 } }, - { - $and: { - conditions: undefined as any, - }, - }, - ], - }, - }).toFindNothing() - ).rejects.toThrow( - 'Invalid body - "query.$and.conditions[1].$and.conditions" is required' - ) - }) - - // onEmptyFilter cannot be sent to view searches - !isView && - it("returns no rows when onEmptyFilter set to none", async () => { - await expectSearch({ - query: { - onEmptyFilter: EmptyFilterOption.RETURN_NONE, - $and: { - conditions: [{ equal: { name: "" } }], - }, - }, - }).toFindNothing() - }) - - it("returns all rows when onEmptyFilter set to all", async () => { - await expectSearch({ - query: { - onEmptyFilter: EmptyFilterOption.RETURN_ALL, - $and: { - conditions: [{ equal: { name: "" } }], - }, - }, - }).toHaveLength(4) - }) - }) - - describe("$or", () => { - beforeAll(async () => { - tableOrViewId = await createTableOrView({ - age: { name: "age", type: FieldType.NUMBER }, - name: { name: "name", type: FieldType.STRING }, - }) - await createRows([ - { age: 1, name: "Jane" }, - { age: 10, name: "Jack" }, - { age: 7, name: "Hanna" }, - { age: 8, name: "Jan" }, - ]) - }) - - it("successfully finds a row for one level condition", async () => { - await expectQuery({ - $or: { - conditions: [ - { equal: { age: 7 } }, - { equal: { name: "Jack" } }, - ], - }, - }).toContainExactly([ - { age: 10, name: "Jack" }, - { age: 7, name: "Hanna" }, - ]) - }) - - it("successfully finds a row for one level with multiple conditions", async () => { - await expectQuery({ - $or: { - conditions: [ - { equal: { age: 7 } }, - { equal: { name: "Jack" } }, - ], - }, - }).toContainExactly([ - { age: 10, name: "Jack" }, - { age: 7, name: "Hanna" }, - ]) - }) - - it("successfully finds multiple rows for one level with multiple conditions", async () => { - await expectQuery({ - $or: { - conditions: [ - { range: { age: { low: 1, high: 9 } } }, - { string: { name: "Jan" } }, - ], - }, - }).toContainExactly([ - { age: 1, name: "Jane" }, - { age: 7, name: "Hanna" }, - { age: 8, name: "Jan" }, - ]) - }) - - it("successfully finds rows for nested filters", async () => { - await expectQuery({ - $or: { - conditions: [ - { - $or: { - conditions: [ - { - range: { age: { low: 1, high: 7 } }, - }, - { string: { name: "Jan" } }, - ], - }, - equal: { name: "Jane" }, - }, - ], - }, - }).toContainExactly([ - { age: 1, name: "Jane" }, - { age: 7, name: "Hanna" }, - { age: 8, name: "Jan" }, - ]) - }) - - it("returns nothing when filtering out all data", async () => { - await expectQuery({ - $or: { - conditions: [ - { equal: { age: 6 } }, - { equal: { name: "John" } }, - ], - }, - }).toFindNothing() - }) - - it("can nest $and under $or filters", async () => { - await expectQuery({ - $or: { - conditions: [ - { - $and: { - conditions: [ - { - range: { age: { low: 1, high: 8 } }, - }, - { equal: { name: "Jan" } }, - ], - }, - equal: { name: "Jane" }, - }, - ], - }, - }).toContainExactly([ - { age: 1, name: "Jane" }, - { age: 8, name: "Jan" }, - ]) - }) - - it("can nest $or under $and filters", async () => { - await expectQuery({ - $and: { - conditions: [ - { - $or: { - conditions: [ - { - range: { age: { low: 1, high: 8 } }, - }, - { equal: { name: "Jan" } }, - ], - }, - equal: { name: "Jane" }, - }, - ], - }, - }).toContainExactly([{ age: 1, name: "Jane" }]) - }) - - // onEmptyFilter cannot be sent to view searches - !isView && - it("returns no rows when onEmptyFilter set to none", async () => { - await expectSearch({ - query: { - 
onEmptyFilter: EmptyFilterOption.RETURN_NONE, - $or: { - conditions: [{ equal: { name: "" } }], - }, - }, - }).toFindNothing() - }) - - it("returns all rows when onEmptyFilter set to all", async () => { - await expectSearch({ - query: { - onEmptyFilter: EmptyFilterOption.RETURN_ALL, - $or: { - conditions: [{ equal: { name: "" } }], - }, - }, - }).toHaveLength(4) - }) - }) - - isSql && - describe("max related columns", () => { - let relatedRows: Row[] - - beforeAll(async () => { - const relatedSchema: TableSchema = {} - const row: Row = {} - for (let i = 0; i < 100; i++) { - const name = `column${i}` - relatedSchema[name] = { name, type: FieldType.NUMBER } - row[name] = i - } - const relatedTable = await createTable(relatedSchema) - tableOrViewId = await createTableOrView({ - name: { name: "name", type: FieldType.STRING }, - related1: { - type: FieldType.LINK, - name: "related1", - fieldName: "main1", - tableId: relatedTable, - relationshipType: RelationshipType.MANY_TO_MANY, - }, - }) - relatedRows = await Promise.all([ - config.api.row.save(relatedTable, row), - ]) - await config.api.row.save(tableOrViewId, { - name: "foo", - related1: [relatedRows[0]._id], - }) - }) - - it("retrieve the row with relationships", async () => { - await expectQuery({}).toContainExactly([ - { - name: "foo", - related1: [{ _id: relatedRows[0]._id }], - }, - ]) - }) - }) - - !isInternal && - describe("SQL injection", () => { - const badStrings = [ - "1; DROP TABLE %table_name%;", - "1; DELETE FROM %table_name%;", - "1; UPDATE %table_name% SET name = 'foo';", - "1; INSERT INTO %table_name% (name) VALUES ('foo');", - "' OR '1'='1' --", - "'; DROP TABLE %table_name%; --", - "' OR 1=1 --", - "' UNION SELECT null, null, null; --", - "' AND (SELECT COUNT(*) FROM %table_name%) > 0 --", - "\"; EXEC xp_cmdshell('dir'); --", - "\"' OR 'a'='a", - "OR 1=1;", - "'; SHUTDOWN --", - ] - - describe.each(badStrings)("bad string: %s", badStringTemplate => { - // The SQL that knex generates when you try to use a double quote in a - // field name is always invalid and never works, so we skip it for these - // tests. 
- const skipFieldNameCheck = - isOracle && badStringTemplate.includes('"') - - !skipFieldNameCheck && - it("should not allow SQL injection as a field name", async () => { - const tableOrViewId = await createTableOrView() - const table = await getTable(tableOrViewId) - const badString = badStringTemplate.replace( - /%table_name%/g, - table.name - ) - - await config.api.table.save({ - ...table, - schema: { - ...table.schema, - [badString]: { name: badString, type: FieldType.STRING }, + isInternal && + describe("no column error backwards compat", () => { + beforeAll(async () => { + tableOrViewId = await createTableOrView({ + name: { + name: "name", + type: FieldType.STRING, }, }) - - if (docIds.isViewId(tableOrViewId)) { - const view = await config.api.viewV2.get(tableOrViewId) - await config.api.viewV2.update({ - ...view, - schema: { - [badString]: { visible: true }, - }, - }) - } - - await config.api.row.save(tableOrViewId, { - [badString]: "foo", - }) - - await assertTableExists(table) - await assertTableNumRows(table, 1) - - const { rows } = await config.api.row.search( - tableOrViewId, - { query: {} }, - { status: 200 } - ) - - expect(rows).toHaveLength(1) - - await assertTableExists(table) - await assertTableNumRows(table, 1) }) - it("should not allow SQL injection as a field value", async () => { - const tableOrViewId = await createTableOrView({ - foo: { - name: "foo", + it("shouldn't error when column doesn't exist", async () => { + await expectSearch({ + query: { + string: { + "1:something": "a", + }, + }, + }).toMatch({ rows: [] }) + }) + }) + + describe("row counting", () => { + beforeAll(async () => { + tableOrViewId = await createTableOrView({ + name: { + name: "name", type: FieldType.STRING, }, }) - const table = await getTable(tableOrViewId) - const badString = badStringTemplate.replace( - /%table_name%/g, - table.name - ) + await createRows([{ name: "a" }, { name: "b" }]) + }) - await config.api.row.save(tableOrViewId, { foo: "foo" }) + it("should be able to count rows when option set", async () => { + await expectSearch({ + countRows: true, + query: { + notEmpty: { + name: true, + }, + }, + }).toMatch({ totalRows: 2, rows: expect.any(Array) }) + }) - await assertTableExists(table) - await assertTableNumRows(table, 1) - - const { rows } = await config.api.row.search( - tableOrViewId, - { query: { equal: { foo: badString } } }, - { status: 200 } - ) - - expect(rows).toBeEmpty() - await assertTableExists(table) - await assertTableNumRows(table, 1) + it("shouldn't count rows when option is not set", async () => { + await expectSearch({ + countRows: false, + query: { + notEmpty: { + name: true, + }, + }, + }).toNotHaveProperty(["totalRows"]) }) }) - }) + + describe("Invalid column definitions", () => { + beforeAll(async () => { + // need to create an invalid table - means ignoring typescript + tableOrViewId = await createTableOrView({ + // @ts-ignore + invalid: { + type: FieldType.STRING, + }, + name: { + name: "name", + type: FieldType.STRING, + }, + }) + await createRows([ + { name: "foo", invalid: "id1" }, + { name: "bar", invalid: "id2" }, + ]) + }) + + it("can get rows with all table data", async () => { + await expectSearch({ + query: {}, + }).toContain([ + { name: "foo", invalid: "id1" }, + { name: "bar", invalid: "id2" }, + ]) + }) + }) + + describe.each([ + "data_name_test", + "name_data_test", + "name_test_data_", + ])("special (%s) case", column => { + beforeAll(async () => { + tableOrViewId = await createTableOrView({ + [column]: { + name: column, + type: 
FieldType.STRING, + }, + }) + await createRows([{ [column]: "a" }, { [column]: "b" }]) + }) + + it("should be able to query a column with data_ in it", async () => { + await expectSearch({ + query: { + equal: { + [`1:${column}`]: "a", + }, + }, + }).toContainExactly([{ [column]: "a" }]) + }) + }) + + isInternal && + describe("sample data", () => { + beforeAll(async () => { + await config.api.application.addSampleData(config.appId!) + tableOrViewId = DEFAULT_EMPLOYEE_TABLE_SCHEMA._id! + rows = await config.api.row.fetch(tableOrViewId) + }) + + it("should be able to search sample data", async () => { + await expectSearch({ + query: {}, + }).toContain([ + { + "First Name": "Mandy", + }, + ]) + }) + }) + + describe.each([ + { + low: "2024-07-03T00:00:00.000Z", + high: "9999-00-00T00:00:00.000Z", + }, + { + low: "2024-07-03T00:00:00.000Z", + high: "9998-00-00T00:00:00.000Z", + }, + { + low: "0000-00-00T00:00:00.000Z", + high: "2024-07-04T00:00:00.000Z", + }, + { + low: "0001-00-00T00:00:00.000Z", + high: "2024-07-04T00:00:00.000Z", + }, + ])("date special cases", ({ low, high }) => { + const earlyDate = "2024-07-03T10:00:00.000Z", + laterDate = "2024-07-03T11:00:00.000Z" + beforeAll(async () => { + tableOrViewId = await createTableOrView({ + date: { + name: "date", + type: FieldType.DATETIME, + }, + }) + await createRows([{ date: earlyDate }, { date: laterDate }]) + }) + + it("should be able to handle a date search", async () => { + await expectSearch({ + query: { + range: { + "1:date": { low, high }, + }, + }, + }).toContainExactly([{ date: earlyDate }, { date: laterDate }]) + }) + }) + + describe.each([ + "名前", // Japanese for "name" + "Benutzer-ID", // German for "user ID", includes a hyphen + "numéro", // French for "number", includes an accent + "år", // Swedish for "year", includes a ring above + "naïve", // English word borrowed from French, includes an umlaut + "الاسم", // Arabic for "name" + "оплата", // Russian for "payment" + "पता", // Hindi for "address" + "用戶名", // Chinese for "username" + "çalışma_zamanı", // Turkish for "runtime", includes an underscore and a cedilla + "preço", // Portuguese for "price", includes a cedilla + "사용자명", // Korean for "username" + "usuario_ñoño", // Spanish, uses an underscore and includes "ñ" + "файл", // Bulgarian for "file" + "δεδομένα", // Greek for "data" + "geändert_am", // German for "modified on", includes an umlaut + "ব্যবহারকারীর_নাম", // Bengali for "user name", includes an underscore + "São_Paulo", // Portuguese, includes an underscore and a tilde + "età", // Italian for "age", includes an accent + "ชื่อผู้ใช้", // Thai for "username" + ])("non-ascii column name: %s", name => { + beforeAll(async () => { + tableOrViewId = await createTableOrView({ + [name]: { + name, + type: FieldType.STRING, + }, + }) + await createRows([{ [name]: "a" }, { [name]: "b" }]) + }) + + it("should be able to query a column with non-ascii characters", async () => { + await expectSearch({ + query: { + equal: { + [`1:${name}`]: "a", + }, + }, + }).toContainExactly([{ [name]: "a" }]) + }) + }) + + // This is currently not supported in external datasources, it produces SQL + // errors at time of writing. We supported it (potentially by accident) in + // Lucene, though, so we need to make sure it's supported in SQS as well. We + // found real cases in production of column names ending in a space. 
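    // The two suites below cover a trailing and a leading space respectively,
    // querying each column both by its raw key ("name ", " name") and by the
    // numeric notation ("1:name ", "1: name") used elsewhere in these search
    // tests, so both key formats are exercised against the awkward column names.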
+ isInternal && + describe("space at end of column name", () => { + beforeAll(async () => { + tableOrViewId = await createTableOrView({ + "name ": { + name: "name ", + type: FieldType.STRING, + }, + }) + await createRows([{ ["name "]: "foo" }, { ["name "]: "bar" }]) + }) + + it("should be able to query a column that ends with a space", async () => { + await expectSearch({ + query: { + string: { + "name ": "foo", + }, + }, + }).toContainExactly([{ ["name "]: "foo" }]) + }) + + it("should be able to query a column that ends with a space using numeric notation", async () => { + await expectSearch({ + query: { + string: { + "1:name ": "foo", + }, + }, + }).toContainExactly([{ ["name "]: "foo" }]) + }) + }) + + isInternal && + describe("space at start of column name", () => { + beforeAll(async () => { + tableOrViewId = await createTableOrView({ + " name": { + name: " name", + type: FieldType.STRING, + }, + }) + await createRows([{ [" name"]: "foo" }, { [" name"]: "bar" }]) + }) + + it("should be able to query a column that starts with a space", async () => { + await expectSearch({ + query: { + string: { + " name": "foo", + }, + }, + }).toContainExactly([{ [" name"]: "foo" }]) + }) + + it("should be able to query a column that starts with a space using numeric notation", async () => { + await expectSearch({ + query: { + string: { + "1: name": "foo", + }, + }, + }).toContainExactly([{ [" name"]: "foo" }]) + }) + }) + + isInternal && + !isView && + describe("duplicate columns", () => { + beforeAll(async () => { + tableOrViewId = await createTableOrView({ + name: { + name: "name", + type: FieldType.STRING, + }, + }) + await context.doInAppContext(config.getAppId(), async () => { + const db = context.getAppDB() + const tableDoc = await db.get
(tableOrViewId) + tableDoc.schema.Name = { + name: "Name", + type: FieldType.STRING, + } + try { + // remove the SQLite definitions so that they can be rebuilt as part of the search + const sqliteDoc = await db.get(SQLITE_DESIGN_DOC_ID) + await db.remove(sqliteDoc) + } catch (err) { + // no-op + } + }) + await createRows([{ name: "foo", Name: "bar" }]) + }) + + it("should handle invalid duplicate column names", async () => { + await expectSearch({ + query: {}, + }).toContainExactly([{ name: "foo" }]) + }) + }) + + !isInMemory && + describe("search by _id", () => { + let row: Row + + beforeAll(async () => { + const toRelateTable = await createTable({ + name: { + name: "name", + type: FieldType.STRING, + }, + }) + tableOrViewId = await createTableOrView({ + name: { + name: "name", + type: FieldType.STRING, + }, + rel: { + name: "rel", + type: FieldType.LINK, + relationshipType: RelationshipType.MANY_TO_MANY, + tableId: toRelateTable, + fieldName: "rel", + }, + }) + const [row1, row2] = await Promise.all([ + config.api.row.save(toRelateTable, { name: "tag 1" }), + config.api.row.save(toRelateTable, { name: "tag 2" }), + ]) + row = await config.api.row.save(tableOrViewId, { + name: "product 1", + rel: [row1._id, row2._id], + }) + }) + + it("can filter by the row ID with limit 1", async () => { + await expectSearch({ + query: { + equal: { _id: row._id }, + }, + limit: 1, + }).toContainExactly([row]) + }) + }) + + !isInternal && + describe("search by composite key", () => { + beforeAll(async () => { + const table = await config.api.table.save( + tableForDatasource(datasource, { + schema: { + idColumn1: { + name: "idColumn1", + type: FieldType.NUMBER, + }, + idColumn2: { + name: "idColumn2", + type: FieldType.NUMBER, + }, + }, + primary: ["idColumn1", "idColumn2"], + }) + ) + tableOrViewId = table._id! 
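          // Only one row is created, with the composite primary key (1, 2); the
          // test below filters on an _id rebuilt via generateRowIdField([1, 2])
          // rather than an _id captured from a save response, exercising row IDs
          // derived purely from the composite key values.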
+ await createRows([{ idColumn1: 1, idColumn2: 2 }]) + }) + + it("can filter by the row ID with limit 1", async () => { + await expectSearch({ + query: { + equal: { _id: generateRowIdField([1, 2]) }, + }, + limit: 1, + }).toContain([ + { + idColumn1: 1, + idColumn2: 2, + }, + ]) + }) + }) + + isSql && + describe("primaryDisplay", () => { + beforeAll(async () => { + let toRelateTableId = await createTable({ + name: { + name: "name", + type: FieldType.STRING, + }, + }) + tableOrViewId = await createTableOrView({ + name: { + name: "name", + type: FieldType.STRING, + }, + link: { + name: "link", + type: FieldType.LINK, + relationshipType: RelationshipType.MANY_TO_ONE, + tableId: toRelateTableId, + fieldName: "link", + }, + }) + + const toRelateTable = await config.api.table.get( + toRelateTableId + ) + await config.api.table.save({ + ...toRelateTable, + primaryDisplay: "link", + }) + const relatedRows = await Promise.all([ + config.api.row.save(toRelateTable._id!, { + name: "related", + }), + ]) + await config.api.row.save(tableOrViewId, { + name: "test", + link: relatedRows.map(row => row._id), + }) + }) + + it("should be able to query, primary display on related table shouldn't be used", async () => { + // this test makes sure that if a relationship has been specified as the primary display on a table + // it is ignored and another column is used instead + await expectQuery({}).toContain([ + { name: "test", link: [{ primaryDisplay: "related" }] }, + ]) + }) + }) + + describe("$and", () => { + beforeAll(async () => { + tableOrViewId = await createTableOrView({ + age: { name: "age", type: FieldType.NUMBER }, + name: { name: "name", type: FieldType.STRING }, + }) + await createRows([ + { age: 1, name: "Jane" }, + { age: 10, name: "Jack" }, + { age: 7, name: "Hanna" }, + { age: 8, name: "Jan" }, + ]) + }) + + it("successfully finds a row for one level condition", async () => { + await expectQuery({ + $and: { + conditions: [ + { equal: { age: 10 } }, + { equal: { name: "Jack" } }, + ], + }, + }).toContainExactly([{ age: 10, name: "Jack" }]) + }) + + it("successfully finds a row for one level with multiple conditions", async () => { + await expectQuery({ + $and: { + conditions: [ + { equal: { age: 10 } }, + { equal: { name: "Jack" } }, + ], + }, + }).toContainExactly([{ age: 10, name: "Jack" }]) + }) + + it("successfully finds multiple rows for one level with multiple conditions", async () => { + await expectQuery({ + $and: { + conditions: [ + { range: { age: { low: 1, high: 9 } } }, + { string: { name: "Ja" } }, + ], + }, + }).toContainExactly([ + { age: 1, name: "Jane" }, + { age: 8, name: "Jan" }, + ]) + }) + + it("successfully finds rows for nested filters", async () => { + await expectQuery({ + $and: { + conditions: [ + { + $and: { + conditions: [ + { + range: { age: { low: 1, high: 10 } }, + }, + { string: { name: "Ja" } }, + ], + }, + equal: { name: "Jane" }, + }, + ], + }, + }).toContainExactly([{ age: 1, name: "Jane" }]) + }) + + it("returns nothing when filtering out all data", async () => { + await expectQuery({ + $and: { + conditions: [ + { equal: { age: 7 } }, + { equal: { name: "Jack" } }, + ], + }, + }).toFindNothing() + }) + + !isInMemory && + it("validates conditions that are not objects", async () => { + await expect( + expectQuery({ + $and: { + conditions: [ + { equal: { age: 10 } }, + "invalidCondition" as any, + ], + }, + }).toFindNothing() + ).rejects.toThrow( + 'Invalid body - "query.$and.conditions[1]" must be of type object' + ) + }) + + !isInMemory && + it("validates $and 
without conditions", async () => { + await expect( + expectQuery({ + $and: { + conditions: [ + { equal: { age: 10 } }, + { + $and: { + conditions: undefined as any, + }, + }, + ], + }, + }).toFindNothing() + ).rejects.toThrow( + 'Invalid body - "query.$and.conditions[1].$and.conditions" is required' + ) + }) + + // onEmptyFilter cannot be sent to view searches + !isView && + it("returns no rows when onEmptyFilter set to none", async () => { + await expectSearch({ + query: { + onEmptyFilter: EmptyFilterOption.RETURN_NONE, + $and: { + conditions: [{ equal: { name: "" } }], + }, + }, + }).toFindNothing() + }) + + it("returns all rows when onEmptyFilter set to all", async () => { + await expectSearch({ + query: { + onEmptyFilter: EmptyFilterOption.RETURN_ALL, + $and: { + conditions: [{ equal: { name: "" } }], + }, + }, + }).toHaveLength(4) + }) + }) + + describe("$or", () => { + beforeAll(async () => { + tableOrViewId = await createTableOrView({ + age: { name: "age", type: FieldType.NUMBER }, + name: { name: "name", type: FieldType.STRING }, + }) + await createRows([ + { age: 1, name: "Jane" }, + { age: 10, name: "Jack" }, + { age: 7, name: "Hanna" }, + { age: 8, name: "Jan" }, + ]) + }) + + it("successfully finds a row for one level condition", async () => { + await expectQuery({ + $or: { + conditions: [ + { equal: { age: 7 } }, + { equal: { name: "Jack" } }, + ], + }, + }).toContainExactly([ + { age: 10, name: "Jack" }, + { age: 7, name: "Hanna" }, + ]) + }) + + it("successfully finds a row for one level with multiple conditions", async () => { + await expectQuery({ + $or: { + conditions: [ + { equal: { age: 7 } }, + { equal: { name: "Jack" } }, + ], + }, + }).toContainExactly([ + { age: 10, name: "Jack" }, + { age: 7, name: "Hanna" }, + ]) + }) + + it("successfully finds multiple rows for one level with multiple conditions", async () => { + await expectQuery({ + $or: { + conditions: [ + { range: { age: { low: 1, high: 9 } } }, + { string: { name: "Jan" } }, + ], + }, + }).toContainExactly([ + { age: 1, name: "Jane" }, + { age: 7, name: "Hanna" }, + { age: 8, name: "Jan" }, + ]) + }) + + it("successfully finds rows for nested filters", async () => { + await expectQuery({ + $or: { + conditions: [ + { + $or: { + conditions: [ + { + range: { age: { low: 1, high: 7 } }, + }, + { string: { name: "Jan" } }, + ], + }, + equal: { name: "Jane" }, + }, + ], + }, + }).toContainExactly([ + { age: 1, name: "Jane" }, + { age: 7, name: "Hanna" }, + { age: 8, name: "Jan" }, + ]) + }) + + it("returns nothing when filtering out all data", async () => { + await expectQuery({ + $or: { + conditions: [ + { equal: { age: 6 } }, + { equal: { name: "John" } }, + ], + }, + }).toFindNothing() + }) + + it("can nest $and under $or filters", async () => { + await expectQuery({ + $or: { + conditions: [ + { + $and: { + conditions: [ + { + range: { age: { low: 1, high: 8 } }, + }, + { equal: { name: "Jan" } }, + ], + }, + equal: { name: "Jane" }, + }, + ], + }, + }).toContainExactly([ + { age: 1, name: "Jane" }, + { age: 8, name: "Jan" }, + ]) + }) + + it("can nest $or under $and filters", async () => { + await expectQuery({ + $and: { + conditions: [ + { + $or: { + conditions: [ + { + range: { age: { low: 1, high: 8 } }, + }, + { equal: { name: "Jan" } }, + ], + }, + equal: { name: "Jane" }, + }, + ], + }, + }).toContainExactly([{ age: 1, name: "Jane" }]) + }) + + // onEmptyFilter cannot be sent to view searches + !isView && + it("returns no rows when onEmptyFilter set to none", async () => { + await expectSearch({ + query: 
{ + onEmptyFilter: EmptyFilterOption.RETURN_NONE, + $or: { + conditions: [{ equal: { name: "" } }], + }, + }, + }).toFindNothing() + }) + + it("returns all rows when onEmptyFilter set to all", async () => { + await expectSearch({ + query: { + onEmptyFilter: EmptyFilterOption.RETURN_ALL, + $or: { + conditions: [{ equal: { name: "" } }], + }, + }, + }).toHaveLength(4) + }) + }) + + isSql && + describe("max related columns", () => { + let relatedRows: Row[] + + beforeAll(async () => { + const relatedSchema: TableSchema = {} + const row: Row = {} + for (let i = 0; i < 100; i++) { + const name = `column${i}` + relatedSchema[name] = { name, type: FieldType.NUMBER } + row[name] = i + } + const relatedTable = await createTable(relatedSchema) + tableOrViewId = await createTableOrView({ + name: { name: "name", type: FieldType.STRING }, + related1: { + type: FieldType.LINK, + name: "related1", + fieldName: "main1", + tableId: relatedTable, + relationshipType: RelationshipType.MANY_TO_MANY, + }, + }) + relatedRows = await Promise.all([ + config.api.row.save(relatedTable, row), + ]) + await config.api.row.save(tableOrViewId, { + name: "foo", + related1: [relatedRows[0]._id], + }) + }) + + it("retrieve the row with relationships", async () => { + await expectQuery({}).toContainExactly([ + { + name: "foo", + related1: [{ _id: relatedRows[0]._id }], + }, + ]) + }) + }) + + !isInternal && + describe("SQL injection", () => { + const badStrings = [ + "1; DROP TABLE %table_name%;", + "1; DELETE FROM %table_name%;", + "1; UPDATE %table_name% SET name = 'foo';", + "1; INSERT INTO %table_name% (name) VALUES ('foo');", + "' OR '1'='1' --", + "'; DROP TABLE %table_name%; --", + "' OR 1=1 --", + "' UNION SELECT null, null, null; --", + "' AND (SELECT COUNT(*) FROM %table_name%) > 0 --", + "\"; EXEC xp_cmdshell('dir'); --", + "\"' OR 'a'='a", + "OR 1=1;", + "'; SHUTDOWN --", + ] + + describe.each(badStrings)( + "bad string: %s", + badStringTemplate => { + // The SQL that knex generates when you try to use a double quote in a + // field name is always invalid and never works, so we skip it for these + // tests. 
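              // Rough illustration (an assumption about the quoting behaviour, not
              // captured from a real knex run): if identifiers are wrapped in double
              // quotes without escaping, e.g.
              //
              //   const quoteIdentifier = (id: string) => `"${id}"`
              //
              // then any payload containing a `"` closes the identifier early and the
              // statement no longer parses, which is why the Oracle runs skip those
              // payloads as field names.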
+ const skipFieldNameCheck = + isOracle && badStringTemplate.includes('"') + + !skipFieldNameCheck && + it("should not allow SQL injection as a field name", async () => { + const tableOrViewId = await createTableOrView() + const table = await getTable(tableOrViewId) + const badString = badStringTemplate.replace( + /%table_name%/g, + table.name + ) + + await config.api.table.save({ + ...table, + schema: { + ...table.schema, + [badString]: { + name: badString, + type: FieldType.STRING, + }, + }, + }) + + if (docIds.isViewId(tableOrViewId)) { + const view = await config.api.viewV2.get( + tableOrViewId + ) + await config.api.viewV2.update({ + ...view, + schema: { + [badString]: { visible: true }, + }, + }) + } + + await config.api.row.save(tableOrViewId, { + [badString]: "foo", + }) + + await assertTableExists(table) + await assertTableNumRows(table, 1) + + const { rows } = await config.api.row.search( + tableOrViewId, + { query: {} }, + { status: 200 } + ) + + expect(rows).toHaveLength(1) + + await assertTableExists(table) + await assertTableNumRows(table, 1) + }) + + it("should not allow SQL injection as a field value", async () => { + const tableOrViewId = await createTableOrView({ + foo: { + name: "foo", + type: FieldType.STRING, + }, + }) + const table = await getTable(tableOrViewId) + const badString = badStringTemplate.replace( + /%table_name%/g, + table.name + ) + + await config.api.row.save(tableOrViewId, { foo: "foo" }) + + await assertTableExists(table) + await assertTableNumRows(table, 1) + + const { rows } = await config.api.row.search( + tableOrViewId, + { query: { equal: { foo: badString } } }, + { status: 200 } + ) + + expect(rows).toBeEmpty() + await assertTableExists(table) + await assertTableNumRows(table, 1) + }) + } + ) + }) + } + ) }) - }) - } -) + } + ) +} diff --git a/packages/server/src/api/routes/tests/table.spec.ts b/packages/server/src/api/routes/tests/table.spec.ts index b9d8696714..8556a598c6 100644 --- a/packages/server/src/api/routes/tests/table.spec.ts +++ b/packages/server/src/api/routes/tests/table.spec.ts @@ -38,203 +38,758 @@ import timekeeper from "timekeeper" const { basicTable } = setup.structures const ISO_REGEX_PATTERN = /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$/ -datasourceDescribe( - { name: "/tables (%s)", exclude: [DatabaseName.MONGODB] }, - ({ config, dsProvider, isInternal, isOracle }) => { - let datasource: Datasource | undefined +const descriptions = datasourceDescribe({ exclude: [DatabaseName.MONGODB] }) - beforeAll(async () => { - const ds = await dsProvider() - datasource = ds.datasource - }) +if (descriptions.length) { + describe.each(descriptions)( + "/tables ($dbName)", + ({ config, dsProvider, isInternal, isOracle }) => { + let datasource: Datasource | undefined - describe("create", () => { - beforeEach(() => { - jest.clearAllMocks() + beforeAll(async () => { + const ds = await dsProvider() + datasource = ds.datasource }) - let names = [ - "alphanum", - "with spaces", - "with-dashes", - "with_underscores", - "with `backticks`", - ] - - if (!isOracle) { - names.push(`with "double quotes"`) - names.push(`with 'single quotes'`) - } - - it.each(names)("creates a table with name: %s", async name => { - const table = await config.api.table.save( - tableForDatasource(datasource, { name }) - ) - expect(table.name).toEqual(name) - expect(events.table.created).toHaveBeenCalledTimes(1) - expect(events.table.created).toHaveBeenCalledWith(table) - - const res = await config.api.table.get(table._id!) 
- expect(res.name).toEqual(name) - }) - - it("creates a table via data import", async () => { - const table: SaveTableRequest = basicTable() - table.rows = [{ name: "test-name", description: "test-desc" }] - - const res = await config.api.table.save(table) - - expect(events.table.created).toHaveBeenCalledTimes(1) - expect(events.table.created).toHaveBeenCalledWith(res) - expect(events.table.imported).toHaveBeenCalledTimes(1) - expect(events.table.imported).toHaveBeenCalledWith(res) - expect(events.rows.imported).toHaveBeenCalledTimes(1) - expect(events.rows.imported).toHaveBeenCalledWith(res, 1) - }) - - it("should not allow a column to have a default value and be required", async () => { - await config.api.table.save( - tableForDatasource(datasource, { - schema: { - name: { - name: "name", - type: FieldType.STRING, - default: "default", - constraints: { - presence: true, - }, - }, - }, - }), - { - status: 400, - body: { - message: - 'Cannot make field "name" required, it has a default value.', - }, - } - ) - }) - - it("should apply authorization to endpoint", async () => { - await checkBuilderEndpoint({ - config, - method: "POST", - url: `/api/tables`, - body: basicTable(), + describe("create", () => { + beforeEach(() => { + jest.clearAllMocks() }) - }) - it("does not persist the row fields that are not on the table schema", async () => { - const table: SaveTableRequest = basicTable() - table.rows = [ - { - name: "test-name", - description: "test-desc", - nonValid: "test-non-valid", - }, + let names = [ + "alphanum", + "with spaces", + "with-dashes", + "with_underscores", + "with `backticks`", ] - const res = await config.api.table.save(table) + if (!isOracle) { + names.push(`with "double quotes"`) + names.push(`with 'single quotes'`) + } - const persistedRows = await config.api.row.search(res._id!) + it.each(names)("creates a table with name: %s", async name => { + const table = await config.api.table.save( + tableForDatasource(datasource, { name }) + ) + expect(table.name).toEqual(name) + expect(events.table.created).toHaveBeenCalledTimes(1) + expect(events.table.created).toHaveBeenCalledWith(table) - expect(persistedRows.rows).toEqual([ - expect.objectContaining({ - name: "test-name", - description: "test-desc", - }), - ]) - expect(persistedRows.rows[0].nonValid).toBeUndefined() - }) + const res = await config.api.table.get(table._id!) + expect(res.name).toEqual(name) + }) - it.each( - isInternal ? 
PROTECTED_INTERNAL_COLUMNS : PROTECTED_EXTERNAL_COLUMNS - )( - "cannot use protected column names (%s) while importing a table", - async columnName => { + it("creates a table via data import", async () => { + const table: SaveTableRequest = basicTable() + table.rows = [{ name: "test-name", description: "test-desc" }] + + const res = await config.api.table.save(table) + + expect(events.table.created).toHaveBeenCalledTimes(1) + expect(events.table.created).toHaveBeenCalledWith(res) + expect(events.table.imported).toHaveBeenCalledTimes(1) + expect(events.table.imported).toHaveBeenCalledWith(res) + expect(events.rows.imported).toHaveBeenCalledTimes(1) + expect(events.rows.imported).toHaveBeenCalledWith(res, 1) + }) + + it("should not allow a column to have a default value and be required", async () => { + await config.api.table.save( + tableForDatasource(datasource, { + schema: { + name: { + name: "name", + type: FieldType.STRING, + default: "default", + constraints: { + presence: true, + }, + }, + }, + }), + { + status: 400, + body: { + message: + 'Cannot make field "name" required, it has a default value.', + }, + } + ) + }) + + it("should apply authorization to endpoint", async () => { + await checkBuilderEndpoint({ + config, + method: "POST", + url: `/api/tables`, + body: basicTable(), + }) + }) + + it("does not persist the row fields that are not on the table schema", async () => { const table: SaveTableRequest = basicTable() table.rows = [ { name: "test-name", description: "test-desc", + nonValid: "test-non-valid", }, ] - await config.api.table.save( - { - ...table, - schema: { - ...table.schema, - [columnName]: { - name: columnName, - type: FieldType.STRING, + const res = await config.api.table.save(table) + + const persistedRows = await config.api.row.search(res._id!) + + expect(persistedRows.rows).toEqual([ + expect.objectContaining({ + name: "test-name", + description: "test-desc", + }), + ]) + expect(persistedRows.rows[0].nonValid).toBeUndefined() + }) + + it.each( + isInternal ? 
PROTECTED_INTERNAL_COLUMNS : PROTECTED_EXTERNAL_COLUMNS + )( + "cannot use protected column names (%s) while importing a table", + async columnName => { + const table: SaveTableRequest = basicTable() + table.rows = [ + { + name: "test-name", + description: "test-desc", + }, + ] + + await config.api.table.save( + { + ...table, + schema: { + ...table.schema, + [columnName]: { + name: columnName, + type: FieldType.STRING, + }, }, }, - }, - { - status: 400, - body: { - message: `Column(s) "${columnName}" are duplicated - check for other columns with these name (case in-sensitive)`, + { status: 400, - }, - } - ) - } - ) - }) - - describe("permissions", () => { - it("get the base permissions for the table", async () => { - const table = await config.api.table.save( - tableForDatasource(datasource, { - schema: { - name: { - type: FieldType.STRING, - name: "name", - }, - }, - }) + body: { + message: `Column(s) "${columnName}" are duplicated - check for other columns with these name (case in-sensitive)`, + status: 400, + }, + } + ) + } ) + }) - // get the explicit permissions - const { permissions } = await config.api.permission.get(table._id!, { - status: 200, - }) - const explicitPermissions = { - role: "ADMIN", - permissionType: "EXPLICIT", - } - expect(permissions.write).toEqual(explicitPermissions) - expect(permissions.read).toEqual(explicitPermissions) - - // revoke the explicit permissions - for (let level of [PermissionLevel.WRITE, PermissionLevel.READ]) { - await config.api.permission.revoke( - { - roleId: permissions[level].role, - resourceId: table._id!, - level, - }, - { status: 200 } + describe("permissions", () => { + it("get the base permissions for the table", async () => { + const table = await config.api.table.save( + tableForDatasource(datasource, { + schema: { + name: { + type: FieldType.STRING, + name: "name", + }, + }, + }) ) - } - // check base permissions - const { permissions: basePermissions } = - await config.api.permission.get(table._id!, { + // get the explicit permissions + const { permissions } = await config.api.permission.get(table._id!, { status: 200, }) - const basePerms = { role: "BASIC", permissionType: "BASE" } - expect(basePermissions.write).toEqual(basePerms) - expect(basePermissions.read).toEqual(basePerms) - }) - }) + const explicitPermissions = { + role: "ADMIN", + permissionType: "EXPLICIT", + } + expect(permissions.write).toEqual(explicitPermissions) + expect(permissions.read).toEqual(explicitPermissions) - describe("update", () => { - it("updates a table", async () => { - const table = await config.api.table.save( - tableForDatasource(datasource, { + // revoke the explicit permissions + for (let level of [PermissionLevel.WRITE, PermissionLevel.READ]) { + await config.api.permission.revoke( + { + roleId: permissions[level].role, + resourceId: table._id!, + level, + }, + { status: 200 } + ) + } + + // check base permissions + const { permissions: basePermissions } = + await config.api.permission.get(table._id!, { + status: 200, + }) + const basePerms = { role: "BASIC", permissionType: "BASE" } + expect(basePermissions.write).toEqual(basePerms) + expect(basePermissions.read).toEqual(basePerms) + }) + }) + + describe("update", () => { + it("updates a table", async () => { + const table = await config.api.table.save( + tableForDatasource(datasource, { + schema: { + name: { + type: FieldType.STRING, + name: "name", + constraints: { + type: "string", + }, + }, + }, + }) + ) + + const updatedTable = await config.api.table.save({ + ...table, + name: 
generator.guid(), + }) + + expect(events.table.updated).toHaveBeenCalledTimes(1) + expect(events.table.updated).toHaveBeenCalledWith(updatedTable) + }) + + it("updates all the row fields for a table when a schema key is renamed", async () => { + const testTable = await config.api.table.save(basicTable(datasource)) + await config.createLegacyView({ + name: "TestView", + field: "Price", + calculation: ViewCalculation.STATISTICS, + tableId: testTable._id!, + schema: {}, + filters: [], + }) + + const testRow = await config.api.row.save(testTable._id!, { + name: "test", + }) + + const { name, ...otherColumns } = testTable.schema + const updatedTable = await config.api.table.save({ + ...testTable, + _rename: { + old: "name", + updated: "updatedName", + }, + schema: { + ...otherColumns, + updatedName: { + ...name, + name: "updatedName", + }, + }, + }) + + expect(updatedTable.name).toEqual(testTable.name) + + const res = await config.api.row.get(testTable._id!, testRow._id!) + expect(res.updatedName).toEqual("test") + expect(res.name).toBeUndefined() + }) + + isInternal && + it("updates only the passed fields", async () => { + await timekeeper.withFreeze(new Date(2021, 1, 1), async () => { + const table = await config.api.table.save( + tableForDatasource(datasource, { + schema: { + autoId: { + name: "id", + type: FieldType.NUMBER, + subtype: AutoFieldSubType.AUTO_ID, + autocolumn: true, + constraints: { + type: "number", + presence: false, + }, + }, + }, + }) + ) + + const newName = generator.guid() + + const updatedTable = await config.api.table.save({ + ...table, + name: newName, + }) + + let expected: Table = { + ...table, + name: newName, + _id: expect.any(String), + } + if (isInternal) { + expected._rev = expect.stringMatching(/^2-.+/) + } + + expect(updatedTable).toEqual(expect.objectContaining(expected)) + + const persistedTable = await config.api.table.get( + updatedTable._id! 
+ ) + expected = { + ...table, + name: newName, + _id: updatedTable._id, + } + if (datasource?.isSQL) { + expected.sql = true + } + if (isInternal) { + expected._rev = expect.stringMatching(/^2-.+/) + } + expect(persistedTable).toEqual(expect.objectContaining(expected)) + }) + }) + + describe("user table", () => { + isInternal && + it("should add roleId and email field when adjusting user table schema", async () => { + const table = await config.api.table.save({ + ...basicTable(datasource), + _id: "ta_users", + }) + expect(table.schema.email).toBeDefined() + expect(table.schema.roleId).toBeDefined() + }) + }) + + describe("default field validation", () => { + it("should error if an existing column is set to required and has a default value", async () => { + const table = await config.api.table.save( + tableForDatasource(datasource, { + schema: { + name: { + name: "name", + type: FieldType.STRING, + default: "default", + }, + }, + }) + ) + + await config.api.table.save( + { + ...table, + schema: { + ...table.schema, + name: { + name: "name", + type: FieldType.STRING, + default: "default", + constraints: { + presence: true, + }, + }, + }, + }, + { + status: 400, + body: { + message: + 'Cannot make field "name" required, it has a default value.', + }, + } + ) + }) + + it("should error if an existing column is given a default value and is required", async () => { + const table = await config.api.table.save( + tableForDatasource(datasource, { + schema: { + name: { + name: "name", + type: FieldType.STRING, + constraints: { + presence: true, + }, + }, + }, + }) + ) + + await config.api.table.save( + { + ...table, + schema: { + ...table.schema, + name: { + name: "name", + type: FieldType.STRING, + default: "default", + constraints: { + presence: true, + }, + }, + }, + }, + { + status: 400, + body: { + message: + 'Cannot make field "name" required, it has a default value.', + }, + } + ) + }) + + it("should be able to set an existing column to have a default value if it's not required", async () => { + const table = await config.api.table.save( + tableForDatasource(datasource, { + schema: { + name: { + name: "name", + type: FieldType.STRING, + }, + }, + }) + ) + + await config.api.table.save( + { + ...table, + schema: { + ...table.schema, + name: { + name: "name", + type: FieldType.STRING, + default: "default", + }, + }, + }, + { status: 200 } + ) + }) + + it("should be able to remove a default value if the column is not required", async () => { + const table = await config.api.table.save( + tableForDatasource(datasource, { + schema: { + name: { + name: "name", + type: FieldType.STRING, + default: "default", + }, + }, + }) + ) + + await config.api.table.save( + { + ...table, + schema: { + ...table.schema, + name: { + name: "name", + type: FieldType.STRING, + }, + }, + }, + { status: 200 } + ) + }) + }) + + describe("external table validation", () => { + !isInternal && + it("should error if column is of type auto", async () => { + const table = basicTable(datasource) + await config.api.table.save( + { + ...table, + schema: { + ...table.schema, + auto: { + name: "auto", + autocolumn: true, + type: FieldType.AUTO, + subtype: AutoFieldSubType.AUTO_ID, + }, + }, + }, + { + status: 400, + body: { + message: `Column "auto" has type "${FieldType.AUTO}" - this is not supported.`, + }, + } + ) + }) + + !isInternal && + it("should error if column has auto subtype", async () => { + const table = basicTable(datasource) + await config.api.table.save( + { + ...table, + schema: { + ...table.schema, + auto: { + 
name: "auto", + autocolumn: true, + type: FieldType.NUMBER, + subtype: AutoFieldSubType.AUTO_ID, + }, + }, + }, + { + status: 400, + body: { + message: `Column "auto" has subtype "${AutoFieldSubType.AUTO_ID}" - this is not supported.`, + }, + } + ) + }) + }) + + isInternal && + it("shouldn't allow duplicate column names", async () => { + const saveTableRequest: SaveTableRequest = { + ...basicTable(), + } + saveTableRequest.schema["Type"] = { + type: FieldType.STRING, + name: "Type", + } + // allow the "Type" column - internal columns aren't case sensitive + await config.api.table.save(saveTableRequest, { + status: 200, + }) + saveTableRequest.schema.foo = { + type: FieldType.STRING, + name: "foo", + } + saveTableRequest.schema.FOO = { + type: FieldType.STRING, + name: "FOO", + } + + await config.api.table.save(saveTableRequest, { + status: 400, + body: { + message: + 'Column(s) "foo" are duplicated - check for other columns with these name (case in-sensitive)', + }, + }) + }) + + it("should add a new column for an internal DB table", async () => { + const saveTableRequest: SaveTableRequest = { + ...basicTable(), + } + + const response = await config.api.table.save(saveTableRequest) + + const expectedResponse = { + ...saveTableRequest, + _rev: expect.stringMatching(/^\d-.+/), + _id: expect.stringMatching(/^ta_.+/), + createdAt: expect.stringMatching(ISO_REGEX_PATTERN), + updatedAt: expect.stringMatching(ISO_REGEX_PATTERN), + views: {}, + } + expect(response).toEqual(expectedResponse) + }) + }) + + describe("import", () => { + it("imports rows successfully", async () => { + const name = generator.guid() + const table = await config.api.table.save( + basicTable(datasource, { name }) + ) + const importRequest = { + schema: table.schema, + rows: [{ name: "test-name", description: "test-desc" }], + } + + jest.clearAllMocks() + + await config.api.table.import(table._id!, importRequest) + + expect(events.table.created).not.toHaveBeenCalled() + expect(events.rows.imported).toHaveBeenCalledTimes(1) + expect(events.rows.imported).toHaveBeenCalledWith( + expect.objectContaining({ + name, + _id: table._id, + }), + 1 + ) + }) + }) + + describe("fetch", () => { + let testTable: Table + + beforeEach(async () => { + testTable = await config.api.table.save( + basicTable(datasource, { name: generator.guid() }) + ) + }) + + it("returns all tables", async () => { + const res = await config.api.table.fetch() + const table = res.find(t => t._id === testTable._id) + expect(table).toBeDefined() + expect(table!.name).toEqual(testTable.name) + expect(table!.type).toEqual("table") + expect(table!.sourceType).toEqual(testTable.sourceType) + }) + + it("should apply authorization to endpoint", async () => { + await checkBuilderEndpoint({ + config, + method: "GET", + url: `/api/tables`, + }) + }) + + it("should enrich the view schemas", async () => { + const viewV2 = await config.api.viewV2.create({ + tableId: testTable._id!, + name: generator.guid(), + }) + const legacyView = await config.api.legacyView.save({ + tableId: testTable._id!, + name: generator.guid(), + filters: [], + schema: {}, + }) + + const res = await config.api.table.fetch() + + const table = res.find(t => t._id === testTable._id) + expect(table).toBeDefined() + expect(table!.views![viewV2.name]).toBeDefined() + + const expectedViewV2: ViewV2Enriched = { + ...viewV2, + schema: { + description: { + constraints: { + type: "string", + }, + name: "description", + type: FieldType.STRING, + visible: false, + }, + name: { + constraints: { + type: "string", + 
}, + name: "name", + type: FieldType.STRING, + visible: false, + }, + }, + } + + if (!isInternal) { + expectedViewV2.schema!.id = { + name: "id", + type: FieldType.NUMBER, + visible: false, + autocolumn: true, + } + } + + expect(table!.views![viewV2.name!]).toEqual(expectedViewV2) + + if (isInternal) { + expect(table!.views![legacyView.name!]).toBeDefined() + expect(table!.views![legacyView.name!]).toEqual({ + ...legacyView, + schema: { + description: { + constraints: { + type: "string", + }, + name: "description", + type: "string", + }, + name: { + constraints: { + type: "string", + }, + name: "name", + type: "string", + }, + }, + }) + } + }) + }) + + describe("get", () => { + it("returns a table", async () => { + const table = await config.api.table.save( + basicTable(datasource, { name: generator.guid() }) + ) + const res = await config.api.table.get(table._id!) + expect(res).toEqual(expect.objectContaining(table)) + }) + }) + + describe("indexing", () => { + it("should be able to create a table with indexes", async () => { + await context.doInAppContext(config.getAppId(), async () => { + const db = context.getAppDB() + const indexCount = (await db.getIndexes()).total_rows + const table = basicTable() + table.indexes = ["name"] + const res = await config.api.table.save(table) + expect(res._id).toBeDefined() + expect(res._rev).toBeDefined() + expect((await db.getIndexes()).total_rows).toEqual(indexCount + 1) + // update index to see what happens + table.indexes = ["name", "description"] + await config.api.table.save({ + ...table, + _id: res._id, + _rev: res._rev, + }) + // shouldn't have created a new index + expect((await db.getIndexes()).total_rows).toEqual(indexCount + 1) + }) + }) + }) + + describe("destroy", () => { + let testTable: Table + + beforeEach(async () => { + testTable = await config.createTable() + }) + + it("returns a success response when a table is deleted.", async () => { + await config.api.table.destroy(testTable._id!, testTable._rev!, { + body: { message: `Table ${testTable._id} deleted.` }, + }) + expect(events.table.deleted).toHaveBeenCalledTimes(1) + expect(events.table.deleted).toHaveBeenCalledWith( + expect.objectContaining({ + ...testTable, + tableId: testTable._id, + }) + ) + }) + + it("deletes linked references to the table after deletion", async () => { + const linkedTable = await config.createTable({ + name: "LinkedTable", + type: "table", schema: { name: { type: FieldType.STRING, @@ -243,68 +798,299 @@ datasourceDescribe( type: "string", }, }, + TestTable: { + type: FieldType.LINK, + relationshipType: RelationshipType.ONE_TO_MANY, + name: "TestTable", + fieldName: "TestTable", + tableId: testTable._id!, + constraints: { + type: "array", + }, + }, }, }) - ) - const updatedTable = await config.api.table.save({ - ...table, - name: generator.guid(), + await config.api.table.destroy(testTable._id!, testTable._rev!, { + body: { message: `Table ${testTable._id} deleted.` }, + }) + const dependentTable = await config.api.table.get(linkedTable._id!) 
+ expect(dependentTable.schema.TestTable).not.toBeDefined() }) - expect(events.table.updated).toHaveBeenCalledTimes(1) - expect(events.table.updated).toHaveBeenCalledWith(updatedTable) + it("should apply authorization to endpoint", async () => { + await checkBuilderEndpoint({ + config, + method: "DELETE", + url: `/api/tables/${testTable._id}/${testTable._rev}`, + }) + }) }) - it("updates all the row fields for a table when a schema key is renamed", async () => { - const testTable = await config.api.table.save(basicTable(datasource)) - await config.createLegacyView({ - name: "TestView", - field: "Price", - calculation: ViewCalculation.STATISTICS, - tableId: testTable._id!, - schema: {}, - filters: [], + describe("migrate", () => { + let users: User[] + beforeAll(async () => { + users = await Promise.all([ + config.createUser({ email: `${uuid.v4()}@example.com` }), + config.createUser({ email: `${uuid.v4()}@example.com` }), + config.createUser({ email: `${uuid.v4()}@example.com` }), + ]) }) - const testRow = await config.api.row.save(testTable._id!, { - name: "test", - }) - - const { name, ...otherColumns } = testTable.schema - const updatedTable = await config.api.table.save({ - ...testTable, - _rename: { - old: "name", - updated: "updatedName", - }, - schema: { - ...otherColumns, - updatedName: { - ...name, - name: "updatedName", + it("should successfully migrate a one-to-many user relationship to a user column", async () => { + const table = await config.api.table.save({ + name: "table", + type: "table", + sourceId: INTERNAL_TABLE_SOURCE_ID, + sourceType: TableSourceType.INTERNAL, + schema: { + "user relationship": { + type: FieldType.LINK, + fieldName: "test", + name: "user relationship", + constraints: { + type: "array", + presence: false, + }, + relationshipType: RelationshipType.ONE_TO_MANY, + tableId: InternalTable.USER_METADATA, + }, }, - }, + }) + + const rows = await Promise.all( + users.map(u => + config.api.row.save(table._id!, { "user relationship": [u] }) + ) + ) + + await config.api.table.migrate(table._id!, { + oldColumn: "user relationship", + newColumn: "user column", + }) + + const migratedTable = await config.api.table.get(table._id!) + expect(migratedTable.schema["user column"]).toEqual({ + name: "user column", + type: FieldType.BB_REFERENCE_SINGLE, + subtype: BBReferenceFieldSubType.USER, + }) + expect(migratedTable.schema["user relationship"]).not.toBeDefined() + + const migratedRows = await config.api.row.fetch(table._id!) + + rows.sort((a, b) => a._id!.localeCompare(b._id!)) + migratedRows.sort((a, b) => a._id!.localeCompare(b._id!)) + + for (const [i, row] of rows.entries()) { + const migratedRow = migratedRows[i] + expect(migratedRow["user column"]).toBeDefined() + expect(migratedRow["user relationship"]).not.toBeDefined() + expect(row["user relationship"][0]._id).toEqual( + migratedRow["user column"]._id + ) + } }) - expect(updatedTable.name).toEqual(testTable.name) + it("should succeed when the row is created from the other side of the relationship", async () => { + // We found a bug just after releasing this feature where if the row was created from the + // users table, not the table linking to it, the migration would succeed but lose the data. + // This happened because the order of the documents in the link was reversed. 
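+        // To reproduce that case, the links below are created by patching the user
+        // rows (the other side of the relationship) rather than the table row itself.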
+ const table = await config.api.table.save({ + name: "table", + type: "table", + sourceId: INTERNAL_TABLE_SOURCE_ID, + sourceType: TableSourceType.INTERNAL, + schema: { + "user relationship": { + type: FieldType.LINK, + fieldName: "test", + name: "user relationship", + constraints: { + type: "array", + presence: false, + }, + relationshipType: RelationshipType.MANY_TO_ONE, + tableId: InternalTable.USER_METADATA, + }, + }, + }) - const res = await config.api.row.get(testTable._id!, testRow._id!) - expect(res.updatedName).toEqual("test") - expect(res.name).toBeUndefined() - }) + let testRow = await config.api.row.save(table._id!, {}) - isInternal && - it("updates only the passed fields", async () => { - await timekeeper.withFreeze(new Date(2021, 1, 1), async () => { - const table = await config.api.table.save( + await Promise.all( + users.map(u => + config.api.row.patch(InternalTable.USER_METADATA, { + tableId: InternalTable.USER_METADATA, + _rev: u._rev!, + _id: u._id!, + test: [testRow], + }) + ) + ) + + await config.api.table.migrate(table._id!, { + oldColumn: "user relationship", + newColumn: "user column", + }) + + const migratedTable = await config.api.table.get(table._id!) + expect(migratedTable.schema["user column"]).toEqual({ + name: "user column", + type: FieldType.BB_REFERENCE, + subtype: BBReferenceFieldSubType.USER, + constraints: { + type: "array", + }, + }) + expect(migratedTable.schema["user relationship"]).not.toBeDefined() + + const migratedRow = await config.api.row.get(table._id!, testRow._id!) + + expect(migratedRow["user column"]).toBeDefined() + expect(migratedRow["user relationship"]).not.toBeDefined() + expect(migratedRow["user column"]).toHaveLength(3) + expect(migratedRow["user column"].map((u: Row) => u._id)).toEqual( + expect.arrayContaining(users.map(u => u._id)) + ) + }) + + it("should successfully migrate a many-to-many user relationship to a users column", async () => { + const table = await config.api.table.save({ + name: "table", + type: "table", + sourceId: INTERNAL_TABLE_SOURCE_ID, + sourceType: TableSourceType.INTERNAL, + schema: { + "user relationship": { + type: FieldType.LINK, + fieldName: "test", + name: "user relationship", + constraints: { + type: "array", + presence: false, + }, + relationshipType: RelationshipType.MANY_TO_MANY, + tableId: InternalTable.USER_METADATA, + }, + }, + }) + + const row1 = await config.api.row.save(table._id!, { + "user relationship": [users[0], users[1]], + }) + + const row2 = await config.api.row.save(table._id!, { + "user relationship": [users[1], users[2]], + }) + + await config.api.table.migrate(table._id!, { + oldColumn: "user relationship", + newColumn: "user column", + }) + + const migratedTable = await config.api.table.get(table._id!) + expect(migratedTable.schema["user column"]).toEqual({ + name: "user column", + type: FieldType.BB_REFERENCE, + subtype: BBReferenceFieldSubType.USER, + constraints: { + type: "array", + }, + }) + expect(migratedTable.schema["user relationship"]).not.toBeDefined() + + const row1Migrated = await config.api.row.get(table._id!, row1._id!) + expect(row1Migrated["user relationship"]).not.toBeDefined() + expect(row1Migrated["user column"].map((r: Row) => r._id)).toEqual( + expect.arrayContaining([users[0]._id, users[1]._id]) + ) + + const row2Migrated = await config.api.row.get(table._id!, row2._id!) 
+ expect(row2Migrated["user relationship"]).not.toBeDefined() + expect(row2Migrated["user column"].map((r: Row) => r._id)).toEqual( + expect.arrayContaining([users[1]._id, users[2]._id]) + ) + }) + + it("should successfully migrate a many-to-one user relationship to a users column", async () => { + const table = await config.api.table.save({ + name: "table", + type: "table", + sourceId: INTERNAL_TABLE_SOURCE_ID, + sourceType: TableSourceType.INTERNAL, + schema: { + "user relationship": { + type: FieldType.LINK, + fieldName: "test", + name: "user relationship", + constraints: { + type: "array", + presence: false, + }, + relationshipType: RelationshipType.MANY_TO_ONE, + tableId: InternalTable.USER_METADATA, + }, + }, + }) + + const row1 = await config.api.row.save(table._id!, { + "user relationship": [users[0], users[1]], + }) + + const row2 = await config.api.row.save(table._id!, { + "user relationship": [users[2]], + }) + + await config.api.table.migrate(table._id!, { + oldColumn: "user relationship", + newColumn: "user column", + }) + + const migratedTable = await config.api.table.get(table._id!) + expect(migratedTable.schema["user column"]).toEqual({ + name: "user column", + type: FieldType.BB_REFERENCE, + subtype: BBReferenceFieldSubType.USER, + constraints: { + type: "array", + }, + }) + expect(migratedTable.schema["user relationship"]).not.toBeDefined() + + const row1Migrated = await config.api.row.get(table._id!, row1._id!) + expect(row1Migrated["user relationship"]).not.toBeDefined() + expect(row1Migrated["user column"].map((r: Row) => r._id)).toEqual( + expect.arrayContaining([users[0]._id, users[1]._id]) + ) + + const row2Migrated = await config.api.row.get(table._id!, row2._id!) + expect(row2Migrated["user relationship"]).not.toBeDefined() + expect(row2Migrated["user column"].map((r: Row) => r._id)).toEqual([ + users[2]._id, + ]) + }) + + describe("unhappy paths", () => { + let table: Table + beforeAll(async () => { + table = await config.api.table.save( tableForDatasource(datasource, { schema: { - autoId: { - name: "id", + "user relationship": { + type: FieldType.LINK, + fieldName: "test", + name: "user relationship", + constraints: { + type: "array", + presence: false, + }, + relationshipType: RelationshipType.MANY_TO_ONE, + tableId: InternalTable.USER_METADATA, + }, + num: { type: FieldType.NUMBER, - subtype: AutoFieldSubType.AUTO_ID, - autocolumn: true, + name: "num", constraints: { type: "number", presence: false, @@ -313,1077 +1099,238 @@ datasourceDescribe( }, }) ) - - const newName = generator.guid() - - const updatedTable = await config.api.table.save({ - ...table, - name: newName, - }) - - let expected: Table = { - ...table, - name: newName, - _id: expect.any(String), - } - if (isInternal) { - expected._rev = expect.stringMatching(/^2-.+/) - } - - expect(updatedTable).toEqual(expect.objectContaining(expected)) - - const persistedTable = await config.api.table.get(updatedTable._id!) 
- expected = { - ...table, - name: newName, - _id: updatedTable._id, - } - if (datasource?.isSQL) { - expected.sql = true - } - if (isInternal) { - expected._rev = expect.stringMatching(/^2-.+/) - } - expect(persistedTable).toEqual(expect.objectContaining(expected)) }) - }) - describe("user table", () => { - isInternal && - it("should add roleId and email field when adjusting user table schema", async () => { - const table = await config.api.table.save({ - ...basicTable(datasource), - _id: "ta_users", - }) - expect(table.schema.email).toBeDefined() - expect(table.schema.roleId).toBeDefined() - }) - }) - - describe("default field validation", () => { - it("should error if an existing column is set to required and has a default value", async () => { - const table = await config.api.table.save( - tableForDatasource(datasource, { - schema: { - name: { - name: "name", - type: FieldType.STRING, - default: "default", - }, - }, - }) - ) - - await config.api.table.save( - { - ...table, - schema: { - ...table.schema, - name: { - name: "name", - type: FieldType.STRING, - default: "default", - constraints: { - presence: true, - }, - }, - }, - }, - { - status: 400, - body: { - message: - 'Cannot make field "name" required, it has a default value.', - }, - } - ) - }) - - it("should error if an existing column is given a default value and is required", async () => { - const table = await config.api.table.save( - tableForDatasource(datasource, { - schema: { - name: { - name: "name", - type: FieldType.STRING, - constraints: { - presence: true, - }, - }, - }, - }) - ) - - await config.api.table.save( - { - ...table, - schema: { - ...table.schema, - name: { - name: "name", - type: FieldType.STRING, - default: "default", - constraints: { - presence: true, - }, - }, - }, - }, - { - status: 400, - body: { - message: - 'Cannot make field "name" required, it has a default value.', - }, - } - ) - }) - - it("should be able to set an existing column to have a default value if it's not required", async () => { - const table = await config.api.table.save( - tableForDatasource(datasource, { - schema: { - name: { - name: "name", - type: FieldType.STRING, - }, - }, - }) - ) - - await config.api.table.save( - { - ...table, - schema: { - ...table.schema, - name: { - name: "name", - type: FieldType.STRING, - default: "default", - }, - }, - }, - { status: 200 } - ) - }) - - it("should be able to remove a default value if the column is not required", async () => { - const table = await config.api.table.save( - tableForDatasource(datasource, { - schema: { - name: { - name: "name", - type: FieldType.STRING, - default: "default", - }, - }, - }) - ) - - await config.api.table.save( - { - ...table, - schema: { - ...table.schema, - name: { - name: "name", - type: FieldType.STRING, - }, - }, - }, - { status: 200 } - ) - }) - }) - - describe("external table validation", () => { - !isInternal && - it("should error if column is of type auto", async () => { - const table = basicTable(datasource) - await config.api.table.save( + it("should fail if the new column name is blank", async () => { + await config.api.table.migrate( + table._id!, { - ...table, - schema: { - ...table.schema, - auto: { - name: "auto", - autocolumn: true, - type: FieldType.AUTO, - subtype: AutoFieldSubType.AUTO_ID, - }, - }, + oldColumn: "user relationship", + newColumn: "", }, - { - status: 400, - body: { - message: `Column "auto" has type "${FieldType.AUTO}" - this is not supported.`, - }, - } + { status: 400 } ) }) - !isInternal && - it("should error if column 
has auto subtype", async () => { - const table = basicTable(datasource) - await config.api.table.save( + it("should fail if the new column name is a reserved name", async () => { + await config.api.table.migrate( + table._id!, { - ...table, - schema: { - ...table.schema, - auto: { - name: "auto", - autocolumn: true, - type: FieldType.NUMBER, - subtype: AutoFieldSubType.AUTO_ID, - }, - }, + oldColumn: "user relationship", + newColumn: "_id", }, - { - status: 400, - body: { - message: `Column "auto" has subtype "${AutoFieldSubType.AUTO_ID}" - this is not supported.`, - }, - } + { status: 400 } ) }) - }) - isInternal && - it("shouldn't allow duplicate column names", async () => { - const saveTableRequest: SaveTableRequest = { - ...basicTable(), - } - saveTableRequest.schema["Type"] = { - type: FieldType.STRING, - name: "Type", - } - // allow the "Type" column - internal columns aren't case sensitive - await config.api.table.save(saveTableRequest, { - status: 200, + it("should fail if the new column name is the same as an existing column", async () => { + await config.api.table.migrate( + table._id!, + { + oldColumn: "user relationship", + newColumn: "num", + }, + { status: 400 } + ) }) - saveTableRequest.schema.foo = { type: FieldType.STRING, name: "foo" } - saveTableRequest.schema.FOO = { type: FieldType.STRING, name: "FOO" } - await config.api.table.save(saveTableRequest, { - status: 400, - body: { - message: - 'Column(s) "foo" are duplicated - check for other columns with these name (case in-sensitive)', - }, + it("should fail if the old column name isn't a column in the table", async () => { + await config.api.table.migrate( + table._id!, + { + oldColumn: "not a column", + newColumn: "new column", + }, + { status: 400 } + ) }) }) - - it("should add a new column for an internal DB table", async () => { - const saveTableRequest: SaveTableRequest = { - ...basicTable(), - } - - const response = await config.api.table.save(saveTableRequest) - - const expectedResponse = { - ...saveTableRequest, - _rev: expect.stringMatching(/^\d-.+/), - _id: expect.stringMatching(/^ta_.+/), - createdAt: expect.stringMatching(ISO_REGEX_PATTERN), - updatedAt: expect.stringMatching(ISO_REGEX_PATTERN), - views: {}, - } - expect(response).toEqual(expectedResponse) - }) - }) - - describe("import", () => { - it("imports rows successfully", async () => { - const name = generator.guid() - const table = await config.api.table.save( - basicTable(datasource, { name }) - ) - const importRequest = { - schema: table.schema, - rows: [{ name: "test-name", description: "test-desc" }], - } - - jest.clearAllMocks() - - await config.api.table.import(table._id!, importRequest) - - expect(events.table.created).not.toHaveBeenCalled() - expect(events.rows.imported).toHaveBeenCalledTimes(1) - expect(events.rows.imported).toHaveBeenCalledWith( - expect.objectContaining({ - name, - _id: table._id, - }), - 1 - ) - }) - }) - - describe("fetch", () => { - let testTable: Table - - beforeEach(async () => { - testTable = await config.api.table.save( - basicTable(datasource, { name: generator.guid() }) - ) }) - it("returns all tables", async () => { - const res = await config.api.table.fetch() - const table = res.find(t => t._id === testTable._id) - expect(table).toBeDefined() - expect(table!.name).toEqual(testTable.name) - expect(table!.type).toEqual("table") - expect(table!.sourceType).toEqual(testTable.sourceType) - }) - - it("should apply authorization to endpoint", async () => { - await checkBuilderEndpoint({ - config, - method: "GET", - 
url: `/api/tables`, - }) - }) - - it("should enrich the view schemas", async () => { - const viewV2 = await config.api.viewV2.create({ - tableId: testTable._id!, - name: generator.guid(), - }) - const legacyView = await config.api.legacyView.save({ - tableId: testTable._id!, - name: generator.guid(), - filters: [], - schema: {}, - }) - - const res = await config.api.table.fetch() - - const table = res.find(t => t._id === testTable._id) - expect(table).toBeDefined() - expect(table!.views![viewV2.name]).toBeDefined() - - const expectedViewV2: ViewV2Enriched = { - ...viewV2, - schema: { - description: { - constraints: { - type: "string", - }, - name: "description", - type: FieldType.STRING, - visible: false, - }, - name: { - constraints: { - type: "string", - }, - name: "name", - type: FieldType.STRING, - visible: false, - }, - }, - } - - if (!isInternal) { - expectedViewV2.schema!.id = { - name: "id", - type: FieldType.NUMBER, - visible: false, - autocolumn: true, - } - } - - expect(table!.views![viewV2.name!]).toEqual(expectedViewV2) - - if (isInternal) { - expect(table!.views![legacyView.name!]).toBeDefined() - expect(table!.views![legacyView.name!]).toEqual({ - ...legacyView, - schema: { - description: { - constraints: { - type: "string", - }, - name: "description", - type: "string", - }, - name: { - constraints: { - type: "string", - }, - name: "name", - type: "string", - }, - }, - }) - } - }) - }) - - describe("get", () => { - it("returns a table", async () => { - const table = await config.api.table.save( - basicTable(datasource, { name: generator.guid() }) - ) - const res = await config.api.table.get(table._id!) - expect(res).toEqual(expect.objectContaining(table)) - }) - }) - - describe("indexing", () => { - it("should be able to create a table with indexes", async () => { - await context.doInAppContext(config.getAppId(), async () => { - const db = context.getAppDB() - const indexCount = (await db.getIndexes()).total_rows - const table = basicTable() - table.indexes = ["name"] - const res = await config.api.table.save(table) - expect(res._id).toBeDefined() - expect(res._rev).toBeDefined() - expect((await db.getIndexes()).total_rows).toEqual(indexCount + 1) - // update index to see what happens - table.indexes = ["name", "description"] - await config.api.table.save({ - ...table, - _id: res._id, - _rev: res._rev, - }) - // shouldn't have created a new index - expect((await db.getIndexes()).total_rows).toEqual(indexCount + 1) - }) - }) - }) - - describe("destroy", () => { - let testTable: Table - - beforeEach(async () => { - testTable = await config.createTable() - }) - - it("returns a success response when a table is deleted.", async () => { - await config.api.table.destroy(testTable._id!, testTable._rev!, { - body: { message: `Table ${testTable._id} deleted.` }, - }) - expect(events.table.deleted).toHaveBeenCalledTimes(1) - expect(events.table.deleted).toHaveBeenCalledWith( - expect.objectContaining({ - ...testTable, - tableId: testTable._id, - }) - ) - }) - - it("deletes linked references to the table after deletion", async () => { - const linkedTable = await config.createTable({ - name: "LinkedTable", - type: "table", - schema: { - name: { - type: FieldType.STRING, - name: "name", - constraints: { - type: "string", - }, - }, - TestTable: { - type: FieldType.LINK, - relationshipType: RelationshipType.ONE_TO_MANY, - name: "TestTable", - fieldName: "TestTable", - tableId: testTable._id!, - constraints: { - type: "array", - }, - }, - }, - }) - - await 
config.api.table.destroy(testTable._id!, testTable._rev!, { - body: { message: `Table ${testTable._id} deleted.` }, - }) - const dependentTable = await config.api.table.get(linkedTable._id!) - expect(dependentTable.schema.TestTable).not.toBeDefined() - }) - - it("should apply authorization to endpoint", async () => { - await checkBuilderEndpoint({ - config, - method: "DELETE", - url: `/api/tables/${testTable._id}/${testTable._rev}`, - }) - }) - }) - - describe("migrate", () => { - let users: User[] - beforeAll(async () => { - users = await Promise.all([ - config.createUser({ email: `${uuid.v4()}@example.com` }), - config.createUser({ email: `${uuid.v4()}@example.com` }), - config.createUser({ email: `${uuid.v4()}@example.com` }), - ]) - }) - - it("should successfully migrate a one-to-many user relationship to a user column", async () => { - const table = await config.api.table.save({ - name: "table", - type: "table", - sourceId: INTERNAL_TABLE_SOURCE_ID, - sourceType: TableSourceType.INTERNAL, - schema: { - "user relationship": { - type: FieldType.LINK, - fieldName: "test", - name: "user relationship", - constraints: { - type: "array", - presence: false, - }, - relationshipType: RelationshipType.ONE_TO_MANY, - tableId: InternalTable.USER_METADATA, - }, - }, - }) - - const rows = await Promise.all( - users.map(u => - config.api.row.save(table._id!, { "user relationship": [u] }) - ) - ) - - await config.api.table.migrate(table._id!, { - oldColumn: "user relationship", - newColumn: "user column", - }) - - const migratedTable = await config.api.table.get(table._id!) - expect(migratedTable.schema["user column"]).toEqual({ - name: "user column", - type: FieldType.BB_REFERENCE_SINGLE, - subtype: BBReferenceFieldSubType.USER, - }) - expect(migratedTable.schema["user relationship"]).not.toBeDefined() - - const migratedRows = await config.api.row.fetch(table._id!) - - rows.sort((a, b) => a._id!.localeCompare(b._id!)) - migratedRows.sort((a, b) => a._id!.localeCompare(b._id!)) - - for (const [i, row] of rows.entries()) { - const migratedRow = migratedRows[i] - expect(migratedRow["user column"]).toBeDefined() - expect(migratedRow["user relationship"]).not.toBeDefined() - expect(row["user relationship"][0]._id).toEqual( - migratedRow["user column"]._id - ) - } - }) - - it("should succeed when the row is created from the other side of the relationship", async () => { - // We found a bug just after releasing this feature where if the row was created from the - // users table, not the table linking to it, the migration would succeed but lose the data. - // This happened because the order of the documents in the link was reversed. - const table = await config.api.table.save({ - name: "table", - type: "table", - sourceId: INTERNAL_TABLE_SOURCE_ID, - sourceType: TableSourceType.INTERNAL, - schema: { - "user relationship": { - type: FieldType.LINK, - fieldName: "test", - name: "user relationship", - constraints: { - type: "array", - presence: false, - }, - relationshipType: RelationshipType.MANY_TO_ONE, - tableId: InternalTable.USER_METADATA, - }, - }, - }) - - let testRow = await config.api.row.save(table._id!, {}) - - await Promise.all( - users.map(u => - config.api.row.patch(InternalTable.USER_METADATA, { - tableId: InternalTable.USER_METADATA, - _rev: u._rev!, - _id: u._id!, - test: [testRow], - }) - ) - ) - - await config.api.table.migrate(table._id!, { - oldColumn: "user relationship", - newColumn: "user column", - }) - - const migratedTable = await config.api.table.get(table._id!) 
- expect(migratedTable.schema["user column"]).toEqual({ - name: "user column", - type: FieldType.BB_REFERENCE, - subtype: BBReferenceFieldSubType.USER, - constraints: { - type: "array", - }, - }) - expect(migratedTable.schema["user relationship"]).not.toBeDefined() - - const migratedRow = await config.api.row.get(table._id!, testRow._id!) - - expect(migratedRow["user column"]).toBeDefined() - expect(migratedRow["user relationship"]).not.toBeDefined() - expect(migratedRow["user column"]).toHaveLength(3) - expect(migratedRow["user column"].map((u: Row) => u._id)).toEqual( - expect.arrayContaining(users.map(u => u._id)) - ) - }) - - it("should successfully migrate a many-to-many user relationship to a users column", async () => { - const table = await config.api.table.save({ - name: "table", - type: "table", - sourceId: INTERNAL_TABLE_SOURCE_ID, - sourceType: TableSourceType.INTERNAL, - schema: { - "user relationship": { - type: FieldType.LINK, - fieldName: "test", - name: "user relationship", - constraints: { - type: "array", - presence: false, - }, - relationshipType: RelationshipType.MANY_TO_MANY, - tableId: InternalTable.USER_METADATA, - }, - }, - }) - - const row1 = await config.api.row.save(table._id!, { - "user relationship": [users[0], users[1]], - }) - - const row2 = await config.api.row.save(table._id!, { - "user relationship": [users[1], users[2]], - }) - - await config.api.table.migrate(table._id!, { - oldColumn: "user relationship", - newColumn: "user column", - }) - - const migratedTable = await config.api.table.get(table._id!) - expect(migratedTable.schema["user column"]).toEqual({ - name: "user column", - type: FieldType.BB_REFERENCE, - subtype: BBReferenceFieldSubType.USER, - constraints: { - type: "array", - }, - }) - expect(migratedTable.schema["user relationship"]).not.toBeDefined() - - const row1Migrated = await config.api.row.get(table._id!, row1._id!) - expect(row1Migrated["user relationship"]).not.toBeDefined() - expect(row1Migrated["user column"].map((r: Row) => r._id)).toEqual( - expect.arrayContaining([users[0]._id, users[1]._id]) - ) - - const row2Migrated = await config.api.row.get(table._id!, row2._id!) - expect(row2Migrated["user relationship"]).not.toBeDefined() - expect(row2Migrated["user column"].map((r: Row) => r._id)).toEqual( - expect.arrayContaining([users[1]._id, users[2]._id]) - ) - }) - - it("should successfully migrate a many-to-one user relationship to a users column", async () => { - const table = await config.api.table.save({ - name: "table", - type: "table", - sourceId: INTERNAL_TABLE_SOURCE_ID, - sourceType: TableSourceType.INTERNAL, - schema: { - "user relationship": { - type: FieldType.LINK, - fieldName: "test", - name: "user relationship", - constraints: { - type: "array", - presence: false, - }, - relationshipType: RelationshipType.MANY_TO_ONE, - tableId: InternalTable.USER_METADATA, - }, - }, - }) - - const row1 = await config.api.row.save(table._id!, { - "user relationship": [users[0], users[1]], - }) - - const row2 = await config.api.row.save(table._id!, { - "user relationship": [users[2]], - }) - - await config.api.table.migrate(table._id!, { - oldColumn: "user relationship", - newColumn: "user column", - }) - - const migratedTable = await config.api.table.get(table._id!) 
- expect(migratedTable.schema["user column"]).toEqual({ - name: "user column", - type: FieldType.BB_REFERENCE, - subtype: BBReferenceFieldSubType.USER, - constraints: { - type: "array", - }, - }) - expect(migratedTable.schema["user relationship"]).not.toBeDefined() - - const row1Migrated = await config.api.row.get(table._id!, row1._id!) - expect(row1Migrated["user relationship"]).not.toBeDefined() - expect(row1Migrated["user column"].map((r: Row) => r._id)).toEqual( - expect.arrayContaining([users[0]._id, users[1]._id]) - ) - - const row2Migrated = await config.api.row.get(table._id!, row2._id!) - expect(row2Migrated["user relationship"]).not.toBeDefined() - expect(row2Migrated["user column"].map((r: Row) => r._id)).toEqual([ - users[2]._id, - ]) - }) - - describe("unhappy paths", () => { - let table: Table - beforeAll(async () => { - table = await config.api.table.save( - tableForDatasource(datasource, { - schema: { - "user relationship": { - type: FieldType.LINK, - fieldName: "test", - name: "user relationship", - constraints: { - type: "array", - presence: false, - }, - relationshipType: RelationshipType.MANY_TO_ONE, - tableId: InternalTable.USER_METADATA, - }, - num: { - type: FieldType.NUMBER, - name: "num", - constraints: { - type: "number", - presence: false, - }, - }, - }, - }) - ) - }) - - it("should fail if the new column name is blank", async () => { - await config.api.table.migrate( - table._id!, - { - oldColumn: "user relationship", - newColumn: "", - }, - { status: 400 } - ) - }) - - it("should fail if the new column name is a reserved name", async () => { - await config.api.table.migrate( - table._id!, - { - oldColumn: "user relationship", - newColumn: "_id", - }, - { status: 400 } - ) - }) - - it("should fail if the new column name is the same as an existing column", async () => { - await config.api.table.migrate( - table._id!, - { - oldColumn: "user relationship", - newColumn: "num", - }, - { status: 400 } - ) - }) - - it("should fail if the old column name isn't a column in the table", async () => { - await config.api.table.migrate( - table._id!, - { - oldColumn: "not a column", - newColumn: "new column", - }, - { status: 400 } - ) - }) - }) - }) - - describe.each([ - [ - RowExportFormat.CSV, - (val: any) => JSON.stringify(val).replace(/"/g, "'"), - ], - [RowExportFormat.JSON, (val: any) => val], - ])("import validation (%s)", (_, userParser) => { - const basicSchema: TableSchema = { - id: { - type: FieldType.NUMBER, - name: "id", - }, - name: { - type: FieldType.STRING, - name: "name", - }, - } - - const importCases: [ - string, - ( - rows: Row[], - schema: TableSchema - ) => Promise - ][] = [ + describe.each([ [ - "validateNewTableImport", - async (rows: Row[], schema: TableSchema) => { - const result = await config.api.table.validateNewTableImport({ - rows, - schema, - }) - return result - }, + RowExportFormat.CSV, + (val: any) => JSON.stringify(val).replace(/"/g, "'"), ], - [ - "validateExistingTableImport", - async (rows: Row[], schema: TableSchema) => { - const table = await config.api.table.save( - tableForDatasource(datasource, { - primary: ["id"], + [RowExportFormat.JSON, (val: any) => val], + ])("import validation (%s)", (_, userParser) => { + const basicSchema: TableSchema = { + id: { + type: FieldType.NUMBER, + name: "id", + }, + name: { + type: FieldType.STRING, + name: "name", + }, + } + + const importCases: [ + string, + ( + rows: Row[], + schema: TableSchema + ) => Promise + ][] = [ + [ + "validateNewTableImport", + async (rows: Row[], schema: TableSchema) 
=> { + const result = await config.api.table.validateNewTableImport({ + rows, schema, }) + return result + }, + ], + [ + "validateExistingTableImport", + async (rows: Row[], schema: TableSchema) => { + const table = await config.api.table.save( + tableForDatasource(datasource, { + primary: ["id"], + schema, + }) + ) + const result = await config.api.table.validateExistingTableImport( + { + tableId: table._id, + rows, + } + ) + return result + }, + ], + ] + + describe.each(importCases)("%s", (_, testDelegate) => { + it("validates basic imports", async () => { + const result = await testDelegate( + [{ id: generator.natural(), name: generator.first() }], + basicSchema ) - const result = await config.api.table.validateExistingTableImport({ - tableId: table._id, - rows, - }) - return result - }, - ], - ] - describe.each(importCases)("%s", (_, testDelegate) => { - it("validates basic imports", async () => { - const result = await testDelegate( - [{ id: generator.natural(), name: generator.first() }], - basicSchema - ) - - expect(result).toEqual({ - allValid: true, - errors: {}, - invalidColumns: [], - schemaValidation: { - id: true, - name: true, - }, - }) - }) - - it.each( - isInternal ? PROTECTED_INTERNAL_COLUMNS : PROTECTED_EXTERNAL_COLUMNS - )("don't allow protected names in schema (%s)", async columnName => { - const result = await config.api.table.validateNewTableImport({ - rows: [ - { - id: generator.natural(), - name: generator.first(), - [columnName]: generator.word(), + expect(result).toEqual({ + allValid: true, + errors: {}, + invalidColumns: [], + schemaValidation: { + id: true, + name: true, }, - ], - schema: { - ...basicSchema, - }, + }) }) - expect(result).toEqual({ - allValid: false, - errors: { - [columnName]: `${columnName} is a protected column name`, - }, - invalidColumns: [], - schemaValidation: { - id: true, - name: true, - [columnName]: false, - }, - }) - }) - - it("does not allow imports without rows", async () => { - const result = await testDelegate([], basicSchema) - - expect(result).toEqual({ - allValid: false, - errors: {}, - invalidColumns: [], - schemaValidation: {}, - }) - }) - - it("validates imports with some empty rows", async () => { - const result = await testDelegate( - [{}, { id: generator.natural(), name: generator.first() }, {}], - basicSchema - ) - - expect(result).toEqual({ - allValid: true, - errors: {}, - invalidColumns: [], - schemaValidation: { - id: true, - name: true, - }, - }) - }) - - isInternal && it.each( isInternal ? 
PROTECTED_INTERNAL_COLUMNS : PROTECTED_EXTERNAL_COLUMNS - )( - "don't allow protected names in the rows (%s)", - async columnName => { - const result = await config.api.table.validateNewTableImport({ - rows: [ - { - id: generator.natural(), - name: generator.first(), + )("don't allow protected names in schema (%s)", async columnName => { + const result = await config.api.table.validateNewTableImport({ + rows: [ + { + id: generator.natural(), + name: generator.first(), + [columnName]: generator.word(), + }, + ], + schema: { + ...basicSchema, + }, + }) + + expect(result).toEqual({ + allValid: false, + errors: { + [columnName]: `${columnName} is a protected column name`, + }, + invalidColumns: [], + schemaValidation: { + id: true, + name: true, + [columnName]: false, + }, + }) + }) + + it("does not allow imports without rows", async () => { + const result = await testDelegate([], basicSchema) + + expect(result).toEqual({ + allValid: false, + errors: {}, + invalidColumns: [], + schemaValidation: {}, + }) + }) + + it("validates imports with some empty rows", async () => { + const result = await testDelegate( + [{}, { id: generator.natural(), name: generator.first() }, {}], + basicSchema + ) + + expect(result).toEqual({ + allValid: true, + errors: {}, + invalidColumns: [], + schemaValidation: { + id: true, + name: true, + }, + }) + }) + + isInternal && + it.each( + isInternal + ? PROTECTED_INTERNAL_COLUMNS + : PROTECTED_EXTERNAL_COLUMNS + )( + "don't allow protected names in the rows (%s)", + async columnName => { + const result = await config.api.table.validateNewTableImport({ + rows: [ + { + id: generator.natural(), + name: generator.first(), + }, + ], + schema: { + ...basicSchema, + [columnName]: { + name: columnName, + type: FieldType.STRING, + }, }, - ], - schema: { - ...basicSchema, - [columnName]: { - name: columnName, - type: FieldType.STRING, + }) + + expect(result).toEqual({ + allValid: false, + errors: { + [columnName]: `${columnName} is a protected column name`, }, - }, - }) + invalidColumns: [], + schemaValidation: { + id: true, + name: true, + [columnName]: false, + }, + }) + } + ) - expect(result).toEqual({ - allValid: false, - errors: { - [columnName]: `${columnName} is a protected column name`, - }, - invalidColumns: [], - schemaValidation: { - id: true, - name: true, - [columnName]: false, - }, - }) - } - ) - - it("validates required fields and valid rows", async () => { - const schema: TableSchema = { - ...basicSchema, - name: { - type: FieldType.STRING, - name: "name", - constraints: { presence: true }, - }, - } - - const result = await testDelegate( - [ - { id: generator.natural(), name: generator.first() }, - { id: generator.natural(), name: generator.first() }, - ], - schema - ) - - expect(result).toEqual({ - allValid: true, - errors: {}, - invalidColumns: [], - schemaValidation: { - id: true, - name: true, - }, - }) - }) - - it("validates required fields and non-valid rows", async () => { - const schema: TableSchema = { - ...basicSchema, - name: { - type: FieldType.STRING, - name: "name", - constraints: { presence: true }, - }, - } - - const result = await testDelegate( - [ - { id: generator.natural(), name: generator.first() }, - { id: generator.natural(), name: "" }, - ], - schema - ) - - expect(result).toEqual({ - allValid: false, - errors: {}, - invalidColumns: [], - schemaValidation: { - id: true, - name: false, - }, - }) - }) - - describe("bb references", () => { - const getUserValues = () => ({ - _id: docIds.generateGlobalUserID(), - primaryDisplay: 
generator.first(), - email: generator.email({}), - }) - - it("can validate user column imports", async () => { + it("validates required fields and valid rows", async () => { const schema: TableSchema = { ...basicSchema, - user: { - type: FieldType.BB_REFERENCE_SINGLE, - subtype: BBReferenceFieldSubType.USER, - name: "user", + name: { + type: FieldType.STRING, + name: "name", + constraints: { presence: true }, }, } const result = await testDelegate( [ - { - id: generator.natural(), - name: generator.first(), - user: userParser(getUserValues()), - }, + { id: generator.natural(), name: generator.first() }, + { id: generator.natural(), name: generator.first() }, ], schema ) @@ -1395,33 +1342,24 @@ datasourceDescribe( schemaValidation: { id: true, name: true, - user: true, }, }) }) - it("can validate user column imports with invalid data", async () => { + it("validates required fields and non-valid rows", async () => { const schema: TableSchema = { ...basicSchema, - user: { - type: FieldType.BB_REFERENCE_SINGLE, - subtype: BBReferenceFieldSubType.USER, - name: "user", + name: { + type: FieldType.STRING, + name: "name", + constraints: { presence: true }, }, } const result = await testDelegate( [ - { - id: generator.natural(), - name: generator.first(), - user: userParser(getUserValues()), - }, - { - id: generator.natural(), - name: generator.first(), - user: "no valid user data", - }, + { id: generator.natural(), name: generator.first() }, + { id: generator.natural(), name: "" }, ], schema ) @@ -1432,84 +1370,164 @@ datasourceDescribe( invalidColumns: [], schemaValidation: { id: true, - name: true, - user: false, + name: false, }, }) }) - it("can validate users column imports", async () => { - const schema: TableSchema = { - ...basicSchema, - user: { - type: FieldType.BB_REFERENCE, - subtype: BBReferenceFieldSubType.USER, - name: "user", - externalType: "array", - }, - } + describe("bb references", () => { + const getUserValues = () => ({ + _id: docIds.generateGlobalUserID(), + primaryDisplay: generator.first(), + email: generator.email({}), + }) - const result = await testDelegate( - [ - { - id: generator.natural(), - name: generator.first(), - user: userParser([ - getUserValues(), - getUserValues(), - getUserValues(), - ]), + it("can validate user column imports", async () => { + const schema: TableSchema = { + ...basicSchema, + user: { + type: FieldType.BB_REFERENCE_SINGLE, + subtype: BBReferenceFieldSubType.USER, + name: "user", }, - ], - schema - ) + } - expect(result).toEqual({ - allValid: true, - errors: {}, - invalidColumns: [], - schemaValidation: { - id: true, - name: true, - user: true, - }, + const result = await testDelegate( + [ + { + id: generator.natural(), + name: generator.first(), + user: userParser(getUserValues()), + }, + ], + schema + ) + + expect(result).toEqual({ + allValid: true, + errors: {}, + invalidColumns: [], + schemaValidation: { + id: true, + name: true, + user: true, + }, + }) + }) + + it("can validate user column imports with invalid data", async () => { + const schema: TableSchema = { + ...basicSchema, + user: { + type: FieldType.BB_REFERENCE_SINGLE, + subtype: BBReferenceFieldSubType.USER, + name: "user", + }, + } + + const result = await testDelegate( + [ + { + id: generator.natural(), + name: generator.first(), + user: userParser(getUserValues()), + }, + { + id: generator.natural(), + name: generator.first(), + user: "no valid user data", + }, + ], + schema + ) + + expect(result).toEqual({ + allValid: false, + errors: {}, + invalidColumns: [], + 
schemaValidation: { + id: true, + name: true, + user: false, + }, + }) + }) + + it("can validate users column imports", async () => { + const schema: TableSchema = { + ...basicSchema, + user: { + type: FieldType.BB_REFERENCE, + subtype: BBReferenceFieldSubType.USER, + name: "user", + externalType: "array", + }, + } + + const result = await testDelegate( + [ + { + id: generator.natural(), + name: generator.first(), + user: userParser([ + getUserValues(), + getUserValues(), + getUserValues(), + ]), + }, + ], + schema + ) + + expect(result).toEqual({ + allValid: true, + errors: {}, + invalidColumns: [], + schemaValidation: { + id: true, + name: true, + user: true, + }, + }) }) }) }) - }) - describe("validateExistingTableImport", () => { - isInternal && - it("can reimport _id fields for internal tables", async () => { - const table = await config.api.table.save( - tableForDatasource(datasource, { - primary: ["id"], - schema: basicSchema, - }) - ) - const result = await config.api.table.validateExistingTableImport({ - tableId: table._id, - rows: [ + describe("validateExistingTableImport", () => { + isInternal && + it("can reimport _id fields for internal tables", async () => { + const table = await config.api.table.save( + tableForDatasource(datasource, { + primary: ["id"], + schema: basicSchema, + }) + ) + const result = await config.api.table.validateExistingTableImport( { - _id: docIds.generateRowID(table._id!), - id: generator.natural(), - name: generator.first(), - }, - ], - }) + tableId: table._id, + rows: [ + { + _id: docIds.generateRowID(table._id!), + id: generator.natural(), + name: generator.first(), + }, + ], + } + ) - expect(result).toEqual({ - allValid: true, - errors: {}, - invalidColumns: [], - schemaValidation: { - _id: true, - id: true, - name: true, - }, + expect(result).toEqual({ + allValid: true, + errors: {}, + invalidColumns: [], + schemaValidation: { + _id: true, + id: true, + name: true, + }, + }) }) - }) + }) }) - }) - } -) + } + ) +} diff --git a/packages/server/src/api/routes/tests/viewV2.spec.ts b/packages/server/src/api/routes/tests/viewV2.spec.ts index 63d315cea9..6e82395e19 100644 --- a/packages/server/src/api/routes/tests/viewV2.spec.ts +++ b/packages/server/src/api/routes/tests/viewV2.spec.ts @@ -44,158 +44,163 @@ import merge from "lodash/merge" import { quotas } from "@budibase/pro" import { db, roles, context } from "@budibase/backend-core" -datasourceDescribe( - { name: "/v2/views (%s)", exclude: [DatabaseName.MONGODB] }, - ({ config, isInternal, dsProvider }) => { - let table: Table - let rawDatasource: Datasource | undefined - let datasource: Datasource | undefined +const descriptions = datasourceDescribe({ exclude: [DatabaseName.MONGODB] }) - function saveTableRequest( - ...overrides: Partial>[] - ): SaveTableRequest { - const req: SaveTableRequest = { - name: generator.guid().replaceAll("-", "").substring(0, 16), - type: "table", - sourceType: datasource - ? TableSourceType.EXTERNAL - : TableSourceType.INTERNAL, - sourceId: datasource ? datasource._id! 
: INTERNAL_TABLE_SOURCE_ID, - primary: ["id"], - schema: { - id: { - type: FieldType.NUMBER, - name: "id", - autocolumn: true, - constraints: { - presence: true, +if (descriptions.length) { + describe.each(descriptions)( + "/v2/views ($dbName)", + ({ config, isInternal, dsProvider }) => { + let table: Table + let rawDatasource: Datasource | undefined + let datasource: Datasource | undefined + + function saveTableRequest( + ...overrides: Partial>[] + ): SaveTableRequest { + const req: SaveTableRequest = { + name: generator.guid().replaceAll("-", "").substring(0, 16), + type: "table", + sourceType: datasource + ? TableSourceType.EXTERNAL + : TableSourceType.INTERNAL, + sourceId: datasource ? datasource._id! : INTERNAL_TABLE_SOURCE_ID, + primary: ["id"], + schema: { + id: { + type: FieldType.NUMBER, + name: "id", + autocolumn: true, + constraints: { + presence: true, + }, }, }, - }, + } + return merge(req, ...overrides) } - return merge(req, ...overrides) - } - function priceTable(): SaveTableRequest { - return saveTableRequest({ - schema: { - Price: { - type: FieldType.NUMBER, - name: "Price", - constraints: {}, - }, - Category: { - type: FieldType.STRING, - name: "Category", - constraints: { - type: "string", + function priceTable(): SaveTableRequest { + return saveTableRequest({ + schema: { + Price: { + type: FieldType.NUMBER, + name: "Price", + constraints: {}, + }, + Category: { + type: FieldType.STRING, + name: "Category", + constraints: { + type: "string", + }, }, }, - }, - }) - } - - beforeAll(async () => { - await config.init() - - const ds = await dsProvider() - rawDatasource = ds.rawDatasource - datasource = ds.datasource - table = await config.api.table.save(priceTable()) - }) - - beforeEach(() => { - jest.clearAllMocks() - mocks.licenses.useCloudFree() - }) - - describe("view crud", () => { - describe("create", () => { - it("persist the view when the view is successfully created", async () => { - const newView: CreateViewRequest = { - name: generator.name(), - tableId: table._id!, - schema: { - id: { visible: true }, - }, - } - const res = await config.api.viewV2.create(newView) - - expect(res).toEqual({ - ...newView, - id: expect.stringMatching(new RegExp(`${table._id!}_`)), - version: 2, - }) }) + } - it("can persist views with all fields", async () => { - const newView: Required> = { - name: generator.name(), - tableId: table._id!, - primaryDisplay: "id", - queryUI: { - groups: [ - { - filters: [ + beforeAll(async () => { + await config.init() + + const ds = await dsProvider() + rawDatasource = ds.rawDatasource + datasource = ds.datasource + table = await config.api.table.save(priceTable()) + }) + + beforeEach(() => { + jest.clearAllMocks() + mocks.licenses.useCloudFree() + }) + + describe("view crud", () => { + describe("create", () => { + it("persist the view when the view is successfully created", async () => { + const newView: CreateViewRequest = { + name: generator.name(), + tableId: table._id!, + schema: { + id: { visible: true }, + }, + } + const res = await config.api.viewV2.create(newView) + + expect(res).toEqual({ + ...newView, + id: expect.stringMatching(new RegExp(`${table._id!}_`)), + version: 2, + }) + }) + + it("can persist views with all fields", async () => { + const newView: Required> = + { + name: generator.name(), + tableId: table._id!, + primaryDisplay: "id", + queryUI: { + groups: [ { - operator: BasicOperator.EQUAL, - field: "field", - value: "value", - }, - ], - }, - ], - }, - sort: { - field: "fieldToSort", - order: SortOrder.DESCENDING, - type: 
SortType.STRING, - }, - schema: { - id: { visible: true }, - Price: { - visible: true, - }, - }, - } - const res = await config.api.viewV2.create(newView) - - const expected: ViewV2 = { - ...newView, - schema: { - id: { visible: true }, - Price: { - visible: true, - }, - }, - query: { - onEmptyFilter: EmptyFilterOption.RETURN_ALL, - $and: { - conditions: [ - { - $and: { - conditions: [ + filters: [ { - equal: { - field: "value", - }, + operator: BasicOperator.EQUAL, + field: "field", + value: "value", }, ], }, + ], + }, + sort: { + field: "fieldToSort", + order: SortOrder.DESCENDING, + type: SortType.STRING, + }, + schema: { + id: { visible: true }, + Price: { + visible: true, }, - ], + }, + } + const res = await config.api.viewV2.create(newView) + + const expected: ViewV2 = { + ...newView, + schema: { + id: { visible: true }, + Price: { + visible: true, + }, }, - }, - id: expect.any(String), - version: 2, - } + query: { + onEmptyFilter: EmptyFilterOption.RETURN_ALL, + $and: { + conditions: [ + { + $and: { + conditions: [ + { + equal: { + field: "value", + }, + }, + ], + }, + }, + ], + }, + }, + id: expect.any(String), + version: 2, + } - expect(res).toEqual(expected) - }) + expect(res).toEqual(expected) + }) - it("can create a view with just a query field, no queryUI, for backwards compatibility", async () => { - const newView: Required> = - { + it("can create a view with just a query field, no queryUI, for backwards compatibility", async () => { + const newView: Required< + Omit + > = { name: generator.name(), tableId: table._id!, primaryDisplay: "id", @@ -218,180 +223,194 @@ datasourceDescribe( }, }, } - const res = await config.api.viewV2.create(newView) + const res = await config.api.viewV2.create(newView) - const expected: ViewV2 = { - ...newView, - schema: { - id: { visible: true }, - Price: { - visible: true, - }, - }, - queryUI: { - logicalOperator: UILogicalOperator.ALL, - onEmptyFilter: EmptyFilterOption.RETURN_ALL, - groups: [ - { - logicalOperator: UILogicalOperator.ALL, - filters: [ - { - operator: BasicOperator.EQUAL, - field: "field", - value: "value", - }, - ], + const expected: ViewV2 = { + ...newView, + schema: { + id: { visible: true }, + Price: { + visible: true, }, - ], - }, - id: expect.any(String), - version: 2, - } - - expect(res).toEqual(expected) - }) - - it("persist only UI schema overrides", async () => { - const newView: CreateViewRequest = { - name: generator.name(), - tableId: table._id!, - schema: { - id: { - name: "id", - type: FieldType.NUMBER, - visible: true, }, - Price: { - name: "Price", - type: FieldType.NUMBER, - visible: true, - order: 1, - width: 100, - }, - Category: { - name: "Category", - type: FieldType.STRING, - visible: false, - icon: "ic", - }, - } as ViewV2Schema, - } - - const createdView = await config.api.viewV2.create(newView) - - expect(createdView).toEqual({ - ...newView, - schema: { - id: { visible: true }, - Price: { - visible: true, - order: 1, - width: 100, - }, - Category: { - visible: false, - icon: "ic", - }, - }, - id: createdView.id, - version: 2, - }) - }) - - it("will not throw an exception if the schema is 'deleting' non UI fields", async () => { - const newView: CreateViewRequest = { - name: generator.name(), - tableId: table._id!, - schema: { - id: { - name: "id", - type: FieldType.NUMBER, - autocolumn: true, - visible: true, - }, - Price: { - name: "Price", - type: FieldType.NUMBER, - visible: true, - }, - Category: { - name: "Category", - type: FieldType.STRING, - }, - } as ViewV2Schema, - } - - await 
config.api.viewV2.create(newView, { - status: 201, - }) - }) - - it("does not persist non-visible fields", async () => { - const newView: CreateViewRequest = { - name: generator.name(), - tableId: table._id!, - primaryDisplay: "id", - schema: { - id: { visible: true }, - Price: { visible: true }, - Category: { visible: false }, - }, - } - const res = await config.api.viewV2.create(newView) - - expect(res).toEqual({ - ...newView, - schema: { - id: { visible: true }, - Price: { visible: true }, - Category: { visible: false }, - }, - id: expect.any(String), - version: 2, - }) - }) - - it("throws bad request when the schema fields are not valid", async () => { - const newView: CreateViewRequest = { - name: generator.name(), - tableId: table._id!, - schema: { - id: { visible: true }, - nonExisting: { - visible: true, - }, - }, - } - await config.api.viewV2.create(newView, { - status: 400, - body: { - message: - 'Field "nonExisting" is not valid for the requested table', - }, - }) - }) - - describe("readonly fields", () => { - it("readonly fields are persisted", async () => { - const table = await config.api.table.save( - saveTableRequest({ - schema: { - name: { - name: "name", - type: FieldType.STRING, + queryUI: { + logicalOperator: UILogicalOperator.ALL, + onEmptyFilter: EmptyFilterOption.RETURN_ALL, + groups: [ + { + logicalOperator: UILogicalOperator.ALL, + filters: [ + { + operator: BasicOperator.EQUAL, + field: "field", + value: "value", + }, + ], }, - description: { - name: "description", - type: FieldType.STRING, - }, - }, - }) - ) + ], + }, + id: expect.any(String), + version: 2, + } + expect(res).toEqual(expected) + }) + + it("persist only UI schema overrides", async () => { const newView: CreateViewRequest = { name: generator.name(), tableId: table._id!, schema: { + id: { + name: "id", + type: FieldType.NUMBER, + visible: true, + }, + Price: { + name: "Price", + type: FieldType.NUMBER, + visible: true, + order: 1, + width: 100, + }, + Category: { + name: "Category", + type: FieldType.STRING, + visible: false, + icon: "ic", + }, + } as ViewV2Schema, + } + + const createdView = await config.api.viewV2.create(newView) + + expect(createdView).toEqual({ + ...newView, + schema: { + id: { visible: true }, + Price: { + visible: true, + order: 1, + width: 100, + }, + Category: { + visible: false, + icon: "ic", + }, + }, + id: createdView.id, + version: 2, + }) + }) + + it("will not throw an exception if the schema is 'deleting' non UI fields", async () => { + const newView: CreateViewRequest = { + name: generator.name(), + tableId: table._id!, + schema: { + id: { + name: "id", + type: FieldType.NUMBER, + autocolumn: true, + visible: true, + }, + Price: { + name: "Price", + type: FieldType.NUMBER, + visible: true, + }, + Category: { + name: "Category", + type: FieldType.STRING, + }, + } as ViewV2Schema, + } + + await config.api.viewV2.create(newView, { + status: 201, + }) + }) + + it("does not persist non-visible fields", async () => { + const newView: CreateViewRequest = { + name: generator.name(), + tableId: table._id!, + primaryDisplay: "id", + schema: { + id: { visible: true }, + Price: { visible: true }, + Category: { visible: false }, + }, + } + const res = await config.api.viewV2.create(newView) + + expect(res).toEqual({ + ...newView, + schema: { + id: { visible: true }, + Price: { visible: true }, + Category: { visible: false }, + }, + id: expect.any(String), + version: 2, + }) + }) + + it("throws bad request when the schema fields are not valid", async () => { + const newView: 
CreateViewRequest = { + name: generator.name(), + tableId: table._id!, + schema: { + id: { visible: true }, + nonExisting: { + visible: true, + }, + }, + } + await config.api.viewV2.create(newView, { + status: 400, + body: { + message: + 'Field "nonExisting" is not valid for the requested table', + }, + }) + }) + + describe("readonly fields", () => { + it("readonly fields are persisted", async () => { + const table = await config.api.table.save( + saveTableRequest({ + schema: { + name: { + name: "name", + type: FieldType.STRING, + }, + description: { + name: "description", + type: FieldType.STRING, + }, + }, + }) + ) + + const newView: CreateViewRequest = { + name: generator.name(), + tableId: table._id!, + schema: { + id: { visible: true }, + name: { + visible: true, + readonly: true, + }, + description: { + visible: true, + readonly: true, + }, + }, + } + + const res = await config.api.viewV2.create(newView) + expect(res.schema).toEqual({ id: { visible: true }, name: { visible: true, @@ -401,63 +420,122 @@ datasourceDescribe( visible: true, readonly: true, }, - }, - } - - const res = await config.api.viewV2.create(newView) - expect(res.schema).toEqual({ - id: { visible: true }, - name: { - visible: true, - readonly: true, - }, - description: { - visible: true, - readonly: true, - }, - }) - }) - - it("required fields cannot be marked as readonly", async () => { - const table = await config.api.table.save( - saveTableRequest({ - schema: { - name: { - name: "name", - type: FieldType.STRING, - constraints: { presence: true }, - }, - description: { - name: "description", - type: FieldType.STRING, - }, - }, }) - ) + }) - const newView: CreateViewRequest = { - name: generator.name(), - tableId: table._id!, - schema: { - id: { visible: true }, - name: { - visible: true, - readonly: true, + it("required fields cannot be marked as readonly", async () => { + const table = await config.api.table.save( + saveTableRequest({ + schema: { + name: { + name: "name", + type: FieldType.STRING, + constraints: { presence: true }, + }, + description: { + name: "description", + type: FieldType.STRING, + }, + }, + }) + ) + + const newView: CreateViewRequest = { + name: generator.name(), + tableId: table._id!, + schema: { + id: { visible: true }, + name: { + visible: true, + readonly: true, + }, }, - }, - } + } - await config.api.viewV2.create(newView, { - status: 400, - body: { - message: - 'You can\'t make "name" readonly because it is a required field.', + await config.api.viewV2.create(newView, { status: 400, - }, + body: { + message: + 'You can\'t make "name" readonly because it is a required field.', + status: 400, + }, + }) + }) + + it("readonly fields must be visible", async () => { + const table = await config.api.table.save( + saveTableRequest({ + schema: { + name: { + name: "name", + type: FieldType.STRING, + }, + description: { + name: "description", + type: FieldType.STRING, + }, + }, + }) + ) + + const newView: CreateViewRequest = { + name: generator.name(), + tableId: table._id!, + schema: { + id: { visible: true }, + name: { + visible: false, + readonly: true, + }, + }, + } + + await config.api.viewV2.create(newView, { + status: 400, + body: { + message: + 'Field "name" must be visible if you want to make it readonly', + status: 400, + }, + }) + }) + + it("readonly fields can be used on free license", async () => { + mocks.licenses.useCloudFree() + const table = await config.api.table.save( + saveTableRequest({ + schema: { + name: { + name: "name", + type: FieldType.STRING, + }, + description: { + 
name: "description", + type: FieldType.STRING, + }, + }, + }) + ) + + const newView: CreateViewRequest = { + name: generator.name(), + tableId: table._id!, + schema: { + id: { visible: true }, + name: { + visible: true, + readonly: true, + }, + }, + } + + await config.api.viewV2.create(newView, { + status: 201, + }) }) }) - it("readonly fields must be visible", async () => { + it("display fields must be visible", async () => { const table = await config.api.table.save( saveTableRequest({ schema: { @@ -476,11 +554,11 @@ datasourceDescribe( const newView: CreateViewRequest = { name: generator.name(), tableId: table._id!, + primaryDisplay: "name", schema: { id: { visible: true }, name: { visible: false, - readonly: true, }, }, } @@ -489,14 +567,13 @@ datasourceDescribe( status: 400, body: { message: - 'Field "name" must be visible if you want to make it readonly', + 'You can\'t hide "name" because it is the display column.', status: 400, }, }) }) - it("readonly fields can be used on free license", async () => { - mocks.licenses.useCloudFree() + it("display fields can be readonly", async () => { const table = await config.api.table.save( saveTableRequest({ schema: { @@ -515,6 +592,7 @@ datasourceDescribe( const newView: CreateViewRequest = { name: generator.name(), tableId: table._id!, + primaryDisplay: "name", schema: { id: { visible: true }, name: { @@ -528,134 +606,9 @@ datasourceDescribe( status: 201, }) }) - }) - it("display fields must be visible", async () => { - const table = await config.api.table.save( - saveTableRequest({ - schema: { - name: { - name: "name", - type: FieldType.STRING, - }, - description: { - name: "description", - type: FieldType.STRING, - }, - }, - }) - ) - - const newView: CreateViewRequest = { - name: generator.name(), - tableId: table._id!, - primaryDisplay: "name", - schema: { - id: { visible: true }, - name: { - visible: false, - }, - }, - } - - await config.api.viewV2.create(newView, { - status: 400, - body: { - message: - 'You can\'t hide "name" because it is the display column.', - status: 400, - }, - }) - }) - - it("display fields can be readonly", async () => { - const table = await config.api.table.save( - saveTableRequest({ - schema: { - name: { - name: "name", - type: FieldType.STRING, - }, - description: { - name: "description", - type: FieldType.STRING, - }, - }, - }) - ) - - const newView: CreateViewRequest = { - name: generator.name(), - tableId: table._id!, - primaryDisplay: "name", - schema: { - id: { visible: true }, - name: { - visible: true, - readonly: true, - }, - }, - } - - await config.api.viewV2.create(newView, { - status: 201, - }) - }) - - it("can create a view with calculation fields", async () => { - let view = await config.api.viewV2.create({ - tableId: table._id!, - name: generator.guid(), - type: ViewV2Type.CALCULATION, - schema: { - sum: { - visible: true, - calculationType: CalculationType.SUM, - field: "Price", - }, - }, - }) - - expect(Object.keys(view.schema!)).toHaveLength(1) - - let sum = view.schema!.sum as NumericCalculationFieldMetadata - expect(sum).toBeDefined() - expect(sum.calculationType).toEqual(CalculationType.SUM) - expect(sum.field).toEqual("Price") - - view = await config.api.viewV2.get(view.id) - sum = view.schema!.sum as NumericCalculationFieldMetadata - expect(sum).toBeDefined() - expect(sum.calculationType).toEqual(CalculationType.SUM) - expect(sum.field).toEqual("Price") - }) - - it("cannot create a view with calculation fields unless it has the right type", async () => { - await config.api.viewV2.create( - 
{ - tableId: table._id!, - name: generator.guid(), - schema: { - sum: { - visible: true, - calculationType: CalculationType.SUM, - field: "Price", - }, - }, - }, - { - status: 400, - body: { - message: - "Calculation fields are not allowed in non-calculation views", - }, - } - ) - }) - - it("cannot create a calculation view with more than 5 aggregations", async () => { - await config.api.viewV2.create( - { + it("can create a view with calculation fields", async () => { + let view = await config.api.viewV2.create({ tableId: table._id!, name: generator.guid(), type: ViewV2Type.CALCULATION, @@ -665,622 +618,33 @@ datasourceDescribe( calculationType: CalculationType.SUM, field: "Price", }, - count: { - visible: true, - calculationType: CalculationType.COUNT, - field: "Price", - }, - countDistinct: { - visible: true, - calculationType: CalculationType.COUNT, - distinct: true, - field: "Price", - }, - min: { - visible: true, - calculationType: CalculationType.MIN, - field: "Price", - }, - max: { - visible: true, - calculationType: CalculationType.MAX, - field: "Price", - }, - avg: { - visible: true, - calculationType: CalculationType.AVG, - field: "Price", - }, - }, - }, - { - status: 400, - body: { - message: - "Calculation views can only have a maximum of 5 fields", - }, - } - ) - }) - - it("cannot create a calculation view with duplicate calculations", async () => { - await config.api.viewV2.create( - { - tableId: table._id!, - name: generator.guid(), - type: ViewV2Type.CALCULATION, - schema: { - sum: { - visible: true, - calculationType: CalculationType.SUM, - field: "Price", - }, - sum2: { - visible: true, - calculationType: CalculationType.SUM, - field: "Price", - }, - }, - }, - { - status: 400, - body: { - message: - 'Duplicate calculation on field "Price", calculation type "sum"', - }, - } - ) - }) - - it("finds duplicate counts", async () => { - await config.api.viewV2.create( - { - tableId: table._id!, - name: generator.guid(), - type: ViewV2Type.CALCULATION, - schema: { - count: { - visible: true, - calculationType: CalculationType.COUNT, - field: "Price", - }, - count2: { - visible: true, - calculationType: CalculationType.COUNT, - field: "Price", - }, - }, - }, - { - status: 400, - body: { - message: - 'Duplicate calculation on field "Price", calculation type "count"', - }, - } - ) - }) - - it("finds duplicate count distincts", async () => { - await config.api.viewV2.create( - { - tableId: table._id!, - name: generator.guid(), - type: ViewV2Type.CALCULATION, - schema: { - count: { - visible: true, - calculationType: CalculationType.COUNT, - distinct: true, - field: "Price", - }, - count2: { - visible: true, - calculationType: CalculationType.COUNT, - distinct: true, - field: "Price", - }, - }, - }, - { - status: 400, - body: { - message: - 'Duplicate calculation on field "Price", calculation type "count distinct"', - }, - } - ) - }) - - it("does not confuse counts and count distincts in the duplicate check", async () => { - await config.api.viewV2.create({ - tableId: table._id!, - name: generator.guid(), - type: ViewV2Type.CALCULATION, - schema: { - count: { - visible: true, - calculationType: CalculationType.COUNT, - field: "Price", - }, - count2: { - visible: true, - calculationType: CalculationType.COUNT, - distinct: true, - field: "Price", - }, - }, - }) - }) - - it("does not confuse counts on different fields in the duplicate check", async () => { - await config.api.viewV2.create({ - tableId: table._id!, - name: generator.guid(), - type: ViewV2Type.CALCULATION, - schema: { - 
count: { - visible: true, - calculationType: CalculationType.COUNT, - field: "Price", - }, - count2: { - visible: true, - calculationType: CalculationType.COUNT, - field: "Category", - }, - }, - }) - }) - - it("does not get confused when a calculation field shadows a basic one", async () => { - const table = await config.api.table.save( - saveTableRequest({ - schema: { - age: { - name: "age", - type: FieldType.NUMBER, - }, }, }) - ) - await config.api.row.bulkImport(table._id!, { - rows: [{ age: 1 }, { age: 2 }, { age: 3 }], + expect(Object.keys(view.schema!)).toHaveLength(1) + + let sum = view.schema!.sum as NumericCalculationFieldMetadata + expect(sum).toBeDefined() + expect(sum.calculationType).toEqual(CalculationType.SUM) + expect(sum.field).toEqual("Price") + + view = await config.api.viewV2.get(view.id) + sum = view.schema!.sum as NumericCalculationFieldMetadata + expect(sum).toBeDefined() + expect(sum.calculationType).toEqual(CalculationType.SUM) + expect(sum.field).toEqual("Price") }) - const view = await config.api.viewV2.create({ - tableId: table._id!, - name: generator.guid(), - type: ViewV2Type.CALCULATION, - schema: { - age: { - visible: true, - calculationType: CalculationType.SUM, - field: "age", - }, - }, - }) - - const { rows } = await config.api.row.search(view.id) - expect(rows).toHaveLength(1) - expect(rows[0].age).toEqual(6) - }) - - // We don't allow the creation of tables with most JsonTypes when using - // external datasources. - isInternal && - it("cannot use complex types as group-by fields", async () => { - for (const type of JsonTypes) { - const field = { name: "field", type } as FieldSchema - const table = await config.api.table.save( - saveTableRequest({ schema: { field } }) - ) - await config.api.viewV2.create( - { - tableId: table._id!, - name: generator.guid(), - type: ViewV2Type.CALCULATION, - schema: { - field: { visible: true }, - }, - }, - { - status: 400, - body: { - message: `Grouping by fields of type "${type}" is not supported`, - }, - } - ) - } - }) - - isInternal && - it("shouldn't trigger a complex type check on a group by field if field is invisible", async () => { - const table = await config.api.table.save( - saveTableRequest({ - schema: { - field: { - name: "field", - type: FieldType.JSON, - }, - }, - }) - ) - + it("cannot create a view with calculation fields unless it has the right type", async () => { await config.api.viewV2.create( { tableId: table._id!, name: generator.guid(), - type: ViewV2Type.CALCULATION, schema: { - field: { visible: false }, - }, - }, - { - status: 201, - } - ) - }) - }) - - describe("update", () => { - let view: ViewV2 - let table: Table - - beforeEach(async () => { - table = await config.api.table.save(priceTable()) - - view = await config.api.viewV2.create({ - tableId: table._id!, - name: generator.guid(), - schema: { - id: { visible: true }, - }, - }) - }) - - it("can update an existing view data", async () => { - const tableId = table._id! - await config.api.viewV2.update({ - ...view, - query: [ - { - operator: BasicOperator.EQUAL, - field: "newField", - value: "thatValue", - }, - ], - }) - - const expected: ViewV2 = { - ...view, - query: [ - { - operator: BasicOperator.EQUAL, - field: "newField", - value: "thatValue", - }, - ], - // Should also update queryUI because query was not previously set. 
- queryUI: { - onEmptyFilter: EmptyFilterOption.RETURN_ALL, - logicalOperator: UILogicalOperator.ALL, - groups: [ - { - logicalOperator: UILogicalOperator.ALL, - filters: [ - { - operator: BasicOperator.EQUAL, - field: "newField", - value: "thatValue", - }, - ], - }, - ], - }, - schema: expect.anything(), - } - - expect((await config.api.table.get(tableId)).views).toEqual({ - [view.name]: expected, - }) - }) - - it("can update all fields", async () => { - const tableId = table._id! - - const updatedData: Required< - Omit - > = { - version: view.version, - id: view.id, - tableId, - name: view.name, - primaryDisplay: "Price", - query: [ - { - operator: BasicOperator.EQUAL, - field: "newField", - value: "newValue", - }, - ], - sort: { - field: generator.word(), - order: SortOrder.DESCENDING, - type: SortType.STRING, - }, - schema: { - id: { visible: true }, - Category: { - visible: false, - }, - Price: { - visible: true, - readonly: true, - }, - }, - } - await config.api.viewV2.update(updatedData) - - const expected: ViewV2 = { - ...updatedData, - // queryUI gets generated from query - queryUI: { - logicalOperator: UILogicalOperator.ALL, - onEmptyFilter: EmptyFilterOption.RETURN_ALL, - groups: [ - { - logicalOperator: UILogicalOperator.ALL, - filters: [ - { - operator: BasicOperator.EQUAL, - field: "newField", - value: "newValue", - }, - ], - }, - ], - }, - schema: { - ...table.schema, - id: expect.objectContaining({ - visible: true, - }), - Category: expect.objectContaining({ - visible: false, - }), - Price: expect.objectContaining({ - visible: true, - readonly: true, - }), - }, - } - - expect((await config.api.table.get(tableId)).views).toEqual({ - [view.name]: expected, - }) - }) - - it("can update an existing view name", async () => { - const tableId = table._id! - const newName = generator.guid() - await config.api.viewV2.update({ ...view, name: newName }) - - expect(await config.api.table.get(tableId)).toEqual( - expect.objectContaining({ - views: { - [newName]: { - ...view, - name: newName, - schema: expect.anything(), - }, - }, - }) - ) - }) - - it("cannot update an unexisting views nor edit ids", async () => { - const tableId = table._id! - await config.api.viewV2.update( - { ...view, id: generator.guid() }, - { status: 404 } - ) - - expect(await config.api.table.get(tableId)).toEqual( - expect.objectContaining({ - views: { - [view.name]: { - ...view, - schema: expect.anything(), - }, - }, - }) - ) - }) - - it("cannot update views with the wrong tableId", async () => { - const tableId = table._id! 
- await config.api.viewV2.update( - { - ...view, - tableId: generator.guid(), - query: [ - { - operator: BasicOperator.EQUAL, - field: "newField", - value: "thatValue", - }, - ], - }, - { status: 404 } - ) - - expect(await config.api.table.get(tableId)).toEqual( - expect.objectContaining({ - views: { - [view.name]: { - ...view, - schema: expect.anything(), - }, - }, - }) - ) - }) - - isInternal && - it("cannot update views v1", async () => { - const viewV1 = await config.api.legacyView.save({ - tableId: table._id!, - name: generator.guid(), - filters: [], - schema: {}, - }) - - await config.api.viewV2.update(viewV1 as unknown as ViewV2, { - status: 400, - body: { - message: "Only views V2 can be updated", - status: 400, - }, - }) - }) - - it("cannot update the a view with unmatching ids between url and body", async () => { - const anotherView = await config.api.viewV2.create({ - tableId: table._id!, - name: generator.guid(), - schema: { - id: { visible: true }, - }, - }) - const result = await config - .request!.put(`/api/v2/views/${anotherView.id}`) - .send(view) - .set(config.defaultHeaders()) - .expect("Content-Type", /json/) - .expect(400) - - expect(result.body).toEqual({ - message: "View id does not match between the body and the uri path", - status: 400, - }) - }) - - it("updates only UI schema overrides", async () => { - const updatedView = await config.api.viewV2.update({ - ...view, - schema: { - ...view.schema, - Price: { - name: "Price", - type: FieldType.NUMBER, - visible: true, - order: 1, - width: 100, - }, - Category: { - name: "Category", - type: FieldType.STRING, - visible: false, - icon: "ic", - }, - } as ViewV2Schema, - }) - - expect(updatedView).toEqual({ - ...view, - schema: { - id: { visible: true }, - Price: { - visible: true, - order: 1, - width: 100, - }, - Category: { visible: false, icon: "ic" }, - }, - id: view.id, - version: 2, - }) - }) - - it("will not throw an exception if the schema is 'deleting' non UI fields", async () => { - await config.api.viewV2.update( - { - ...view, - schema: { - ...view.schema, - Price: { - name: "Price", - type: FieldType.NUMBER, - visible: true, - }, - Category: { - name: "Category", - type: FieldType.STRING, - }, - } as ViewV2Schema, - }, - { - status: 200, - } - ) - }) - - it("cannot update view type after creation", async () => { - const view = await config.api.viewV2.create({ - tableId: table._id!, - name: generator.guid(), - schema: { - id: { visible: true }, - Price: { - visible: true, - }, - }, - }) - - await config.api.viewV2.update( - { - ...view, - type: ViewV2Type.CALCULATION, - }, - { - status: 400, - body: { - message: "Cannot update view type after creation", - }, - } - ) - }) - - isInternal && - it("updating schema will only validate modified field", async () => { - let view = await config.api.viewV2.create({ - tableId: table._id!, - name: generator.guid(), - schema: { - id: { visible: true }, - Price: { - visible: true, - }, - Category: { visible: true }, - }, - }) - - // Update the view to an invalid state - const tableToUpdate = await config.api.table.get(table._id!) 
- ;(tableToUpdate.views![view.name] as ViewV2).schema!.id.visible = - false - await db.getDB(config.appId!).put(tableToUpdate) - - view = await config.api.viewV2.get(view.id) - await config.api.viewV2.update( - { - ...view, - schema: { - ...view.schema, - Price: { - visible: false, + sum: { + visible: true, + calculationType: CalculationType.SUM, + field: "Price", }, }, }, @@ -1288,172 +652,196 @@ datasourceDescribe( status: 400, body: { message: - 'You can\'t hide "id" because it is a required field.', - status: 400, + "Calculation fields are not allowed in non-calculation views", }, } ) }) - it("can update queryUI field and query gets regenerated", async () => { - await config.api.viewV2.update({ - ...view, - queryUI: { - groups: [ - { - filters: [ - { - operator: BasicOperator.EQUAL, - field: "field", - value: "value", - }, - ], - }, - ], - }, - }) - - let updatedView = await config.api.viewV2.get(view.id) - let expected: SearchFilters = { - onEmptyFilter: EmptyFilterOption.RETURN_ALL, - $and: { - conditions: [ - { - $and: { - conditions: [ - { - equal: { field: "value" }, - }, - ], - }, - }, - ], - }, - } - expect(updatedView.query).toEqual(expected) - - await config.api.viewV2.update({ - ...updatedView, - queryUI: { - groups: [ - { - filters: [ - { - operator: BasicOperator.EQUAL, - field: "newField", - value: "newValue", - }, - ], - }, - ], - }, - }) - - updatedView = await config.api.viewV2.get(view.id) - expected = { - onEmptyFilter: EmptyFilterOption.RETURN_ALL, - $and: { - conditions: [ - { - $and: { - conditions: [ - { - equal: { newField: "newValue" }, - }, - ], - }, - }, - ], - }, - } - expect(updatedView.query).toEqual(expected) - }) - - it("can delete either query and it will get regenerated from queryUI", async () => { - await config.api.viewV2.update({ - ...view, - query: [ + it("cannot create a calculation view with more than 5 aggregations", async () => { + await config.api.viewV2.create( { - operator: BasicOperator.EQUAL, - field: "field", - value: "value", - }, - ], - }) - - let updatedView = await config.api.viewV2.get(view.id) - expect(updatedView.queryUI).toBeDefined() - - await config.api.viewV2.update({ - ...updatedView, - query: undefined, - }) - - updatedView = await config.api.viewV2.get(view.id) - expect(updatedView.query).toBeDefined() - }) - - // This is because the conversion from queryUI -> query loses data, so you - // can't accurately reproduce the original queryUI from the query. If - // query is a LegacyFilter[] we allow it, because for Budibase v3 - // everything in the db had query set to a LegacyFilter[], and there's no - // loss of information converting from a LegacyFilter[] to a - // UISearchFilter. But we convert to a SearchFilters and that can't be - // accurately converted to a UISearchFilter. 
- it("can't regenerate queryUI from a query once it has been generated from a queryUI", async () => { - await config.api.viewV2.update({ - ...view, - queryUI: { - groups: [ - { - filters: [ - { - operator: BasicOperator.EQUAL, - field: "field", - value: "value", - }, - ], + tableId: table._id!, + name: generator.guid(), + type: ViewV2Type.CALCULATION, + schema: { + sum: { + visible: true, + calculationType: CalculationType.SUM, + field: "Price", + }, + count: { + visible: true, + calculationType: CalculationType.COUNT, + field: "Price", + }, + countDistinct: { + visible: true, + calculationType: CalculationType.COUNT, + distinct: true, + field: "Price", + }, + min: { + visible: true, + calculationType: CalculationType.MIN, + field: "Price", + }, + max: { + visible: true, + calculationType: CalculationType.MAX, + field: "Price", + }, + avg: { + visible: true, + calculationType: CalculationType.AVG, + field: "Price", + }, }, - ], - }, + }, + { + status: 400, + body: { + message: + "Calculation views can only have a maximum of 5 fields", + }, + } + ) }) - let updatedView = await config.api.viewV2.get(view.id) - expect(updatedView.query).toBeDefined() - - await config.api.viewV2.update( - { - ...updatedView, - queryUI: undefined, - }, - { - status: 400, - body: { - message: "view is missing queryUI field", + it("cannot create a calculation view with duplicate calculations", async () => { + await config.api.viewV2.create( + { + tableId: table._id!, + name: generator.guid(), + type: ViewV2Type.CALCULATION, + schema: { + sum: { + visible: true, + calculationType: CalculationType.SUM, + field: "Price", + }, + sum2: { + visible: true, + calculationType: CalculationType.SUM, + field: "Price", + }, + }, }, - } - ) - }) + { + status: 400, + body: { + message: + 'Duplicate calculation on field "Price", calculation type "sum"', + }, + } + ) + }) - describe("calculation views", () => { - let table: Table - let view: ViewV2 + it("finds duplicate counts", async () => { + await config.api.viewV2.create( + { + tableId: table._id!, + name: generator.guid(), + type: ViewV2Type.CALCULATION, + schema: { + count: { + visible: true, + calculationType: CalculationType.COUNT, + field: "Price", + }, + count2: { + visible: true, + calculationType: CalculationType.COUNT, + field: "Price", + }, + }, + }, + { + status: 400, + body: { + message: + 'Duplicate calculation on field "Price", calculation type "count"', + }, + } + ) + }) - beforeEach(async () => { - table = await config.api.table.save( + it("finds duplicate count distincts", async () => { + await config.api.viewV2.create( + { + tableId: table._id!, + name: generator.guid(), + type: ViewV2Type.CALCULATION, + schema: { + count: { + visible: true, + calculationType: CalculationType.COUNT, + distinct: true, + field: "Price", + }, + count2: { + visible: true, + calculationType: CalculationType.COUNT, + distinct: true, + field: "Price", + }, + }, + }, + { + status: 400, + body: { + message: + 'Duplicate calculation on field "Price", calculation type "count distinct"', + }, + } + ) + }) + + it("does not confuse counts and count distincts in the duplicate check", async () => { + await config.api.viewV2.create({ + tableId: table._id!, + name: generator.guid(), + type: ViewV2Type.CALCULATION, + schema: { + count: { + visible: true, + calculationType: CalculationType.COUNT, + field: "Price", + }, + count2: { + visible: true, + calculationType: CalculationType.COUNT, + distinct: true, + field: "Price", + }, + }, + }) + }) + + it("does not confuse counts on different fields 
in the duplicate check", async () => { + await config.api.viewV2.create({ + tableId: table._id!, + name: generator.guid(), + type: ViewV2Type.CALCULATION, + schema: { + count: { + visible: true, + calculationType: CalculationType.COUNT, + field: "Price", + }, + count2: { + visible: true, + calculationType: CalculationType.COUNT, + field: "Category", + }, + }, + }) + }) + + it("does not get confused when a calculation field shadows a basic one", async () => { + const table = await config.api.table.save( saveTableRequest({ schema: { - name: { - name: "name", - type: FieldType.STRING, - constraints: { - presence: true, - }, - }, - country: { - name: "country", - type: FieldType.STRING, - }, age: { name: "age", type: FieldType.NUMBER, @@ -1462,14 +850,15 @@ datasourceDescribe( }) ) - view = await config.api.viewV2.create({ + await config.api.row.bulkImport(table._id!, { + rows: [{ age: 1 }, { age: 2 }, { age: 3 }], + }) + + const view = await config.api.viewV2.create({ tableId: table._id!, name: generator.guid(), type: ViewV2Type.CALCULATION, schema: { - country: { - visible: true, - }, age: { visible: true, calculationType: CalculationType.SUM, @@ -1478,774 +867,1200 @@ datasourceDescribe( }, }) - await config.api.row.bulkImport(table._id!, { - rows: [ + const { rows } = await config.api.row.search(view.id) + expect(rows).toHaveLength(1) + expect(rows[0].age).toEqual(6) + }) + + // We don't allow the creation of tables with most JsonTypes when using + // external datasources. + isInternal && + it("cannot use complex types as group-by fields", async () => { + for (const type of JsonTypes) { + const field = { name: "field", type } as FieldSchema + const table = await config.api.table.save( + saveTableRequest({ schema: { field } }) + ) + await config.api.viewV2.create( + { + tableId: table._id!, + name: generator.guid(), + type: ViewV2Type.CALCULATION, + schema: { + field: { visible: true }, + }, + }, + { + status: 400, + body: { + message: `Grouping by fields of type "${type}" is not supported`, + }, + } + ) + } + }) + + isInternal && + it("shouldn't trigger a complex type check on a group by field if field is invisible", async () => { + const table = await config.api.table.save( + saveTableRequest({ + schema: { + field: { + name: "field", + type: FieldType.JSON, + }, + }, + }) + ) + + await config.api.viewV2.create( { - name: "Steve", - age: 30, - country: "UK", + tableId: table._id!, + name: generator.guid(), + type: ViewV2Type.CALCULATION, + schema: { + field: { visible: false }, + }, }, { - name: "Jane", - age: 31, - country: "UK", - }, + status: 201, + } + ) + }) + }) + + describe("update", () => { + let view: ViewV2 + let table: Table + + beforeEach(async () => { + table = await config.api.table.save(priceTable()) + + view = await config.api.viewV2.create({ + tableId: table._id!, + name: generator.guid(), + schema: { + id: { visible: true }, + }, + }) + }) + + it("can update an existing view data", async () => { + const tableId = table._id! 
+ await config.api.viewV2.update({ + ...view, + query: [ { - name: "Ruari", - age: 32, - country: "USA", - }, - { - name: "Alice", - age: 33, - country: "USA", + operator: BasicOperator.EQUAL, + field: "newField", + value: "thatValue", }, ], }) - }) - it("returns the expected rows prior to modification", async () => { - const { rows } = await config.api.row.search(view.id) - expect(rows).toHaveLength(2) - expect(rows).toEqual( - expect.arrayContaining([ + const expected: ViewV2 = { + ...view, + query: [ { - country: "USA", - age: 65, + operator: BasicOperator.EQUAL, + field: "newField", + value: "thatValue", }, - { - country: "UK", - age: 61, - }, - ]) - ) - }) - - it("can remove a group by field", async () => { - delete view.schema!.country - await config.api.viewV2.update(view) - - const { rows } = await config.api.row.search(view.id) - expect(rows).toHaveLength(1) - expect(rows).toEqual( - expect.arrayContaining([ - { - age: 126, - }, - ]) - ) - }) - - it("can remove a calculation field", async () => { - delete view.schema!.age - await config.api.viewV2.update(view) - - const { rows } = await config.api.row.search(view.id) - expect(rows).toHaveLength(4) - - // Because the removal of the calculation field actually makes this - // no longer a calculation view, these rows will now have _id and - // _rev fields. - expect(rows).toEqual( - expect.arrayContaining([ - expect.objectContaining({ country: "UK" }), - expect.objectContaining({ country: "UK" }), - expect.objectContaining({ country: "USA" }), - expect.objectContaining({ country: "USA" }), - ]) - ) - }) - - it("can add a new group by field", async () => { - view.schema!.name = { visible: true } - await config.api.viewV2.update(view) - - const { rows } = await config.api.row.search(view.id) - expect(rows).toHaveLength(4) - expect(rows).toEqual( - expect.arrayContaining([ - { - name: "Steve", - age: 30, - country: "UK", - }, - { - name: "Jane", - age: 31, - country: "UK", - }, - { - name: "Ruari", - age: 32, - country: "USA", - }, - { - name: "Alice", - age: 33, - country: "USA", - }, - ]) - ) - }) - - it("can add a new group by field that is invisible, even if required on the table", async () => { - view.schema!.name = { visible: false } - await config.api.viewV2.update(view) - - const { rows } = await config.api.row.search(view.id) - expect(rows).toHaveLength(2) - expect(rows).toEqual( - expect.arrayContaining([ - { - country: "USA", - age: 65, - }, - { - country: "UK", - age: 61, - }, - ]) - ) - }) - - it("can add a new calculation field", async () => { - view.schema!.count = { - visible: true, - calculationType: CalculationType.COUNT, - field: "age", - } - await config.api.viewV2.update(view) - - const { rows } = await config.api.row.search(view.id) - expect(rows).toHaveLength(2) - expect(rows).toEqual( - expect.arrayContaining([ - { - country: "USA", - age: 65, - count: 2, - }, - { - country: "UK", - age: 61, - count: 2, - }, - ]) - ) - }) - }) - }) - - describe("delete", () => { - let view: ViewV2 - - beforeAll(async () => { - view = await config.api.viewV2.create({ - tableId: table._id!, - name: generator.guid(), - schema: { - id: { visible: true }, - }, - }) - }) - - it("can delete an existing view", async () => { - const tableId = table._id! 
- const getPersistedView = async () => - (await config.api.table.get(tableId)).views![view.name] - - expect(await getPersistedView()).toBeDefined() - - await config.api.viewV2.delete(view.id) - - expect(await getPersistedView()).toBeUndefined() - }) - }) - - describe.each([ - ["from view api", (view: ViewV2) => config.api.viewV2.get(view.id)], - [ - "from table", - async (view: ViewV2) => { - const table = await config.api.table.get(view.tableId) - return table.views![view.name] as ViewV2 - }, - ], - ])("read (%s)", (_, getDelegate) => { - let table: Table - let tableId: string - - beforeEach(async () => { - table = await config.api.table.save( - saveTableRequest({ - schema: { - one: { - type: FieldType.STRING, - name: "one", - }, - two: { - type: FieldType.STRING, - name: "two", - }, - three: { - type: FieldType.STRING, - name: "three", - }, - }, - }) - ) - tableId = table._id! - }) - - it("retrieves the view data with the enriched schema", async () => { - const view = await config.api.viewV2.create({ - tableId, - name: generator.guid(), - schema: { - id: { visible: true }, - one: { visible: true }, - two: { visible: true }, - }, - }) - - expect(await getDelegate(view)).toEqual({ - ...view, - schema: { - id: { ...table.schema["id"], visible: true }, - one: { ...table.schema["one"], visible: true }, - two: { ...table.schema["two"], visible: true }, - three: { ...table.schema["three"], visible: false }, - }, - }) - }) - - it("does not include columns removed from the table", async () => { - const view = await config.api.viewV2.create({ - tableId, - name: generator.guid(), - schema: { - id: { visible: true }, - one: { visible: true }, - two: { visible: true }, - }, - }) - const table = await config.api.table.get(tableId) - const { one: _, ...newSchema } = table.schema - await config.api.table.save({ ...table, schema: newSchema }) - - expect(await getDelegate(view)).toEqual({ - ...view, - schema: { - id: { ...table.schema["id"], visible: true }, - two: { ...table.schema["two"], visible: true }, - three: { ...table.schema["three"], visible: false }, - }, - }) - }) - - it("does not include columns hidden from the table", async () => { - const view = await config.api.viewV2.create({ - tableId, - name: generator.guid(), - schema: { - id: { visible: true }, - one: { visible: true }, - two: { visible: true }, - }, - }) - const table = await config.api.table.get(tableId) - await config.api.table.save({ - ...table, - schema: { - ...table.schema, - two: { ...table.schema["two"], visible: false }, - }, - }) - - expect(await getDelegate(view)).toEqual({ - ...view, - schema: { - id: { ...table.schema["id"], visible: true }, - one: { ...table.schema["one"], visible: true }, - three: { ...table.schema["three"], visible: false }, - }, - }) - }) - - it("should be able to fetch readonly config after downgrades", async () => { - const res = await config.api.viewV2.create({ - name: generator.name(), - tableId: table._id!, - schema: { - id: { visible: true }, - one: { visible: true, readonly: true }, - }, - }) - - mocks.licenses.useCloudFree() - const view = await getDelegate(res) - expect(view.schema?.one).toEqual( - expect.objectContaining({ visible: true, readonly: true }) - ) - }) - - it("should fill in the queryUI field if it's missing", async () => { - const res = await config.api.viewV2.create({ - name: generator.name(), - tableId: tableId, - query: [ - { - operator: BasicOperator.EQUAL, - field: "one", - value: "1", - }, - ], - schema: { - id: { visible: true }, - one: { visible: true }, - }, - }) - - 
const table = await config.api.table.get(tableId) - const rawView = table.views![res.name] as ViewV2 - delete rawView.queryUI - - await context.doInAppContext(config.getAppId(), async () => { - const db = context.getAppDB() - - if (!rawDatasource) { - await db.put(table) - } else { - const ds = await config.api.datasource.get(datasource!._id!) - ds.entities![table.name] = table - const updatedDs = { - ...rawDatasource, - _id: ds._id, - _rev: ds._rev, - entities: ds.entities, - } - await db.put(updatedDs) - } - }) - - const view = await getDelegate(res) - const expected: UISearchFilter = { - onEmptyFilter: EmptyFilterOption.RETURN_ALL, - logicalOperator: UILogicalOperator.ALL, - groups: [ - { + ], + // Should also update queryUI because query was not previously set. + queryUI: { + onEmptyFilter: EmptyFilterOption.RETURN_ALL, logicalOperator: UILogicalOperator.ALL, - filters: [ + groups: [ { - operator: BasicOperator.EQUAL, - field: "one", - value: "1", + logicalOperator: UILogicalOperator.ALL, + filters: [ + { + operator: BasicOperator.EQUAL, + field: "newField", + value: "thatValue", + }, + ], }, ], }, - ], - } - expect(view.queryUI).toEqual(expected) - }) - }) + schema: expect.anything(), + } - describe("updating table schema", () => { - describe("existing columns changed to required", () => { - beforeEach(async () => { - table = await config.api.table.save( - saveTableRequest({ - schema: { - id: { - name: "id", - type: FieldType.NUMBER, - autocolumn: true, - }, - name: { - name: "name", - type: FieldType.STRING, - }, - }, - }) - ) - }) - - it("allows updating when no views constrains the field", async () => { - await config.api.viewV2.create({ - name: "view a", - tableId: table._id!, - schema: { - id: { visible: true }, - name: { visible: true }, - }, + expect((await config.api.table.get(tableId)).views).toEqual({ + [view.name]: expected, }) - - table = await config.api.table.get(table._id!) - await config.api.table.save( - { - ...table, - schema: { - ...table.schema, - name: { - name: "name", - type: FieldType.STRING, - constraints: { presence: { allowEmpty: false } }, - }, - }, - }, - { status: 200 } - ) }) - it("rejects if field is readonly in any view", async () => { - await config.api.viewV2.create({ - name: "view a", - tableId: table._id!, + it("can update all fields", async () => { + const tableId = table._id! + + const updatedData: Required< + Omit + > = { + version: view.version, + id: view.id, + tableId, + name: view.name, + primaryDisplay: "Price", + query: [ + { + operator: BasicOperator.EQUAL, + field: "newField", + value: "newValue", + }, + ], + sort: { + field: generator.word(), + order: SortOrder.DESCENDING, + type: SortType.STRING, + }, schema: { id: { visible: true }, - name: { + Category: { + visible: false, + }, + Price: { visible: true, readonly: true, }, }, - }) + } + await config.api.viewV2.update(updatedData) - table = await config.api.table.get(table._id!) 
- await config.api.table.save( - { - ...table, - schema: { - ...table.schema, - name: { - name: "name", - type: FieldType.STRING, - constraints: { presence: true }, + const expected: ViewV2 = { + ...updatedData, + // queryUI gets generated from query + queryUI: { + logicalOperator: UILogicalOperator.ALL, + onEmptyFilter: EmptyFilterOption.RETURN_ALL, + groups: [ + { + logicalOperator: UILogicalOperator.ALL, + filters: [ + { + operator: BasicOperator.EQUAL, + field: "newField", + value: "newValue", + }, + ], + }, + ], + }, + schema: { + ...table.schema, + id: expect.objectContaining({ + visible: true, + }), + Category: expect.objectContaining({ + visible: false, + }), + Price: expect.objectContaining({ + visible: true, + readonly: true, + }), + }, + } + + expect((await config.api.table.get(tableId)).views).toEqual({ + [view.name]: expected, + }) + }) + + it("can update an existing view name", async () => { + const tableId = table._id! + const newName = generator.guid() + await config.api.viewV2.update({ ...view, name: newName }) + + expect(await config.api.table.get(tableId)).toEqual( + expect.objectContaining({ + views: { + [newName]: { + ...view, + name: newName, + schema: expect.anything(), }, }, + }) + ) + }) + + it("cannot update an unexisting views nor edit ids", async () => { + const tableId = table._id! + await config.api.viewV2.update( + { ...view, id: generator.guid() }, + { status: 404 } + ) + + expect(await config.api.table.get(tableId)).toEqual( + expect.objectContaining({ + views: { + [view.name]: { + ...view, + schema: expect.anything(), + }, + }, + }) + ) + }) + + it("cannot update views with the wrong tableId", async () => { + const tableId = table._id! + await config.api.viewV2.update( + { + ...view, + tableId: generator.guid(), + query: [ + { + operator: BasicOperator.EQUAL, + field: "newField", + value: "thatValue", + }, + ], + }, + { status: 404 } + ) + + expect(await config.api.table.get(tableId)).toEqual( + expect.objectContaining({ + views: { + [view.name]: { + ...view, + schema: expect.anything(), + }, + }, + }) + ) + }) + + isInternal && + it("cannot update views v1", async () => { + const viewV1 = await config.api.legacyView.save({ + tableId: table._id!, + name: generator.guid(), + filters: [], + schema: {}, + }) + + await config.api.viewV2.update(viewV1 as unknown as ViewV2, { + status: 400, + body: { + message: "Only views V2 can be updated", + status: 400, + }, + }) + }) + + it("cannot update the a view with unmatching ids between url and body", async () => { + const anotherView = await config.api.viewV2.create({ + tableId: table._id!, + name: generator.guid(), + schema: { + id: { visible: true }, + }, + }) + const result = await config + .request!.put(`/api/v2/views/${anotherView.id}`) + .send(view) + .set(config.defaultHeaders()) + .expect("Content-Type", /json/) + .expect(400) + + expect(result.body).toEqual({ + message: + "View id does not match between the body and the uri path", + status: 400, + }) + }) + + it("updates only UI schema overrides", async () => { + const updatedView = await config.api.viewV2.update({ + ...view, + schema: { + ...view.schema, + Price: { + name: "Price", + type: FieldType.NUMBER, + visible: true, + order: 1, + width: 100, + }, + Category: { + name: "Category", + type: FieldType.STRING, + visible: false, + icon: "ic", + }, + } as ViewV2Schema, + }) + + expect(updatedView).toEqual({ + ...view, + schema: { + id: { visible: true }, + Price: { + visible: true, + order: 1, + width: 100, + }, + Category: { visible: false, icon: "ic" }, 
+ }, + id: view.id, + version: 2, + }) + }) + + it("will not throw an exception if the schema is 'deleting' non UI fields", async () => { + await config.api.viewV2.update( + { + ...view, + schema: { + ...view.schema, + Price: { + name: "Price", + type: FieldType.NUMBER, + visible: true, + }, + Category: { + name: "Category", + type: FieldType.STRING, + }, + } as ViewV2Schema, + }, + { + status: 200, + } + ) + }) + + it("cannot update view type after creation", async () => { + const view = await config.api.viewV2.create({ + tableId: table._id!, + name: generator.guid(), + schema: { + id: { visible: true }, + Price: { + visible: true, + }, + }, + }) + + await config.api.viewV2.update( + { + ...view, + type: ViewV2Type.CALCULATION, }, { status: 400, body: { - status: 400, - message: - 'To make field "name" required, this field must be present and writable in views: view a.', + message: "Cannot update view type after creation", }, } ) }) - it("rejects if field is hidden in any view", async () => { - await config.api.viewV2.create({ - name: "view a", - tableId: table._id!, - schema: { id: { visible: true } }, - }) - - table = await config.api.table.get(table._id!) - await config.api.table.save( - { - ...table, + isInternal && + it("updating schema will only validate modified field", async () => { + let view = await config.api.viewV2.create({ + tableId: table._id!, + name: generator.guid(), schema: { - ...table.schema, - name: { - name: "name", - type: FieldType.STRING, - constraints: { presence: true }, + id: { visible: true }, + Price: { + visible: true, + }, + Category: { visible: true }, + }, + }) + + // Update the view to an invalid state + const tableToUpdate = await config.api.table.get(table._id!) + ;(tableToUpdate.views![view.name] as ViewV2).schema!.id.visible = + false + await db.getDB(config.appId!).put(tableToUpdate) + + view = await config.api.viewV2.get(view.id) + await config.api.viewV2.update( + { + ...view, + schema: { + ...view.schema, + Price: { + visible: false, + }, }, }, + { + status: 400, + body: { + message: + 'You can\'t hide "id" because it is a required field.', + status: 400, + }, + } + ) + }) + + it("can update queryUI field and query gets regenerated", async () => { + await config.api.viewV2.update({ + ...view, + queryUI: { + groups: [ + { + filters: [ + { + operator: BasicOperator.EQUAL, + field: "field", + value: "value", + }, + ], + }, + ], + }, + }) + + let updatedView = await config.api.viewV2.get(view.id) + let expected: SearchFilters = { + onEmptyFilter: EmptyFilterOption.RETURN_ALL, + $and: { + conditions: [ + { + $and: { + conditions: [ + { + equal: { field: "value" }, + }, + ], + }, + }, + ], + }, + } + expect(updatedView.query).toEqual(expected) + + await config.api.viewV2.update({ + ...updatedView, + queryUI: { + groups: [ + { + filters: [ + { + operator: BasicOperator.EQUAL, + field: "newField", + value: "newValue", + }, + ], + }, + ], + }, + }) + + updatedView = await config.api.viewV2.get(view.id) + expected = { + onEmptyFilter: EmptyFilterOption.RETURN_ALL, + $and: { + conditions: [ + { + $and: { + conditions: [ + { + equal: { newField: "newValue" }, + }, + ], + }, + }, + ], + }, + } + expect(updatedView.query).toEqual(expected) + }) + + it("can delete either query and it will get regenerated from queryUI", async () => { + await config.api.viewV2.update({ + ...view, + query: [ + { + operator: BasicOperator.EQUAL, + field: "field", + value: "value", + }, + ], + }) + + let updatedView = await config.api.viewV2.get(view.id) + 
expect(updatedView.queryUI).toBeDefined() + + await config.api.viewV2.update({ + ...updatedView, + query: undefined, + }) + + updatedView = await config.api.viewV2.get(view.id) + expect(updatedView.query).toBeDefined() + }) + + // This is because the conversion from queryUI -> query loses data, so you + // can't accurately reproduce the original queryUI from the query. If + // query is a LegacyFilter[] we allow it, because for Budibase v3 + // everything in the db had query set to a LegacyFilter[], and there's no + // loss of information converting from a LegacyFilter[] to a + // UISearchFilter. But we convert to a SearchFilters and that can't be + // accurately converted to a UISearchFilter. + it("can't regenerate queryUI from a query once it has been generated from a queryUI", async () => { + await config.api.viewV2.update({ + ...view, + queryUI: { + groups: [ + { + filters: [ + { + operator: BasicOperator.EQUAL, + field: "field", + value: "value", + }, + ], + }, + ], + }, + }) + + let updatedView = await config.api.viewV2.get(view.id) + expect(updatedView.query).toBeDefined() + + await config.api.viewV2.update( + { + ...updatedView, + queryUI: undefined, }, { status: 400, body: { - status: 400, - message: - 'To make field "name" required, this field must be present and writable in views: view a.', + message: "view is missing queryUI field", }, } ) }) + + describe("calculation views", () => { + let table: Table + let view: ViewV2 + + beforeEach(async () => { + table = await config.api.table.save( + saveTableRequest({ + schema: { + name: { + name: "name", + type: FieldType.STRING, + constraints: { + presence: true, + }, + }, + country: { + name: "country", + type: FieldType.STRING, + }, + age: { + name: "age", + type: FieldType.NUMBER, + }, + }, + }) + ) + + view = await config.api.viewV2.create({ + tableId: table._id!, + name: generator.guid(), + type: ViewV2Type.CALCULATION, + schema: { + country: { + visible: true, + }, + age: { + visible: true, + calculationType: CalculationType.SUM, + field: "age", + }, + }, + }) + + await config.api.row.bulkImport(table._id!, { + rows: [ + { + name: "Steve", + age: 30, + country: "UK", + }, + { + name: "Jane", + age: 31, + country: "UK", + }, + { + name: "Ruari", + age: 32, + country: "USA", + }, + { + name: "Alice", + age: 33, + country: "USA", + }, + ], + }) + }) + + it("returns the expected rows prior to modification", async () => { + const { rows } = await config.api.row.search(view.id) + expect(rows).toHaveLength(2) + expect(rows).toEqual( + expect.arrayContaining([ + { + country: "USA", + age: 65, + }, + { + country: "UK", + age: 61, + }, + ]) + ) + }) + + it("can remove a group by field", async () => { + delete view.schema!.country + await config.api.viewV2.update(view) + + const { rows } = await config.api.row.search(view.id) + expect(rows).toHaveLength(1) + expect(rows).toEqual( + expect.arrayContaining([ + { + age: 126, + }, + ]) + ) + }) + + it("can remove a calculation field", async () => { + delete view.schema!.age + await config.api.viewV2.update(view) + + const { rows } = await config.api.row.search(view.id) + expect(rows).toHaveLength(4) + + // Because the removal of the calculation field actually makes this + // no longer a calculation view, these rows will now have _id and + // _rev fields. 
+ expect(rows).toEqual( + expect.arrayContaining([ + expect.objectContaining({ country: "UK" }), + expect.objectContaining({ country: "UK" }), + expect.objectContaining({ country: "USA" }), + expect.objectContaining({ country: "USA" }), + ]) + ) + }) + + it("can add a new group by field", async () => { + view.schema!.name = { visible: true } + await config.api.viewV2.update(view) + + const { rows } = await config.api.row.search(view.id) + expect(rows).toHaveLength(4) + expect(rows).toEqual( + expect.arrayContaining([ + { + name: "Steve", + age: 30, + country: "UK", + }, + { + name: "Jane", + age: 31, + country: "UK", + }, + { + name: "Ruari", + age: 32, + country: "USA", + }, + { + name: "Alice", + age: 33, + country: "USA", + }, + ]) + ) + }) + + it("can add a new group by field that is invisible, even if required on the table", async () => { + view.schema!.name = { visible: false } + await config.api.viewV2.update(view) + + const { rows } = await config.api.row.search(view.id) + expect(rows).toHaveLength(2) + expect(rows).toEqual( + expect.arrayContaining([ + { + country: "USA", + age: 65, + }, + { + country: "UK", + age: 61, + }, + ]) + ) + }) + + it("can add a new calculation field", async () => { + view.schema!.count = { + visible: true, + calculationType: CalculationType.COUNT, + field: "age", + } + await config.api.viewV2.update(view) + + const { rows } = await config.api.row.search(view.id) + expect(rows).toHaveLength(2) + expect(rows).toEqual( + expect.arrayContaining([ + { + country: "USA", + age: 65, + count: 2, + }, + { + country: "UK", + age: 61, + count: 2, + }, + ]) + ) + }) + }) }) - describe("foreign relationship columns", () => { - const createAuxTable = () => - config.api.table.save( + describe("delete", () => { + let view: ViewV2 + + beforeAll(async () => { + view = await config.api.viewV2.create({ + tableId: table._id!, + name: generator.guid(), + schema: { + id: { visible: true }, + }, + }) + }) + + it("can delete an existing view", async () => { + const tableId = table._id! + const getPersistedView = async () => + (await config.api.table.get(tableId)).views![view.name] + + expect(await getPersistedView()).toBeDefined() + + await config.api.viewV2.delete(view.id) + + expect(await getPersistedView()).toBeUndefined() + }) + }) + + describe.each([ + ["from view api", (view: ViewV2) => config.api.viewV2.get(view.id)], + [ + "from table", + async (view: ViewV2) => { + const table = await config.api.table.get(view.tableId) + return table.views![view.name] as ViewV2 + }, + ], + ])("read (%s)", (_, getDelegate) => { + let table: Table + let tableId: string + + beforeEach(async () => { + table = await config.api.table.save( saveTableRequest({ - primaryDisplay: "name", schema: { - name: { name: "name", type: FieldType.STRING }, - age: { name: "age", type: FieldType.NUMBER }, + one: { + type: FieldType.STRING, + name: "one", + }, + two: { + type: FieldType.STRING, + name: "two", + }, + three: { + type: FieldType.STRING, + name: "three", + }, }, }) ) + tableId = table._id! 
+ }) - const createMainTable = async ( - links: { - name: string - tableId: string - fk: string - }[] - ) => { - const table = await config.api.table.save( - saveTableRequest({ - schema: {}, - }) - ) + it("retrieves the view data with the enriched schema", async () => { + const view = await config.api.viewV2.create({ + tableId, + name: generator.guid(), + schema: { + id: { visible: true }, + one: { visible: true }, + two: { visible: true }, + }, + }) + + expect(await getDelegate(view)).toEqual({ + ...view, + schema: { + id: { ...table.schema["id"], visible: true }, + one: { ...table.schema["one"], visible: true }, + two: { ...table.schema["two"], visible: true }, + three: { ...table.schema["three"], visible: false }, + }, + }) + }) + + it("does not include columns removed from the table", async () => { + const view = await config.api.viewV2.create({ + tableId, + name: generator.guid(), + schema: { + id: { visible: true }, + one: { visible: true }, + two: { visible: true }, + }, + }) + const table = await config.api.table.get(tableId) + const { one: _, ...newSchema } = table.schema + await config.api.table.save({ ...table, schema: newSchema }) + + expect(await getDelegate(view)).toEqual({ + ...view, + schema: { + id: { ...table.schema["id"], visible: true }, + two: { ...table.schema["two"], visible: true }, + three: { ...table.schema["three"], visible: false }, + }, + }) + }) + + it("does not include columns hidden from the table", async () => { + const view = await config.api.viewV2.create({ + tableId, + name: generator.guid(), + schema: { + id: { visible: true }, + one: { visible: true }, + two: { visible: true }, + }, + }) + const table = await config.api.table.get(tableId) await config.api.table.save({ ...table, schema: { ...table.schema, - ...links.reduce((acc, c) => { - acc[c.name] = { - name: c.name, - relationshipType: RelationshipType.ONE_TO_MANY, - type: FieldType.LINK, - tableId: c.tableId, - fieldName: c.fk, - constraints: { type: "array" }, - } - return acc - }, {}), + two: { ...table.schema["two"], visible: false }, }, }) - return table - } - const createView = async (tableId: string, schema: ViewV2Schema) => - await config.api.viewV2.create({ - name: generator.guid(), - tableId, - schema, + expect(await getDelegate(view)).toEqual({ + ...view, + schema: { + id: { ...table.schema["id"], visible: true }, + one: { ...table.schema["one"], visible: true }, + three: { ...table.schema["three"], visible: false }, + }, + }) + }) + + it("should be able to fetch readonly config after downgrades", async () => { + const res = await config.api.viewV2.create({ + name: generator.name(), + tableId: table._id!, + schema: { + id: { visible: true }, + one: { visible: true, readonly: true }, + }, }) - const renameColumn = async (table: Table, renaming: RenameColumn) => { - const newSchema = { ...table.schema } - newSchema[renaming.updated] = { - ...table.schema[renaming.old], - name: renaming.updated, + mocks.licenses.useCloudFree() + const view = await getDelegate(res) + expect(view.schema?.one).toEqual( + expect.objectContaining({ visible: true, readonly: true }) + ) + }) + + it("should fill in the queryUI field if it's missing", async () => { + const res = await config.api.viewV2.create({ + name: generator.name(), + tableId: tableId, + query: [ + { + operator: BasicOperator.EQUAL, + field: "one", + value: "1", + }, + ], + schema: { + id: { visible: true }, + one: { visible: true }, + }, + }) + + const table = await config.api.table.get(tableId) + const rawView = table.views![res.name] as ViewV2 
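+        // Strip queryUI and write the table (or, for external datasources,
+        // the datasource entity) straight to the DB, bypassing the API, so
+        // the stored view has no queryUI for the read path to fill back in.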
+ delete rawView.queryUI + + await context.doInAppContext(config.getAppId(), async () => { + const db = context.getAppDB() + + if (!rawDatasource) { + await db.put(table) + } else { + const ds = await config.api.datasource.get(datasource!._id!) + ds.entities![table.name] = table + const updatedDs = { + ...rawDatasource, + _id: ds._id, + _rev: ds._rev, + entities: ds.entities, + } + await db.put(updatedDs) + } + }) + + const view = await getDelegate(res) + const expected: UISearchFilter = { + onEmptyFilter: EmptyFilterOption.RETURN_ALL, + logicalOperator: UILogicalOperator.ALL, + groups: [ + { + logicalOperator: UILogicalOperator.ALL, + filters: [ + { + operator: BasicOperator.EQUAL, + field: "one", + value: "1", + }, + ], + }, + ], } - delete newSchema[renaming.old] + expect(view.queryUI).toEqual(expected) + }) + }) - await config.api.table.save({ - ...table, - schema: newSchema, - _rename: renaming, - }) - } - - it("updating a column will update link columns configuration", async () => { - let auxTable = await createAuxTable() - - const table = await createMainTable([ - { name: "aux", tableId: auxTable._id!, fk: "fk_aux" }, - ]) - // Refetch auxTable - auxTable = await config.api.table.get(auxTable._id!) - - const view = await createView(table._id!, { - aux: { - visible: true, - columns: { - name: { visible: true, readonly: true }, - age: { visible: true, readonly: true }, - }, - }, - }) - - await renameColumn(auxTable, { old: "age", updated: "dob" }) - - const updatedView = await config.api.viewV2.get(view.id) - expect(updatedView).toEqual( - expect.objectContaining({ - schema: expect.objectContaining({ - aux: expect.objectContaining({ - columns: { - id: expect.objectContaining({ - visible: false, - readonly: false, - }), - name: expect.objectContaining({ - visible: true, - readonly: true, - }), - dob: expect.objectContaining({ - visible: true, - readonly: true, - }), + describe("updating table schema", () => { + describe("existing columns changed to required", () => { + beforeEach(async () => { + table = await config.api.table.save( + saveTableRequest({ + schema: { + id: { + name: "id", + type: FieldType.NUMBER, + autocolumn: true, }, - }), - }), + name: { + name: "name", + type: FieldType.STRING, + }, + }, + }) + ) + }) + + it("allows updating when no views constrains the field", async () => { + await config.api.viewV2.create({ + name: "view a", + tableId: table._id!, + schema: { + id: { visible: true }, + name: { visible: true }, + }, }) - ) + + table = await config.api.table.get(table._id!) + await config.api.table.save( + { + ...table, + schema: { + ...table.schema, + name: { + name: "name", + type: FieldType.STRING, + constraints: { presence: { allowEmpty: false } }, + }, + }, + }, + { status: 200 } + ) + }) + + it("rejects if field is readonly in any view", async () => { + await config.api.viewV2.create({ + name: "view a", + tableId: table._id!, + schema: { + id: { visible: true }, + name: { + visible: true, + readonly: true, + }, + }, + }) + + table = await config.api.table.get(table._id!) 
+ await config.api.table.save( + { + ...table, + schema: { + ...table.schema, + name: { + name: "name", + type: FieldType.STRING, + constraints: { presence: true }, + }, + }, + }, + { + status: 400, + body: { + status: 400, + message: + 'To make field "name" required, this field must be present and writable in views: view a.', + }, + } + ) + }) + + it("rejects if field is hidden in any view", async () => { + await config.api.viewV2.create({ + name: "view a", + tableId: table._id!, + schema: { id: { visible: true } }, + }) + + table = await config.api.table.get(table._id!) + await config.api.table.save( + { + ...table, + schema: { + ...table.schema, + name: { + name: "name", + type: FieldType.STRING, + constraints: { presence: true }, + }, + }, + }, + { + status: 400, + body: { + status: 400, + message: + 'To make field "name" required, this field must be present and writable in views: view a.', + }, + } + ) + }) }) - it("handles multiple fields using the same table", async () => { - let auxTable = await createAuxTable() + describe("foreign relationship columns", () => { + const createAuxTable = () => + config.api.table.save( + saveTableRequest({ + primaryDisplay: "name", + schema: { + name: { name: "name", type: FieldType.STRING }, + age: { name: "age", type: FieldType.NUMBER }, + }, + }) + ) - const table = await createMainTable([ - { name: "aux", tableId: auxTable._id!, fk: "fk_aux" }, - { name: "aux2", tableId: auxTable._id!, fk: "fk_aux2" }, - ]) - // Refetch auxTable - auxTable = await config.api.table.get(auxTable._id!) - - const view = await createView(table._id!, { - aux: { - visible: true, - columns: { - name: { visible: true, readonly: true }, - age: { visible: true, readonly: true }, + const createMainTable = async ( + links: { + name: string + tableId: string + fk: string + }[] + ) => { + const table = await config.api.table.save( + saveTableRequest({ + schema: {}, + }) + ) + await config.api.table.save({ + ...table, + schema: { + ...table.schema, + ...links.reduce((acc, c) => { + acc[c.name] = { + name: c.name, + relationshipType: RelationshipType.ONE_TO_MANY, + type: FieldType.LINK, + tableId: c.tableId, + fieldName: c.fk, + constraints: { type: "array" }, + } + return acc + }, {}), }, - }, - aux2: { - visible: true, - columns: { - name: { visible: true, readonly: true }, - age: { visible: true, readonly: true }, - }, - }, - }) - - await renameColumn(auxTable, { old: "age", updated: "dob" }) - - const updatedView = await config.api.viewV2.get(view.id) - expect(updatedView).toEqual( - expect.objectContaining({ - schema: expect.objectContaining({ - aux: expect.objectContaining({ - columns: { - id: expect.objectContaining({ - visible: false, - readonly: false, - }), - name: expect.objectContaining({ - visible: true, - readonly: true, - }), - dob: expect.objectContaining({ - visible: true, - readonly: true, - }), - }, - }), - aux2: expect.objectContaining({ - columns: { - id: expect.objectContaining({ - visible: false, - readonly: false, - }), - name: expect.objectContaining({ - visible: true, - readonly: true, - }), - dob: expect.objectContaining({ - visible: true, - readonly: true, - }), - }, - }), - }), }) - ) - }) - - it("does not rename columns with the same name but from other tables", async () => { - let auxTable = await createAuxTable() - let aux2Table = await createAuxTable() - - const table = await createMainTable([ - { name: "aux", tableId: auxTable._id!, fk: "fk_aux" }, - { name: "aux2", tableId: aux2Table._id!, fk: "fk_aux2" }, - ]) - - // Refetch auxTable - auxTable 
= await config.api.table.get(auxTable._id!) - - const view = await createView(table._id!, { - aux: { - visible: true, - columns: { - name: { visible: true, readonly: true }, - }, - }, - aux2: { - visible: true, - columns: { - name: { visible: true, readonly: true }, - }, - }, - }) - - await renameColumn(auxTable, { old: "name", updated: "fullName" }) - - const updatedView = await config.api.viewV2.get(view.id) - expect(updatedView).toEqual( - expect.objectContaining({ - schema: expect.objectContaining({ - aux: expect.objectContaining({ - columns: { - id: expect.objectContaining({ - visible: false, - readonly: false, - }), - fullName: expect.objectContaining({ - visible: true, - readonly: true, - }), - age: expect.objectContaining({ - visible: false, - readonly: false, - }), - }, - }), - aux2: expect.objectContaining({ - columns: { - id: expect.objectContaining({ - visible: false, - readonly: false, - }), - name: expect.objectContaining({ - visible: true, - readonly: true, - }), - age: expect.objectContaining({ - visible: false, - readonly: false, - }), - }, - }), - }), - }) - ) - }) - - it("updates all views references", async () => { - let auxTable = await createAuxTable() - - const table1 = await createMainTable([ - { name: "aux", tableId: auxTable._id!, fk: "fk_aux_table1" }, - ]) - const table2 = await createMainTable([ - { name: "aux", tableId: auxTable._id!, fk: "fk_aux_table2" }, - ]) - - // Refetch auxTable - auxTable = await config.api.table.get(auxTable._id!) - - const viewSchema = { - aux: { - visible: true, - columns: { - name: { visible: true, readonly: true }, - age: { visible: true, readonly: true }, - }, - }, + return table } - const view1 = await createView(table1._id!, viewSchema) - const view2 = await createView(table1._id!, viewSchema) - const view3 = await createView(table2._id!, viewSchema) - await renameColumn(auxTable, { old: "age", updated: "dob" }) + const createView = async (tableId: string, schema: ViewV2Schema) => + await config.api.viewV2.create({ + name: generator.guid(), + tableId, + schema, + }) + + const renameColumn = async ( + table: Table, + renaming: RenameColumn + ) => { + const newSchema = { ...table.schema } + newSchema[renaming.updated] = { + ...table.schema[renaming.old], + name: renaming.updated, + } + delete newSchema[renaming.old] + + await config.api.table.save({ + ...table, + schema: newSchema, + _rename: renaming, + }) + } + + it("updating a column will update link columns configuration", async () => { + let auxTable = await createAuxTable() + + const table = await createMainTable([ + { name: "aux", tableId: auxTable._id!, fk: "fk_aux" }, + ]) + // Refetch auxTable + auxTable = await config.api.table.get(auxTable._id!) + + const view = await createView(table._id!, { + aux: { + visible: true, + columns: { + name: { visible: true, readonly: true }, + age: { visible: true, readonly: true }, + }, + }, + }) + + await renameColumn(auxTable, { old: "age", updated: "dob" }) - for (const view of [view1, view2, view3]) { const updatedView = await config.api.viewV2.get(view.id) expect(updatedView).toEqual( expect.objectContaining({ @@ -2269,74 +2084,605 @@ datasourceDescribe( }), }) ) - } + }) + + it("handles multiple fields using the same table", async () => { + let auxTable = await createAuxTable() + + const table = await createMainTable([ + { name: "aux", tableId: auxTable._id!, fk: "fk_aux" }, + { name: "aux2", tableId: auxTable._id!, fk: "fk_aux2" }, + ]) + // Refetch auxTable + auxTable = await config.api.table.get(auxTable._id!) 
+ + const view = await createView(table._id!, { + aux: { + visible: true, + columns: { + name: { visible: true, readonly: true }, + age: { visible: true, readonly: true }, + }, + }, + aux2: { + visible: true, + columns: { + name: { visible: true, readonly: true }, + age: { visible: true, readonly: true }, + }, + }, + }) + + await renameColumn(auxTable, { old: "age", updated: "dob" }) + + const updatedView = await config.api.viewV2.get(view.id) + expect(updatedView).toEqual( + expect.objectContaining({ + schema: expect.objectContaining({ + aux: expect.objectContaining({ + columns: { + id: expect.objectContaining({ + visible: false, + readonly: false, + }), + name: expect.objectContaining({ + visible: true, + readonly: true, + }), + dob: expect.objectContaining({ + visible: true, + readonly: true, + }), + }, + }), + aux2: expect.objectContaining({ + columns: { + id: expect.objectContaining({ + visible: false, + readonly: false, + }), + name: expect.objectContaining({ + visible: true, + readonly: true, + }), + dob: expect.objectContaining({ + visible: true, + readonly: true, + }), + }, + }), + }), + }) + ) + }) + + it("does not rename columns with the same name but from other tables", async () => { + let auxTable = await createAuxTable() + let aux2Table = await createAuxTable() + + const table = await createMainTable([ + { name: "aux", tableId: auxTable._id!, fk: "fk_aux" }, + { name: "aux2", tableId: aux2Table._id!, fk: "fk_aux2" }, + ]) + + // Refetch auxTable + auxTable = await config.api.table.get(auxTable._id!) + + const view = await createView(table._id!, { + aux: { + visible: true, + columns: { + name: { visible: true, readonly: true }, + }, + }, + aux2: { + visible: true, + columns: { + name: { visible: true, readonly: true }, + }, + }, + }) + + await renameColumn(auxTable, { old: "name", updated: "fullName" }) + + const updatedView = await config.api.viewV2.get(view.id) + expect(updatedView).toEqual( + expect.objectContaining({ + schema: expect.objectContaining({ + aux: expect.objectContaining({ + columns: { + id: expect.objectContaining({ + visible: false, + readonly: false, + }), + fullName: expect.objectContaining({ + visible: true, + readonly: true, + }), + age: expect.objectContaining({ + visible: false, + readonly: false, + }), + }, + }), + aux2: expect.objectContaining({ + columns: { + id: expect.objectContaining({ + visible: false, + readonly: false, + }), + name: expect.objectContaining({ + visible: true, + readonly: true, + }), + age: expect.objectContaining({ + visible: false, + readonly: false, + }), + }, + }), + }), + }) + ) + }) + + it("updates all views references", async () => { + let auxTable = await createAuxTable() + + const table1 = await createMainTable([ + { name: "aux", tableId: auxTable._id!, fk: "fk_aux_table1" }, + ]) + const table2 = await createMainTable([ + { name: "aux", tableId: auxTable._id!, fk: "fk_aux_table2" }, + ]) + + // Refetch auxTable + auxTable = await config.api.table.get(auxTable._id!) 
+ + const viewSchema = { + aux: { + visible: true, + columns: { + name: { visible: true, readonly: true }, + age: { visible: true, readonly: true }, + }, + }, + } + const view1 = await createView(table1._id!, viewSchema) + const view2 = await createView(table1._id!, viewSchema) + const view3 = await createView(table2._id!, viewSchema) + + await renameColumn(auxTable, { old: "age", updated: "dob" }) + + for (const view of [view1, view2, view3]) { + const updatedView = await config.api.viewV2.get(view.id) + expect(updatedView).toEqual( + expect.objectContaining({ + schema: expect.objectContaining({ + aux: expect.objectContaining({ + columns: { + id: expect.objectContaining({ + visible: false, + readonly: false, + }), + name: expect.objectContaining({ + visible: true, + readonly: true, + }), + dob: expect.objectContaining({ + visible: true, + readonly: true, + }), + }, + }), + }), + }) + ) + } + }) + }) + }) + + describe("calculation views", () => { + it("should not remove calculation columns when modifying table schema", async () => { + let table = await config.api.table.save( + saveTableRequest({ + schema: { + name: { + name: "name", + type: FieldType.STRING, + }, + age: { + name: "age", + type: FieldType.NUMBER, + }, + }, + }) + ) + + let view = await config.api.viewV2.create({ + tableId: table._id!, + name: generator.guid(), + type: ViewV2Type.CALCULATION, + schema: { + sum: { + visible: true, + calculationType: CalculationType.SUM, + field: "age", + }, + }, + }) + + table = await config.api.table.get(table._id!) + await config.api.table.save({ + ...table, + schema: { + ...table.schema, + name: { + name: "name", + type: FieldType.STRING, + constraints: { presence: true }, + }, + }, + }) + + view = await config.api.viewV2.get(view.id) + expect(Object.keys(view.schema!).sort()).toEqual([ + "age", + "id", + "name", + "sum", + ]) + }) + + describe("bigints", () => { + let table: Table + let view: ViewV2 + + beforeEach(async () => { + table = await config.api.table.save( + saveTableRequest({ + schema: { + bigint: { + name: "bigint", + type: FieldType.BIGINT, + }, + }, + }) + ) + + view = await config.api.viewV2.create({ + tableId: table._id!, + name: generator.guid(), + type: ViewV2Type.CALCULATION, + schema: { + sum: { + visible: true, + calculationType: CalculationType.SUM, + field: "bigint", + }, + }, + }) + }) + + it("should not lose precision handling ints larger than JSs int53", async () => { + // The sum of the following 3 numbers cannot be represented by + // JavaScripts default int53 datatype for numbers, so this is a test + // that makes sure we aren't losing precision between the DB and the + // user. 
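+          // For instance, Number.MAX_SAFE_INTEGER is 2 ** 53 - 1
+          // (9007199254740991), so the expected total of 1000000000000000444
+          // cannot be held exactly in a plain JS number, while
+          // BigInt("1000000000000000444") (and the string values used below)
+          // preserves every digit.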
+ await config.api.row.bulkImport(table._id!, { + rows: [ + { bigint: "1000000000000000000" }, + { bigint: "123" }, + { bigint: "321" }, + ], + }) + + const { rows } = await config.api.row.search(view.id) + expect(rows).toHaveLength(1) + expect(rows[0].sum).toEqual("1000000000000000444") + }) + + it("should be able to handle up to 2**63 - 1 bigints", async () => { + await config.api.row.bulkImport(table._id!, { + rows: [{ bigint: "9223372036854775806" }, { bigint: "1" }], + }) + + const { rows } = await config.api.row.search(view.id) + expect(rows).toHaveLength(1) + expect(rows[0].sum).toEqual("9223372036854775807") + }) }) }) }) - describe("calculation views", () => { - it("should not remove calculation columns when modifying table schema", async () => { - let table = await config.api.table.save( + describe("row operations", () => { + let table: Table, view: ViewV2 + beforeEach(async () => { + table = await config.api.table.save( saveTableRequest({ schema: { - name: { - name: "name", + one: { type: FieldType.STRING, name: "one" }, + two: { type: FieldType.STRING, name: "two" }, + default: { type: FieldType.STRING, - }, - age: { - name: "age", - type: FieldType.NUMBER, + name: "default", + default: "default", }, }, }) ) - - let view = await config.api.viewV2.create({ + view = await config.api.viewV2.create({ tableId: table._id!, name: generator.guid(), - type: ViewV2Type.CALCULATION, schema: { - sum: { - visible: true, - calculationType: CalculationType.SUM, - field: "age", - }, + id: { visible: true }, + two: { visible: true }, }, }) - - table = await config.api.table.get(table._id!) - await config.api.table.save({ - ...table, - schema: { - ...table.schema, - name: { - name: "name", - type: FieldType.STRING, - constraints: { presence: true }, - }, - }, - }) - - view = await config.api.viewV2.get(view.id) - expect(Object.keys(view.schema!).sort()).toEqual([ - "age", - "id", - "name", - "sum", - ]) }) - describe("bigints", () => { - let table: Table - let view: ViewV2 + describe("create", () => { + it("should persist a new row with only the provided view fields", async () => { + const newRow = await config.api.row.save(view.id, { + tableId: table!._id, + _viewId: view.id, + one: "foo", + two: "bar", + default: "ohnoes", + }) - beforeEach(async () => { + const row = await config.api.row.get(table._id!, newRow._id!) 
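+          // Reading back through the table (rather than the view) shows what
+          // was actually persisted: the non-view "one" field is dropped and
+          // the "default" column falls back to its default value.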
+ expect(row.one).toBeUndefined() + expect(row.two).toEqual("bar") + expect(row.default).toEqual("default") + }) + + it("can't persist readonly columns", async () => { + const view = await config.api.viewV2.create({ + tableId: table._id!, + name: generator.guid(), + schema: { + id: { visible: true }, + one: { visible: true, readonly: true }, + two: { visible: true }, + }, + }) + const row = await config.api.row.save(view.id, { + tableId: table!._id, + _viewId: view.id, + one: "foo", + two: "bar", + }) + + expect(row.one).toBeUndefined() + expect(row.two).toEqual("bar") + }) + + it("should not return non-view view fields for a row", async () => { + const newRow = await config.api.row.save(view.id, { + one: "foo", + two: "bar", + }) + + expect(newRow.one).toBeUndefined() + expect(newRow.two).toEqual("bar") + }) + + it("should not be possible to create a row in a calculation view", async () => { + const view = await config.api.viewV2.create({ + tableId: table._id!, + name: generator.guid(), + type: ViewV2Type.CALCULATION, + schema: { + id: { visible: true }, + one: { visible: true }, + }, + }) + + await config.api.row.save( + view.id, + { one: "foo" }, + { + status: 400, + body: { + message: "Cannot insert rows through a calculation view", + status: 400, + }, + } + ) + }) + }) + + describe("patch", () => { + it("should not return non-view view fields for a row", async () => { + const newRow = await config.api.row.save(table._id!, { + one: "foo", + two: "bar", + }) + const row = await config.api.row.patch(view.id, { + tableId: table._id!, + _id: newRow._id!, + _rev: newRow._rev!, + one: "newFoo", + two: "newBar", + }) + + expect(row.one).toBeUndefined() + expect(row.two).toEqual("newBar") + }) + + it("should update only the view fields for a row", async () => { + const newRow = await config.api.row.save(table._id!, { + one: "foo", + two: "bar", + }) + await config.api.row.patch(view.id, { + tableId: table._id!, + _id: newRow._id!, + _rev: newRow._rev!, + one: "newFoo", + two: "newBar", + }) + + const row = await config.api.row.get(table._id!, newRow._id!) + expect(row.one).toEqual("foo") + expect(row.two).toEqual("newBar") + }) + + it("can't update readonly columns", async () => { + const view = await config.api.viewV2.create({ + tableId: table._id!, + name: generator.guid(), + schema: { + id: { visible: true }, + one: { visible: true, readonly: true }, + two: { visible: true }, + }, + }) + const newRow = await config.api.row.save(table._id!, { + one: "foo", + two: "bar", + }) + await config.api.row.patch(view.id, { + tableId: table._id!, + _id: newRow._id!, + _rev: newRow._rev!, + one: "newFoo", + two: "newBar", + }) + + const row = await config.api.row.get(table._id!, newRow._id!) 
+ expect(row.one).toEqual("foo") + expect(row.two).toEqual("newBar") + }) + + it("should not be possible to modify a row in a calculation view", async () => { + const view = await config.api.viewV2.create({ + tableId: table._id!, + name: generator.guid(), + type: ViewV2Type.CALCULATION, + schema: { + id: { visible: true }, + one: { visible: true }, + }, + }) + + const newRow = await config.api.row.save(table._id!, { + one: "foo", + two: "bar", + }) + + await config.api.row.patch( + view.id, + { + tableId: table._id!, + _id: newRow._id!, + _rev: newRow._rev!, + one: "newFoo", + two: "newBar", + }, + { + status: 400, + body: { + message: "Cannot update rows through a calculation view", + }, + } + ) + }) + }) + + describe("destroy", () => { + const getRowUsage = async () => { + const { total } = await config.doInContext(undefined, () => + quotas.getCurrentUsageValues( + QuotaUsageType.STATIC, + StaticQuotaName.ROWS + ) + ) + return total + } + + const assertRowUsage = async (expected: number) => { + const usage = await getRowUsage() + expect(usage).toBe(expected) + } + + it("should be able to delete a row", async () => { + const createdRow = await config.api.row.save(table._id!, {}) + const rowUsage = await getRowUsage() + await config.api.row.bulkDelete(view.id, { rows: [createdRow] }) + await assertRowUsage(isInternal ? rowUsage - 1 : rowUsage) + await config.api.row.get(table._id!, createdRow._id!, { + status: 404, + }) + }) + + it("should be able to delete multiple rows", async () => { + const rows = await Promise.all([ + config.api.row.save(table._id!, {}), + config.api.row.save(table._id!, {}), + config.api.row.save(table._id!, {}), + ]) + const rowUsage = await getRowUsage() + + await config.api.row.bulkDelete(view.id, { + rows: [rows[0], rows[2]], + }) + + await assertRowUsage(isInternal ? rowUsage - 2 : rowUsage) + + await config.api.row.get(table._id!, rows[0]._id!, { + status: 404, + }) + await config.api.row.get(table._id!, rows[2]._id!, { + status: 404, + }) + await config.api.row.get(table._id!, rows[1]._id!, { status: 200 }) + }) + + it("should not be possible to delete a row in a calculation view", async () => { + const row = await config.api.row.save(table._id!, {}) + + const view = await config.api.viewV2.create({ + tableId: table._id!, + name: generator.guid(), + type: ViewV2Type.CALCULATION, + schema: { + id: { visible: true }, + one: { visible: true }, + }, + }) + + await config.api.row.delete( + view.id, + { _id: row._id! }, + { + status: 400, + body: { + message: "Cannot delete rows through a calculation view", + status: 400, + }, + } + ) + }) + }) + + describe("read", () => { + let view: ViewV2 + let table: Table + + beforeAll(async () => { table = await config.api.table.save( saveTableRequest({ schema: { - bigint: { - name: "bigint", - type: FieldType.BIGINT, + Country: { + type: FieldType.STRING, + name: "Country", + }, + Story: { + type: FieldType.STRING, + name: "Story", }, }, }) @@ -2345,852 +2691,552 @@ datasourceDescribe( view = await config.api.viewV2.create({ tableId: table._id!, name: generator.guid(), - type: ViewV2Type.CALCULATION, - schema: { - sum: { - visible: true, - calculationType: CalculationType.SUM, - field: "bigint", - }, - }, - }) - }) - - it("should not lose precision handling ints larger than JSs int53", async () => { - // The sum of the following 3 numbers cannot be represented by - // JavaScripts default int53 datatype for numbers, so this is a test - // that makes sure we aren't losing precision between the DB and the - // user. 
- await config.api.row.bulkImport(table._id!, { - rows: [ - { bigint: "1000000000000000000" }, - { bigint: "123" }, - { bigint: "321" }, - ], - }) - - const { rows } = await config.api.row.search(view.id) - expect(rows).toHaveLength(1) - expect(rows[0].sum).toEqual("1000000000000000444") - }) - - it("should be able to handle up to 2**63 - 1 bigints", async () => { - await config.api.row.bulkImport(table._id!, { - rows: [{ bigint: "9223372036854775806" }, { bigint: "1" }], - }) - - const { rows } = await config.api.row.search(view.id) - expect(rows).toHaveLength(1) - expect(rows[0].sum).toEqual("9223372036854775807") - }) - }) - }) - }) - - describe("row operations", () => { - let table: Table, view: ViewV2 - beforeEach(async () => { - table = await config.api.table.save( - saveTableRequest({ - schema: { - one: { type: FieldType.STRING, name: "one" }, - two: { type: FieldType.STRING, name: "two" }, - default: { - type: FieldType.STRING, - name: "default", - default: "default", - }, - }, - }) - ) - view = await config.api.viewV2.create({ - tableId: table._id!, - name: generator.guid(), - schema: { - id: { visible: true }, - two: { visible: true }, - }, - }) - }) - - describe("create", () => { - it("should persist a new row with only the provided view fields", async () => { - const newRow = await config.api.row.save(view.id, { - tableId: table!._id, - _viewId: view.id, - one: "foo", - two: "bar", - default: "ohnoes", - }) - - const row = await config.api.row.get(table._id!, newRow._id!) - expect(row.one).toBeUndefined() - expect(row.two).toEqual("bar") - expect(row.default).toEqual("default") - }) - - it("can't persist readonly columns", async () => { - const view = await config.api.viewV2.create({ - tableId: table._id!, - name: generator.guid(), - schema: { - id: { visible: true }, - one: { visible: true, readonly: true }, - two: { visible: true }, - }, - }) - const row = await config.api.row.save(view.id, { - tableId: table!._id, - _viewId: view.id, - one: "foo", - two: "bar", - }) - - expect(row.one).toBeUndefined() - expect(row.two).toEqual("bar") - }) - - it("should not return non-view view fields for a row", async () => { - const newRow = await config.api.row.save(view.id, { - one: "foo", - two: "bar", - }) - - expect(newRow.one).toBeUndefined() - expect(newRow.two).toEqual("bar") - }) - - it("should not be possible to create a row in a calculation view", async () => { - const view = await config.api.viewV2.create({ - tableId: table._id!, - name: generator.guid(), - type: ViewV2Type.CALCULATION, - schema: { - id: { visible: true }, - one: { visible: true }, - }, - }) - - await config.api.row.save( - view.id, - { one: "foo" }, - { - status: 400, - body: { - message: "Cannot insert rows through a calculation view", - status: 400, - }, - } - ) - }) - }) - - describe("patch", () => { - it("should not return non-view view fields for a row", async () => { - const newRow = await config.api.row.save(table._id!, { - one: "foo", - two: "bar", - }) - const row = await config.api.row.patch(view.id, { - tableId: table._id!, - _id: newRow._id!, - _rev: newRow._rev!, - one: "newFoo", - two: "newBar", - }) - - expect(row.one).toBeUndefined() - expect(row.two).toEqual("newBar") - }) - - it("should update only the view fields for a row", async () => { - const newRow = await config.api.row.save(table._id!, { - one: "foo", - two: "bar", - }) - await config.api.row.patch(view.id, { - tableId: table._id!, - _id: newRow._id!, - _rev: newRow._rev!, - one: "newFoo", - two: "newBar", - }) - - const row = await 
config.api.row.get(table._id!, newRow._id!) - expect(row.one).toEqual("foo") - expect(row.two).toEqual("newBar") - }) - - it("can't update readonly columns", async () => { - const view = await config.api.viewV2.create({ - tableId: table._id!, - name: generator.guid(), - schema: { - id: { visible: true }, - one: { visible: true, readonly: true }, - two: { visible: true }, - }, - }) - const newRow = await config.api.row.save(table._id!, { - one: "foo", - two: "bar", - }) - await config.api.row.patch(view.id, { - tableId: table._id!, - _id: newRow._id!, - _rev: newRow._rev!, - one: "newFoo", - two: "newBar", - }) - - const row = await config.api.row.get(table._id!, newRow._id!) - expect(row.one).toEqual("foo") - expect(row.two).toEqual("newBar") - }) - - it("should not be possible to modify a row in a calculation view", async () => { - const view = await config.api.viewV2.create({ - tableId: table._id!, - name: generator.guid(), - type: ViewV2Type.CALCULATION, - schema: { - id: { visible: true }, - one: { visible: true }, - }, - }) - - const newRow = await config.api.row.save(table._id!, { - one: "foo", - two: "bar", - }) - - await config.api.row.patch( - view.id, - { - tableId: table._id!, - _id: newRow._id!, - _rev: newRow._rev!, - one: "newFoo", - two: "newBar", - }, - { - status: 400, - body: { - message: "Cannot update rows through a calculation view", - }, - } - ) - }) - }) - - describe("destroy", () => { - const getRowUsage = async () => { - const { total } = await config.doInContext(undefined, () => - quotas.getCurrentUsageValues( - QuotaUsageType.STATIC, - StaticQuotaName.ROWS - ) - ) - return total - } - - const assertRowUsage = async (expected: number) => { - const usage = await getRowUsage() - expect(usage).toBe(expected) - } - - it("should be able to delete a row", async () => { - const createdRow = await config.api.row.save(table._id!, {}) - const rowUsage = await getRowUsage() - await config.api.row.bulkDelete(view.id, { rows: [createdRow] }) - await assertRowUsage(isInternal ? rowUsage - 1 : rowUsage) - await config.api.row.get(table._id!, createdRow._id!, { - status: 404, - }) - }) - - it("should be able to delete multiple rows", async () => { - const rows = await Promise.all([ - config.api.row.save(table._id!, {}), - config.api.row.save(table._id!, {}), - config.api.row.save(table._id!, {}), - ]) - const rowUsage = await getRowUsage() - - await config.api.row.bulkDelete(view.id, { rows: [rows[0], rows[2]] }) - - await assertRowUsage(isInternal ? rowUsage - 2 : rowUsage) - - await config.api.row.get(table._id!, rows[0]._id!, { - status: 404, - }) - await config.api.row.get(table._id!, rows[2]._id!, { - status: 404, - }) - await config.api.row.get(table._id!, rows[1]._id!, { status: 200 }) - }) - - it("should not be possible to delete a row in a calculation view", async () => { - const row = await config.api.row.save(table._id!, {}) - - const view = await config.api.viewV2.create({ - tableId: table._id!, - name: generator.guid(), - type: ViewV2Type.CALCULATION, - schema: { - id: { visible: true }, - one: { visible: true }, - }, - }) - - await config.api.row.delete( - view.id, - { _id: row._id! 
}, - { - status: 400, - body: { - message: "Cannot delete rows through a calculation view", - status: 400, - }, - } - ) - }) - }) - - describe("read", () => { - let view: ViewV2 - let table: Table - - beforeAll(async () => { - table = await config.api.table.save( - saveTableRequest({ schema: { + id: { visible: true }, Country: { - type: FieldType.STRING, - name: "Country", - }, - Story: { - type: FieldType.STRING, - name: "Story", + visible: true, }, }, }) - ) + }) - view = await config.api.viewV2.create({ - tableId: table._id!, - name: generator.guid(), - schema: { - id: { visible: true }, - Country: { - visible: true, - }, - }, + it("views have extra data trimmed", async () => { + let row = await config.api.row.save(view.id, { + Country: "Aussy", + Story: "aaaaa", + }) + + row = await config.api.row.get(table._id!, row._id!) + expect(row.Story).toBeUndefined() + expect(row.Country).toEqual("Aussy") }) }) - it("views have extra data trimmed", async () => { - let row = await config.api.row.save(view.id, { - Country: "Aussy", - Story: "aaaaa", - }) - - row = await config.api.row.get(table._id!, row._id!) - expect(row.Story).toBeUndefined() - expect(row.Country).toEqual("Aussy") - }) - }) - - describe("search", () => { - it("returns empty rows from view when no schema is passed", async () => { - const rows = await Promise.all( - Array.from({ length: 10 }, () => - config.api.row.save(table._id!, {}) + describe("search", () => { + it("returns empty rows from view when no schema is passed", async () => { + const rows = await Promise.all( + Array.from({ length: 10 }, () => + config.api.row.save(table._id!, {}) + ) ) - ) - const response = await config.api.viewV2.search(view.id) - expect(response.rows).toHaveLength(10) - expect(response).toEqual({ - rows: expect.arrayContaining( - rows.map(r => ({ - _viewId: view.id, - tableId: table._id, - id: r.id, - _id: r._id, - _rev: r._rev, - ...(isInternal - ? { - type: "row", - updatedAt: expect.any(String), - createdAt: expect.any(String), - } - : {}), - })) - ), - ...(isInternal - ? {} - : { - hasNextPage: false, - }), - }) - }) - - it("searching respects the view filters", async () => { - await config.api.row.save(table._id!, { - one: "foo", - two: "bar", - }) - const two = await config.api.row.save(table._id!, { - one: "foo2", - two: "bar2", - }) - - const view = await config.api.viewV2.create({ - tableId: table._id!, - name: generator.guid(), - queryUI: { - groups: [ - { - filters: [ - { - operator: BasicOperator.EQUAL, - field: "two", - value: "bar2", - }, - ], - }, - ], - }, - schema: { - id: { visible: true }, - one: { visible: false }, - two: { visible: true }, - }, - }) - - const response = await config.api.viewV2.search(view.id) - expect(response.rows).toHaveLength(1) - expect(response).toEqual({ - rows: expect.arrayContaining([ - { - _viewId: view.id, - tableId: table._id, - id: two.id, - two: two.two, - _id: two._id, - _rev: two._rev, - ...(isInternal - ? { - type: "row", - createdAt: expect.any(String), - updatedAt: expect.any(String), - } - : {}), - }, - ]), - ...(isInternal - ? 
{} - : { - hasNextPage: false, - }), - }) - }) - - it("views filters are respected even if the column is hidden", async () => { - await config.api.row.save(table._id!, { - one: "foo", - two: "bar", - }) - const two = await config.api.row.save(table._id!, { - one: "foo2", - two: "bar2", - }) - - const view = await config.api.viewV2.create({ - tableId: table._id!, - name: generator.guid(), - queryUI: { - groups: [ - { - filters: [ - { - operator: BasicOperator.EQUAL, - field: "two", - value: "bar2", - }, - ], - }, - ], - }, - schema: { - id: { visible: true }, - one: { visible: false }, - two: { visible: false }, - }, - }) - - const response = await config.api.viewV2.search(view.id) - expect(response.rows).toHaveLength(1) - expect(response.rows).toEqual([ - expect.objectContaining({ _id: two._id }), - ]) - }) - - it("views without data can be returned", async () => { - const response = await config.api.viewV2.search(view.id) - expect(response.rows).toHaveLength(0) - }) - - it("respects the limit parameter", async () => { - await Promise.all( - Array.from({ length: 10 }, () => - config.api.row.save(table._id!, {}) - ) - ) - const limit = generator.integer({ min: 1, max: 8 }) - const response = await config.api.viewV2.search(view.id, { - limit, - query: {}, - }) - expect(response.rows).toHaveLength(limit) - }) - - it("can handle pagination", async () => { - await Promise.all( - Array.from({ length: 10 }, () => - config.api.row.save(table._id!, {}) - ) - ) - const rows = (await config.api.viewV2.search(view.id)).rows - - const page1 = await config.api.viewV2.search(view.id, { - paginate: true, - limit: 4, - query: {}, - countRows: true, - }) - expect(page1).toEqual({ - rows: expect.arrayContaining(rows.slice(0, 4)), - hasNextPage: true, - bookmark: expect.anything(), - totalRows: 10, - }) - - const page2 = await config.api.viewV2.search(view.id, { - paginate: true, - limit: 4, - bookmark: page1.bookmark, - query: {}, - countRows: true, - }) - expect(page2).toEqual({ - rows: expect.arrayContaining(rows.slice(4, 8)), - hasNextPage: true, - bookmark: expect.anything(), - totalRows: 10, - }) - - const page3 = await config.api.viewV2.search(view.id, { - paginate: true, - limit: 4, - bookmark: page2.bookmark, - query: {}, - countRows: true, - }) - const expectation: SearchResponse = { - rows: expect.arrayContaining(rows.slice(8)), - hasNextPage: false, - totalRows: 10, - } - expect(page3).toEqual(expectation) - }) - - const sortTestOptions: [ - { - field: string - order?: SortOrder - type?: SortType - }, - string[] - ][] = [ - [ - { - field: "name", - order: SortOrder.ASCENDING, - type: SortType.STRING, - }, - ["Alice", "Bob", "Charly", "Danny"], - ], - [ - { - field: "name", - }, - ["Alice", "Bob", "Charly", "Danny"], - ], - [ - { - field: "name", - order: SortOrder.DESCENDING, - }, - ["Danny", "Charly", "Bob", "Alice"], - ], - [ - { - field: "name", - order: SortOrder.DESCENDING, - type: SortType.STRING, - }, - ["Danny", "Charly", "Bob", "Alice"], - ], - [ - { - field: "age", - order: SortOrder.ASCENDING, - type: SortType.NUMBER, - }, - ["Danny", "Alice", "Charly", "Bob"], - ], - [ - { - field: "age", - order: SortOrder.ASCENDING, - }, - ["Danny", "Alice", "Charly", "Bob"], - ], - [ - { - field: "age", - order: SortOrder.DESCENDING, - }, - ["Bob", "Charly", "Alice", "Danny"], - ], - [ - { - field: "age", - order: SortOrder.DESCENDING, - type: SortType.NUMBER, - }, - ["Bob", "Charly", "Alice", "Danny"], - ], - ] - - describe("sorting", () => { - let table: Table - const viewSchema = { - id: { 
visible: true }, - age: { visible: true }, - name: { visible: true }, - } - - beforeAll(async () => { - table = await config.api.table.save( - saveTableRequest({ - type: "table", - schema: { - name: { - type: FieldType.STRING, - name: "name", - }, - surname: { - type: FieldType.STRING, - name: "surname", - }, - age: { - type: FieldType.NUMBER, - name: "age", - }, - address: { - type: FieldType.STRING, - name: "address", - }, - jobTitle: { - type: FieldType.STRING, - name: "jobTitle", - }, - }, - }) - ) - - const users = [ - { name: "Alice", age: 25 }, - { name: "Bob", age: 30 }, - { name: "Charly", age: 27 }, - { name: "Danny", age: 15 }, - ] - await Promise.all( - users.map(u => - config.api.row.save(table._id!, { + const response = await config.api.viewV2.search(view.id) + expect(response.rows).toHaveLength(10) + expect(response).toEqual({ + rows: expect.arrayContaining( + rows.map(r => ({ + _viewId: view.id, tableId: table._id, - ...u, + id: r.id, + _id: r._id, + _rev: r._rev, + ...(isInternal + ? { + type: "row", + updatedAt: expect.any(String), + createdAt: expect.any(String), + } + : {}), + })) + ), + ...(isInternal + ? {} + : { + hasNextPage: false, + }), + }) + }) + + it("searching respects the view filters", async () => { + await config.api.row.save(table._id!, { + one: "foo", + two: "bar", + }) + const two = await config.api.row.save(table._id!, { + one: "foo2", + two: "bar2", + }) + + const view = await config.api.viewV2.create({ + tableId: table._id!, + name: generator.guid(), + queryUI: { + groups: [ + { + filters: [ + { + operator: BasicOperator.EQUAL, + field: "two", + value: "bar2", + }, + ], + }, + ], + }, + schema: { + id: { visible: true }, + one: { visible: false }, + two: { visible: true }, + }, + }) + + const response = await config.api.viewV2.search(view.id) + expect(response.rows).toHaveLength(1) + expect(response).toEqual({ + rows: expect.arrayContaining([ + { + _viewId: view.id, + tableId: table._id, + id: two.id, + two: two.two, + _id: two._id, + _rev: two._rev, + ...(isInternal + ? { + type: "row", + createdAt: expect.any(String), + updatedAt: expect.any(String), + } + : {}), + }, + ]), + ...(isInternal + ? 
{} + : { + hasNextPage: false, + }), + }) + }) + + it("views filters are respected even if the column is hidden", async () => { + await config.api.row.save(table._id!, { + one: "foo", + two: "bar", + }) + const two = await config.api.row.save(table._id!, { + one: "foo2", + two: "bar2", + }) + + const view = await config.api.viewV2.create({ + tableId: table._id!, + name: generator.guid(), + queryUI: { + groups: [ + { + filters: [ + { + operator: BasicOperator.EQUAL, + field: "two", + value: "bar2", + }, + ], + }, + ], + }, + schema: { + id: { visible: true }, + one: { visible: false }, + two: { visible: false }, + }, + }) + + const response = await config.api.viewV2.search(view.id) + expect(response.rows).toHaveLength(1) + expect(response.rows).toEqual([ + expect.objectContaining({ _id: two._id }), + ]) + }) + + it("views without data can be returned", async () => { + const response = await config.api.viewV2.search(view.id) + expect(response.rows).toHaveLength(0) + }) + + it("respects the limit parameter", async () => { + await Promise.all( + Array.from({ length: 10 }, () => + config.api.row.save(table._id!, {}) + ) + ) + const limit = generator.integer({ min: 1, max: 8 }) + const response = await config.api.viewV2.search(view.id, { + limit, + query: {}, + }) + expect(response.rows).toHaveLength(limit) + }) + + it("can handle pagination", async () => { + await Promise.all( + Array.from({ length: 10 }, () => + config.api.row.save(table._id!, {}) + ) + ) + const rows = (await config.api.viewV2.search(view.id)).rows + + const page1 = await config.api.viewV2.search(view.id, { + paginate: true, + limit: 4, + query: {}, + countRows: true, + }) + expect(page1).toEqual({ + rows: expect.arrayContaining(rows.slice(0, 4)), + hasNextPage: true, + bookmark: expect.anything(), + totalRows: 10, + }) + + const page2 = await config.api.viewV2.search(view.id, { + paginate: true, + limit: 4, + bookmark: page1.bookmark, + query: {}, + countRows: true, + }) + expect(page2).toEqual({ + rows: expect.arrayContaining(rows.slice(4, 8)), + hasNextPage: true, + bookmark: expect.anything(), + totalRows: 10, + }) + + const page3 = await config.api.viewV2.search(view.id, { + paginate: true, + limit: 4, + bookmark: page2.bookmark, + query: {}, + countRows: true, + }) + const expectation: SearchResponse = { + rows: expect.arrayContaining(rows.slice(8)), + hasNextPage: false, + totalRows: 10, + } + expect(page3).toEqual(expectation) + }) + + const sortTestOptions: [ + { + field: string + order?: SortOrder + type?: SortType + }, + string[] + ][] = [ + [ + { + field: "name", + order: SortOrder.ASCENDING, + type: SortType.STRING, + }, + ["Alice", "Bob", "Charly", "Danny"], + ], + [ + { + field: "name", + }, + ["Alice", "Bob", "Charly", "Danny"], + ], + [ + { + field: "name", + order: SortOrder.DESCENDING, + }, + ["Danny", "Charly", "Bob", "Alice"], + ], + [ + { + field: "name", + order: SortOrder.DESCENDING, + type: SortType.STRING, + }, + ["Danny", "Charly", "Bob", "Alice"], + ], + [ + { + field: "age", + order: SortOrder.ASCENDING, + type: SortType.NUMBER, + }, + ["Danny", "Alice", "Charly", "Bob"], + ], + [ + { + field: "age", + order: SortOrder.ASCENDING, + }, + ["Danny", "Alice", "Charly", "Bob"], + ], + [ + { + field: "age", + order: SortOrder.DESCENDING, + }, + ["Bob", "Charly", "Alice", "Danny"], + ], + [ + { + field: "age", + order: SortOrder.DESCENDING, + type: SortType.NUMBER, + }, + ["Bob", "Charly", "Alice", "Danny"], + ], + ] + + describe("sorting", () => { + let table: Table + const viewSchema = { + id: { 
visible: true }, + age: { visible: true }, + name: { visible: true }, + } + + beforeAll(async () => { + table = await config.api.table.save( + saveTableRequest({ + type: "table", + schema: { + name: { + type: FieldType.STRING, + name: "name", + }, + surname: { + type: FieldType.STRING, + name: "surname", + }, + age: { + type: FieldType.NUMBER, + name: "age", + }, + address: { + type: FieldType.STRING, + name: "address", + }, + jobTitle: { + type: FieldType.STRING, + name: "jobTitle", + }, + }, }) ) + + const users = [ + { name: "Alice", age: 25 }, + { name: "Bob", age: 30 }, + { name: "Charly", age: 27 }, + { name: "Danny", age: 15 }, + ] + await Promise.all( + users.map(u => + config.api.row.save(table._id!, { + tableId: table._id, + ...u, + }) + ) + ) + }) + + it.each(sortTestOptions)( + "allow sorting (%s)", + async (sortParams, expected) => { + const view = await config.api.viewV2.create({ + tableId: table._id!, + name: generator.guid(), + sort: sortParams, + schema: viewSchema, + }) + + const response = await config.api.viewV2.search(view.id) + + expect(response.rows).toHaveLength(4) + expect(response.rows).toEqual( + expected.map(name => expect.objectContaining({ name })) + ) + } + ) + + it.each(sortTestOptions)( + "allow override the default view sorting (%s)", + async (sortParams, expected) => { + const view = await config.api.viewV2.create({ + tableId: table._id!, + name: generator.guid(), + sort: { + field: "name", + order: SortOrder.ASCENDING, + type: SortType.STRING, + }, + schema: viewSchema, + }) + + const response = await config.api.viewV2.search(view.id, { + sort: sortParams.field, + sortOrder: sortParams.order, + sortType: sortParams.type, + query: {}, + }) + + expect(response.rows).toHaveLength(4) + expect(response.rows).toEqual( + expected.map(name => expect.objectContaining({ name })) + ) + } ) }) - it.each(sortTestOptions)( - "allow sorting (%s)", - async (sortParams, expected) => { - const view = await config.api.viewV2.create({ - tableId: table._id!, - name: generator.guid(), - sort: sortParams, - schema: viewSchema, + it("can query on top of the view filters", async () => { + await config.api.row.save(table._id!, { + one: "foo", + two: "bar", + }) + await config.api.row.save(table._id!, { + one: "foo2", + two: "bar2", + }) + const three = await config.api.row.save(table._id!, { + one: "foo3", + two: "bar3", + }) + + const view = await config.api.viewV2.create({ + tableId: table._id!, + name: generator.guid(), + queryUI: { + groups: [ + { + filters: [ + { + operator: BasicOperator.NOT_EQUAL, + field: "one", + value: "foo2", + }, + ], + }, + ], + }, + schema: { + id: { visible: true }, + one: { visible: true }, + two: { visible: true }, + }, + }) + + const response = await config.api.viewV2.search(view.id, { + query: { + [BasicOperator.EQUAL]: { + two: "bar3", + }, + [BasicOperator.NOT_EMPTY]: { + two: null, + }, + }, + }) + expect(response.rows).toHaveLength(1) + expect(response.rows).toEqual( + expect.arrayContaining([ + expect.objectContaining({ _id: three._id }), + ]) + ) + }) + + it("can query on top of the view filters (using or filters)", async () => { + const one = await config.api.row.save(table._id!, { + one: "foo", + two: "bar", + }) + await config.api.row.save(table._id!, { + one: "foo2", + two: "bar2", + }) + const three = await config.api.row.save(table._id!, { + one: "foo3", + two: "bar3", + }) + + const view = await config.api.viewV2.create({ + tableId: table._id!, + name: generator.guid(), + queryUI: { + groups: [ + { + filters: [ + { + operator: 
BasicOperator.NOT_EQUAL, + field: "one", + value: "foo2", + }, + ], + }, + ], + }, + schema: { + id: { visible: true }, + one: { visible: false }, + two: { visible: true }, + }, + }) + + const response = await config.api.viewV2.search(view.id, { + query: { + allOr: true, + [BasicOperator.NOT_EQUAL]: { + two: "bar", + }, + [BasicOperator.NOT_EMPTY]: { + two: null, + }, + }, + }) + expect(response.rows).toHaveLength(2) + expect(response.rows).toEqual( + expect.arrayContaining([ + expect.objectContaining({ _id: one._id }), + expect.objectContaining({ _id: three._id }), + ]) + ) + }) + + it.each([true, false])( + "can filter a view without a view filter", + async allOr => { + const one = await config.api.row.save(table._id!, { + one: "foo", + two: "bar", + }) + await config.api.row.save(table._id!, { + one: "foo2", + two: "bar2", }) - const response = await config.api.viewV2.search(view.id) - - expect(response.rows).toHaveLength(4) - expect(response.rows).toEqual( - expected.map(name => expect.objectContaining({ name })) - ) - } - ) - - it.each(sortTestOptions)( - "allow override the default view sorting (%s)", - async (sortParams, expected) => { const view = await config.api.viewV2.create({ tableId: table._id!, name: generator.guid(), - sort: { - field: "name", - order: SortOrder.ASCENDING, - type: SortType.STRING, + schema: { + id: { visible: true }, + one: { visible: false }, + two: { visible: true }, }, - schema: viewSchema, }) const response = await config.api.viewV2.search(view.id, { - sort: sortParams.field, - sortOrder: sortParams.order, - sortType: sortParams.type, - query: {}, + query: { + allOr, + equal: { + two: "bar", + }, + }, }) - - expect(response.rows).toHaveLength(4) - expect(response.rows).toEqual( - expected.map(name => expect.objectContaining({ name })) - ) + expect(response.rows).toHaveLength(1) + expect(response.rows).toEqual([ + expect.objectContaining({ _id: one._id }), + ]) } ) - }) - it("can query on top of the view filters", async () => { - await config.api.row.save(table._id!, { - one: "foo", - two: "bar", - }) - await config.api.row.save(table._id!, { - one: "foo2", - two: "bar2", - }) - const three = await config.api.row.save(table._id!, { - one: "foo3", - two: "bar3", - }) - - const view = await config.api.viewV2.create({ - tableId: table._id!, - name: generator.guid(), - queryUI: { - groups: [ - { - filters: [ - { - operator: BasicOperator.NOT_EQUAL, - field: "one", - value: "foo2", - }, - ], - }, - ], - }, - schema: { - id: { visible: true }, - one: { visible: true }, - two: { visible: true }, - }, - }) - - const response = await config.api.viewV2.search(view.id, { - query: { - [BasicOperator.EQUAL]: { - two: "bar3", - }, - [BasicOperator.NOT_EMPTY]: { - two: null, - }, - }, - }) - expect(response.rows).toHaveLength(1) - expect(response.rows).toEqual( - expect.arrayContaining([ - expect.objectContaining({ _id: three._id }), - ]) - ) - }) - - it("can query on top of the view filters (using or filters)", async () => { - const one = await config.api.row.save(table._id!, { - one: "foo", - two: "bar", - }) - await config.api.row.save(table._id!, { - one: "foo2", - two: "bar2", - }) - const three = await config.api.row.save(table._id!, { - one: "foo3", - two: "bar3", - }) - - const view = await config.api.viewV2.create({ - tableId: table._id!, - name: generator.guid(), - queryUI: { - groups: [ - { - filters: [ - { - operator: BasicOperator.NOT_EQUAL, - field: "one", - value: "foo2", - }, - ], - }, - ], - }, - schema: { - id: { visible: true }, - one: { visible: 
false }, - two: { visible: true }, - }, - }) - - const response = await config.api.viewV2.search(view.id, { - query: { - allOr: true, - [BasicOperator.NOT_EQUAL]: { - two: "bar", - }, - [BasicOperator.NOT_EMPTY]: { - two: null, - }, - }, - }) - expect(response.rows).toHaveLength(2) - expect(response.rows).toEqual( - expect.arrayContaining([ - expect.objectContaining({ _id: one._id }), - expect.objectContaining({ _id: three._id }), - ]) - ) - }) - - it.each([true, false])( - "can filter a view without a view filter", - async allOr => { - const one = await config.api.row.save(table._id!, { + it.each([true, false])("cannot bypass a view filter", async allOr => { + await config.api.row.save(table._id!, { one: "foo", two: "bar", }) @@ -3202,6 +3248,19 @@ datasourceDescribe( const view = await config.api.viewV2.create({ tableId: table._id!, name: generator.guid(), + queryUI: { + groups: [ + { + filters: [ + { + operator: BasicOperator.EQUAL, + field: "two", + value: "bar2", + }, + ], + }, + ], + }, schema: { id: { visible: true }, one: { visible: false }, @@ -3217,408 +3276,330 @@ datasourceDescribe( }, }, }) - expect(response.rows).toHaveLength(1) - expect(response.rows).toEqual([ - expect.objectContaining({ _id: one._id }), - ]) - } - ) - - it.each([true, false])("cannot bypass a view filter", async allOr => { - await config.api.row.save(table._id!, { - one: "foo", - two: "bar", - }) - await config.api.row.save(table._id!, { - one: "foo2", - two: "bar2", + expect(response.rows).toHaveLength(0) }) - const view = await config.api.viewV2.create({ - tableId: table._id!, - name: generator.guid(), - queryUI: { - groups: [ - { - filters: [ - { - operator: BasicOperator.EQUAL, - field: "two", - value: "bar2", - }, - ], - }, - ], - }, - schema: { - id: { visible: true }, - one: { visible: false }, - two: { visible: true }, - }, - }) - - const response = await config.api.viewV2.search(view.id, { - query: { - allOr, - equal: { - two: "bar", - }, - }, - }) - expect(response.rows).toHaveLength(0) - }) - - describe("foreign relationship columns", () => { - const createMainTable = async ( - links: { - name: string - tableId: string - fk: string - }[] - ) => { - const table = await config.api.table.save( - saveTableRequest({ - schema: { title: { name: "title", type: FieldType.STRING } }, - }) - ) - await config.api.table.save({ - ...table, - schema: { - ...table.schema, - ...links.reduce((acc, c) => { - acc[c.name] = { - name: c.name, - relationshipType: RelationshipType.ONE_TO_MANY, - type: FieldType.LINK, - tableId: c.tableId, - fieldName: c.fk, - constraints: { type: "array" }, - } - return acc - }, {}), - }, - }) - return table - } - const createAuxTable = (schema: TableSchema) => - config.api.table.save( - saveTableRequest({ - primaryDisplay: "name", - schema: { - ...schema, - name: { name: "name", type: FieldType.STRING }, - }, - }) - ) - - it("returns squashed fields respecting the view config", async () => { - const auxTable = await createAuxTable({ - age: { name: "age", type: FieldType.NUMBER }, - }) - const auxRow = await config.api.row.save(auxTable._id!, { - name: generator.name(), - age: generator.age(), - }) - - const table = await createMainTable([ - { name: "aux", tableId: auxTable._id!, fk: "fk_aux" }, - ]) - await config.api.row.save(table._id!, { - title: generator.word(), - aux: [auxRow], - }) - - const view = await config.api.viewV2.create({ - tableId: table._id!, - name: generator.guid(), - schema: { - title: { visible: true }, - aux: { - visible: true, - columns: { - name: { visible: 
false, readonly: false }, - age: { visible: true, readonly: true }, - }, - }, - }, - }) - - const response = await config.api.viewV2.search(view.id) - expect(response.rows).toEqual([ - expect.objectContaining({ - aux: [ - { - _id: auxRow._id, - primaryDisplay: auxRow.name, - age: auxRow.age, - }, - ], - }), - ]) - }) - - it("enriches squashed fields", async () => { - const auxTable = await createAuxTable({ - user: { - name: "user", - type: FieldType.BB_REFERENCE_SINGLE, - subtype: BBReferenceFieldSubType.USER, - constraints: { presence: true }, - }, - }) - const table = await createMainTable([ - { name: "aux", tableId: auxTable._id!, fk: "fk_aux" }, - ]) - - const user = config.getUser() - const auxRow = await config.api.row.save(auxTable._id!, { - name: generator.name(), - user: user._id, - }) - await config.api.row.save(table._id!, { - title: generator.word(), - aux: [auxRow], - }) - - const view = await config.api.viewV2.create({ - tableId: table._id!, - name: generator.guid(), - schema: { - title: { visible: true }, - aux: { - visible: true, - columns: { - name: { visible: true, readonly: true }, - user: { visible: true, readonly: true }, - }, - }, - }, - }) - - const response = await config.api.viewV2.search(view.id) - - expect(response.rows).toEqual([ - expect.objectContaining({ - aux: [ - { - _id: auxRow._id, - primaryDisplay: auxRow.name, - name: auxRow.name, - user: { - _id: user._id, - email: user.email, - firstName: user.firstName, - lastName: user.lastName, - primaryDisplay: user.email, - }, - }, - ], - }), - ]) - }) - }) - - describe("calculations", () => { - let table: Table - let rows: Row[] - - beforeAll(async () => { - table = await config.api.table.save( - saveTableRequest({ - schema: { - quantity: { - type: FieldType.NUMBER, - name: "quantity", - }, - price: { - type: FieldType.NUMBER, - name: "price", - }, - }, - }) - ) - - rows = await Promise.all( - Array.from({ length: 10 }, () => - config.api.row.save(table._id!, { - quantity: generator.natural({ min: 1, max: 10 }), - price: generator.natural({ min: 1, max: 10 }), + describe("foreign relationship columns", () => { + const createMainTable = async ( + links: { + name: string + tableId: string + fk: string + }[] + ) => { + const table = await config.api.table.save( + saveTableRequest({ + schema: { title: { name: "title", type: FieldType.STRING } }, }) ) - ) - }) - - it("should be able to search by calculations", async () => { - const view = await config.api.viewV2.create({ - tableId: table._id!, - type: ViewV2Type.CALCULATION, - name: generator.guid(), - schema: { - "Quantity Sum": { - visible: true, - calculationType: CalculationType.SUM, - field: "quantity", - }, - }, - }) - - const response = await config.api.viewV2.search(view.id, { - query: {}, - }) - - expect(response.rows).toHaveLength(1) - expect(response.rows).toEqual( - expect.arrayContaining([ - expect.objectContaining({ - "Quantity Sum": rows.reduce((acc, r) => acc + r.quantity, 0), - }), - ]) - ) - - // Calculation views do not return rows that can be linked back to - // the source table, and so should not have an _id field. 
- for (const row of response.rows) { - expect("_id" in row).toBe(false) - } - }) - - it("should be able to group by a basic field", async () => { - const view = await config.api.viewV2.create({ - tableId: table._id!, - name: generator.guid(), - type: ViewV2Type.CALCULATION, - schema: { - quantity: { - visible: true, - field: "quantity", - }, - "Total Price": { - visible: true, - calculationType: CalculationType.SUM, - field: "price", - }, - }, - }) - - const response = await config.api.viewV2.search(view.id, { - query: {}, - }) - - const priceByQuantity: Record = {} - for (const row of rows) { - priceByQuantity[row.quantity] ??= 0 - priceByQuantity[row.quantity] += row.price - } - - for (const row of response.rows) { - expect(row["Total Price"]).toEqual(priceByQuantity[row.quantity]) - } - }) - - it.each([ - CalculationType.COUNT, - CalculationType.SUM, - CalculationType.AVG, - CalculationType.MIN, - CalculationType.MAX, - ])("should be able to calculate $type", async type => { - const view = await config.api.viewV2.create({ - tableId: table._id!, - name: generator.guid(), - type: ViewV2Type.CALCULATION, - schema: { - aggregate: { - visible: true, - calculationType: type, - field: "price", - }, - }, - }) - - const response = await config.api.viewV2.search(view.id, { - query: {}, - }) - - function calculate( - type: CalculationType, - numbers: number[] - ): number { - switch (type) { - case CalculationType.COUNT: - return numbers.length - case CalculationType.SUM: - return numbers.reduce((a, b) => a + b, 0) - case CalculationType.AVG: - return numbers.reduce((a, b) => a + b, 0) / numbers.length - case CalculationType.MIN: - return Math.min(...numbers) - case CalculationType.MAX: - return Math.max(...numbers) - } - } - - const prices = rows.map(row => row.price) - const expected = calculate(type, prices) - const actual = response.rows[0].aggregate - - if (type === CalculationType.AVG) { - // The average calculation can introduce floating point rounding - // errors, so we need to compare to within a small margin of - // error. 
- expect(actual).toBeCloseTo(expected) - } else { - expect(actual).toEqual(expected) - } - }) - - it("should be able to do a COUNT(DISTINCT)", async () => { - const table = await config.api.table.save( - saveTableRequest({ + await config.api.table.save({ + ...table, schema: { - name: { - name: "name", - type: FieldType.STRING, + ...table.schema, + ...links.reduce((acc, c) => { + acc[c.name] = { + name: c.name, + relationshipType: RelationshipType.ONE_TO_MANY, + type: FieldType.LINK, + tableId: c.tableId, + fieldName: c.fk, + constraints: { type: "array" }, + } + return acc + }, {}), + }, + }) + return table + } + const createAuxTable = (schema: TableSchema) => + config.api.table.save( + saveTableRequest({ + primaryDisplay: "name", + schema: { + ...schema, + name: { name: "name", type: FieldType.STRING }, + }, + }) + ) + + it("returns squashed fields respecting the view config", async () => { + const auxTable = await createAuxTable({ + age: { name: "age", type: FieldType.NUMBER }, + }) + const auxRow = await config.api.row.save(auxTable._id!, { + name: generator.name(), + age: generator.age(), + }) + + const table = await createMainTable([ + { name: "aux", tableId: auxTable._id!, fk: "fk_aux" }, + ]) + await config.api.row.save(table._id!, { + title: generator.word(), + aux: [auxRow], + }) + + const view = await config.api.viewV2.create({ + tableId: table._id!, + name: generator.guid(), + schema: { + title: { visible: true }, + aux: { + visible: true, + columns: { + name: { visible: false, readonly: false }, + age: { visible: true, readonly: true }, + }, }, }, }) - ) - const view = await config.api.viewV2.create({ - tableId: table._id!, - name: generator.guid(), - type: ViewV2Type.CALCULATION, - schema: { - count: { - visible: true, - calculationType: CalculationType.COUNT, - distinct: true, - field: "name", - }, - }, + const response = await config.api.viewV2.search(view.id) + expect(response.rows).toEqual([ + expect.objectContaining({ + aux: [ + { + _id: auxRow._id, + primaryDisplay: auxRow.name, + age: auxRow.age, + }, + ], + }), + ]) }) - await config.api.row.bulkImport(table._id!, { - rows: [ - { - name: "John", + it("enriches squashed fields", async () => { + const auxTable = await createAuxTable({ + user: { + name: "user", + type: FieldType.BB_REFERENCE_SINGLE, + subtype: BBReferenceFieldSubType.USER, + constraints: { presence: true }, }, - { - name: "John", - }, - { - name: "Sue", - }, - ], - }) + }) + const table = await createMainTable([ + { name: "aux", tableId: auxTable._id!, fk: "fk_aux" }, + ]) - const { rows } = await config.api.row.search(view.id) - expect(rows).toHaveLength(1) - expect(rows[0].count).toEqual(2) + const user = config.getUser() + const auxRow = await config.api.row.save(auxTable._id!, { + name: generator.name(), + user: user._id, + }) + await config.api.row.save(table._id!, { + title: generator.word(), + aux: [auxRow], + }) + + const view = await config.api.viewV2.create({ + tableId: table._id!, + name: generator.guid(), + schema: { + title: { visible: true }, + aux: { + visible: true, + columns: { + name: { visible: true, readonly: true }, + user: { visible: true, readonly: true }, + }, + }, + }, + }) + + const response = await config.api.viewV2.search(view.id) + + expect(response.rows).toEqual([ + expect.objectContaining({ + aux: [ + { + _id: auxRow._id, + primaryDisplay: auxRow.name, + name: auxRow.name, + user: { + _id: user._id, + email: user.email, + firstName: user.firstName, + lastName: user.lastName, + primaryDisplay: user.email, + }, + }, + ], + }), 
+ ]) + }) }) - it("should not be able to COUNT(DISTINCT ...) against a non-existent field", async () => { - await config.api.viewV2.create( - { + describe("calculations", () => { + let table: Table + let rows: Row[] + + beforeAll(async () => { + table = await config.api.table.save( + saveTableRequest({ + schema: { + quantity: { + type: FieldType.NUMBER, + name: "quantity", + }, + price: { + type: FieldType.NUMBER, + name: "price", + }, + }, + }) + ) + + rows = await Promise.all( + Array.from({ length: 10 }, () => + config.api.row.save(table._id!, { + quantity: generator.natural({ min: 1, max: 10 }), + price: generator.natural({ min: 1, max: 10 }), + }) + ) + ) + }) + + it("should be able to search by calculations", async () => { + const view = await config.api.viewV2.create({ + tableId: table._id!, + type: ViewV2Type.CALCULATION, + name: generator.guid(), + schema: { + "Quantity Sum": { + visible: true, + calculationType: CalculationType.SUM, + field: "quantity", + }, + }, + }) + + const response = await config.api.viewV2.search(view.id, { + query: {}, + }) + + expect(response.rows).toHaveLength(1) + expect(response.rows).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + "Quantity Sum": rows.reduce( + (acc, r) => acc + r.quantity, + 0 + ), + }), + ]) + ) + + // Calculation views do not return rows that can be linked back to + // the source table, and so should not have an _id field. + for (const row of response.rows) { + expect("_id" in row).toBe(false) + } + }) + + it("should be able to group by a basic field", async () => { + const view = await config.api.viewV2.create({ + tableId: table._id!, + name: generator.guid(), + type: ViewV2Type.CALCULATION, + schema: { + quantity: { + visible: true, + field: "quantity", + }, + "Total Price": { + visible: true, + calculationType: CalculationType.SUM, + field: "price", + }, + }, + }) + + const response = await config.api.viewV2.search(view.id, { + query: {}, + }) + + const priceByQuantity: Record = {} + for (const row of rows) { + priceByQuantity[row.quantity] ??= 0 + priceByQuantity[row.quantity] += row.price + } + + for (const row of response.rows) { + expect(row["Total Price"]).toEqual( + priceByQuantity[row.quantity] + ) + } + }) + + it.each([ + CalculationType.COUNT, + CalculationType.SUM, + CalculationType.AVG, + CalculationType.MIN, + CalculationType.MAX, + ])("should be able to calculate $type", async type => { + const view = await config.api.viewV2.create({ + tableId: table._id!, + name: generator.guid(), + type: ViewV2Type.CALCULATION, + schema: { + aggregate: { + visible: true, + calculationType: type, + field: "price", + }, + }, + }) + + const response = await config.api.viewV2.search(view.id, { + query: {}, + }) + + function calculate( + type: CalculationType, + numbers: number[] + ): number { + switch (type) { + case CalculationType.COUNT: + return numbers.length + case CalculationType.SUM: + return numbers.reduce((a, b) => a + b, 0) + case CalculationType.AVG: + return numbers.reduce((a, b) => a + b, 0) / numbers.length + case CalculationType.MIN: + return Math.min(...numbers) + case CalculationType.MAX: + return Math.max(...numbers) + } + } + + const prices = rows.map(row => row.price) + const expected = calculate(type, prices) + const actual = response.rows[0].aggregate + + if (type === CalculationType.AVG) { + // The average calculation can introduce floating point rounding + // errors, so we need to compare to within a small margin of + // error. 
+ expect(actual).toBeCloseTo(expected) + } else { + expect(actual).toEqual(expected) + } + }) + + it("should be able to do a COUNT(DISTINCT)", async () => { + const table = await config.api.table.save( + saveTableRequest({ + schema: { + name: { + name: "name", + type: FieldType.STRING, + }, + }, + }) + ) + + const view = await config.api.viewV2.create({ tableId: table._id!, name: generator.guid(), type: ViewV2Type.CALCULATION, @@ -3627,71 +3608,474 @@ datasourceDescribe( visible: true, calculationType: CalculationType.COUNT, distinct: true, - field: "does not exist oh no", - }, - }, - }, - { - status: 400, - body: { - message: - 'Calculation field "count" references field "does not exist oh no" which does not exist in the table schema', - }, - } - ) - }) - - it("should be able to filter on relationships", async () => { - const companies = await config.api.table.save( - saveTableRequest({ - schema: { - name: { - name: "name", - type: FieldType.STRING, + field: "name", }, }, }) - ) - const employees = await config.api.table.save( - saveTableRequest({ - schema: { - age: { - type: FieldType.NUMBER, - name: "age", - }, - name: { - type: FieldType.STRING, - name: "name", - }, - company: { - type: FieldType.LINK, - name: "company", - tableId: companies._id!, - relationshipType: RelationshipType.ONE_TO_MANY, - fieldName: "company", - }, - }, - }) - ) - - const view = await config.api.viewV2.create({ - tableId: employees._id!, - name: generator.guid(), - type: ViewV2Type.CALCULATION, - queryUI: { - groups: [ + await config.api.row.bulkImport(table._id!, { + rows: [ { - filters: [ - { - operator: BasicOperator.EQUAL, - field: "company.name", - value: "Aperture Science Laboratories", - }, - ], + name: "John", + }, + { + name: "John", + }, + { + name: "Sue", }, ], - }, + }) + + const { rows } = await config.api.row.search(view.id) + expect(rows).toHaveLength(1) + expect(rows[0].count).toEqual(2) + }) + + it("should not be able to COUNT(DISTINCT ...) 
against a non-existent field", async () => { + await config.api.viewV2.create( + { + tableId: table._id!, + name: generator.guid(), + type: ViewV2Type.CALCULATION, + schema: { + count: { + visible: true, + calculationType: CalculationType.COUNT, + distinct: true, + field: "does not exist oh no", + }, + }, + }, + { + status: 400, + body: { + message: + 'Calculation field "count" references field "does not exist oh no" which does not exist in the table schema', + }, + } + ) + }) + + it("should be able to filter on relationships", async () => { + const companies = await config.api.table.save( + saveTableRequest({ + schema: { + name: { + name: "name", + type: FieldType.STRING, + }, + }, + }) + ) + + const employees = await config.api.table.save( + saveTableRequest({ + schema: { + age: { + type: FieldType.NUMBER, + name: "age", + }, + name: { + type: FieldType.STRING, + name: "name", + }, + company: { + type: FieldType.LINK, + name: "company", + tableId: companies._id!, + relationshipType: RelationshipType.ONE_TO_MANY, + fieldName: "company", + }, + }, + }) + ) + + const view = await config.api.viewV2.create({ + tableId: employees._id!, + name: generator.guid(), + type: ViewV2Type.CALCULATION, + queryUI: { + groups: [ + { + filters: [ + { + operator: BasicOperator.EQUAL, + field: "company.name", + value: "Aperture Science Laboratories", + }, + ], + }, + ], + }, + schema: { + sum: { + visible: true, + calculationType: CalculationType.SUM, + field: "age", + }, + }, + }) + + const apertureScience = await config.api.row.save( + companies._id!, + { + name: "Aperture Science Laboratories", + } + ) + + const blackMesa = await config.api.row.save(companies._id!, { + name: "Black Mesa", + }) + + await Promise.all([ + config.api.row.save(employees._id!, { + name: "Alice", + age: 25, + company: apertureScience._id, + }), + config.api.row.save(employees._id!, { + name: "Bob", + age: 30, + company: apertureScience._id, + }), + config.api.row.save(employees._id!, { + name: "Charly", + age: 27, + company: blackMesa._id, + }), + config.api.row.save(employees._id!, { + name: "Danny", + age: 15, + company: blackMesa._id, + }), + ]) + + const { rows } = await config.api.viewV2.search(view.id, { + query: {}, + }) + + expect(rows).toHaveLength(1) + expect(rows[0].sum).toEqual(55) + }) + + it("should be able to count non-numeric fields", async () => { + const table = await config.api.table.save( + saveTableRequest({ + schema: { + firstName: { + type: FieldType.STRING, + name: "firstName", + }, + lastName: { + type: FieldType.STRING, + name: "lastName", + }, + }, + }) + ) + + const view = await config.api.viewV2.create({ + tableId: table._id!, + name: generator.guid(), + type: ViewV2Type.CALCULATION, + schema: { + count: { + visible: true, + calculationType: CalculationType.COUNT, + field: "firstName", + }, + }, + }) + + await config.api.row.bulkImport(table._id!, { + rows: [ + { firstName: "Jane", lastName: "Smith" }, + { firstName: "Jane", lastName: "Doe" }, + { firstName: "Alice", lastName: "Smith" }, + ], + }) + + const { rows } = await config.api.viewV2.search(view.id, { + query: {}, + }) + + expect(rows).toHaveLength(1) + expect(rows[0].count).toEqual(3) + }) + + it("should be able to filter rows on the view itself", async () => { + const table = await config.api.table.save( + saveTableRequest({ + schema: { + quantity: { + type: FieldType.NUMBER, + name: "quantity", + }, + price: { + type: FieldType.NUMBER, + name: "price", + }, + }, + }) + ) + + const view = await config.api.viewV2.create({ + tableId: 
table._id!, + name: generator.guid(), + type: ViewV2Type.CALCULATION, + queryUI: { + groups: [ + { + filters: [ + { + operator: BasicOperator.EQUAL, + field: "quantity", + value: 1, + }, + ], + }, + ], + }, + schema: { + sum: { + visible: true, + calculationType: CalculationType.SUM, + field: "price", + }, + }, + }) + + await config.api.row.bulkImport(table._id!, { + rows: [ + { + quantity: 1, + price: 1, + }, + { + quantity: 1, + price: 2, + }, + { + quantity: 2, + price: 10, + }, + ], + }) + + const { rows } = await config.api.viewV2.search(view.id, { + query: {}, + }) + expect(rows).toHaveLength(1) + expect(rows[0].sum).toEqual(3) + }) + + it("should be able to filter on group by fields", async () => { + const table = await config.api.table.save( + saveTableRequest({ + schema: { + quantity: { + type: FieldType.NUMBER, + name: "quantity", + }, + price: { + type: FieldType.NUMBER, + name: "price", + }, + }, + }) + ) + + const view = await config.api.viewV2.create({ + tableId: table._id!, + name: generator.guid(), + type: ViewV2Type.CALCULATION, + schema: { + quantity: { visible: true }, + sum: { + visible: true, + calculationType: CalculationType.SUM, + field: "price", + }, + }, + }) + + await config.api.row.bulkImport(table._id!, { + rows: [ + { + quantity: 1, + price: 1, + }, + { + quantity: 1, + price: 2, + }, + { + quantity: 2, + price: 10, + }, + ], + }) + + const { rows } = await config.api.viewV2.search(view.id, { + query: { + equal: { + quantity: 1, + }, + }, + }) + + expect(rows).toHaveLength(1) + expect(rows[0].sum).toEqual(3) + }) + + it("should be able to sort by group by field", async () => { + const table = await config.api.table.save( + saveTableRequest({ + schema: { + quantity: { + type: FieldType.NUMBER, + name: "quantity", + }, + price: { + type: FieldType.NUMBER, + name: "price", + }, + }, + }) + ) + + const view = await config.api.viewV2.create({ + tableId: table._id!, + name: generator.guid(), + type: ViewV2Type.CALCULATION, + schema: { + quantity: { visible: true }, + sum: { + visible: true, + calculationType: CalculationType.SUM, + field: "price", + }, + }, + }) + + await config.api.row.bulkImport(table._id!, { + rows: [ + { + quantity: 1, + price: 1, + }, + { + quantity: 1, + price: 2, + }, + { + quantity: 2, + price: 10, + }, + ], + }) + + const { rows } = await config.api.viewV2.search(view.id, { + query: {}, + sort: "quantity", + sortOrder: SortOrder.DESCENDING, + }) + + expect(rows).toEqual([ + expect.objectContaining({ quantity: 2, sum: 10 }), + expect.objectContaining({ quantity: 1, sum: 3 }), + ]) + }) + + it("should be able to sort by a calculation", async () => { + const table = await config.api.table.save( + saveTableRequest({ + schema: { + quantity: { + type: FieldType.NUMBER, + name: "quantity", + }, + price: { + type: FieldType.NUMBER, + name: "price", + }, + }, + }) + ) + + await config.api.row.bulkImport(table._id!, { + rows: [ + { + quantity: 1, + price: 1, + }, + { + quantity: 1, + price: 2, + }, + { + quantity: 2, + price: 10, + }, + ], + }) + + const view = await config.api.viewV2.create({ + tableId: table._id!, + name: generator.guid(), + type: ViewV2Type.CALCULATION, + schema: { + quantity: { visible: true }, + sum: { + visible: true, + calculationType: CalculationType.SUM, + field: "price", + }, + }, + }) + + const { rows } = await config.api.viewV2.search(view.id, { + query: {}, + sort: "sum", + sortOrder: SortOrder.DESCENDING, + }) + + expect(rows).toEqual([ + expect.objectContaining({ quantity: 2, sum: 10 }), + expect.objectContaining({ 
quantity: 1, sum: 3 }), + ]) + }) + }) + + it("should not need required fields to be present", async () => { + const table = await config.api.table.save( + saveTableRequest({ + schema: { + name: { + name: "name", + type: FieldType.STRING, + constraints: { + presence: true, + }, + }, + age: { + name: "age", + type: FieldType.NUMBER, + }, + }, + }) + ) + + await Promise.all([ + config.api.row.save(table._id!, { name: "Steve", age: 30 }), + config.api.row.save(table._id!, { name: "Jane", age: 31 }), + ]) + + const view = await config.api.viewV2.create({ + tableId: table._id!, + name: generator.guid(), + type: ViewV2Type.CALCULATION, schema: { sum: { visible: true, @@ -3701,934 +4085,572 @@ datasourceDescribe( }, }) - const apertureScience = await config.api.row.save(companies._id!, { - name: "Aperture Science Laboratories", - }) - - const blackMesa = await config.api.row.save(companies._id!, { - name: "Black Mesa", - }) - - await Promise.all([ - config.api.row.save(employees._id!, { - name: "Alice", - age: 25, - company: apertureScience._id, - }), - config.api.row.save(employees._id!, { - name: "Bob", - age: 30, - company: apertureScience._id, - }), - config.api.row.save(employees._id!, { - name: "Charly", - age: 27, - company: blackMesa._id, - }), - config.api.row.save(employees._id!, { - name: "Danny", - age: 15, - company: blackMesa._id, - }), - ]) - - const { rows } = await config.api.viewV2.search(view.id, { + const response = await config.api.viewV2.search(view.id, { query: {}, }) - expect(rows).toHaveLength(1) - expect(rows[0].sum).toEqual(55) + expect(response.rows).toHaveLength(1) + expect(response.rows[0].sum).toEqual(61) }) - it("should be able to count non-numeric fields", async () => { + it("should be able to filter on a single user field in both the view query and search query", async () => { const table = await config.api.table.save( saveTableRequest({ schema: { - firstName: { - type: FieldType.STRING, - name: "firstName", - }, - lastName: { - type: FieldType.STRING, - name: "lastName", + user: { + name: "user", + type: FieldType.BB_REFERENCE_SINGLE, + subtype: BBReferenceFieldSubType.USER, }, }, }) ) - const view = await config.api.viewV2.create({ - tableId: table._id!, - name: generator.guid(), - type: ViewV2Type.CALCULATION, - schema: { - count: { - visible: true, - calculationType: CalculationType.COUNT, - field: "firstName", - }, - }, + await config.api.row.save(table._id!, { + user: config.getUser()._id, }) - await config.api.row.bulkImport(table._id!, { - rows: [ - { firstName: "Jane", lastName: "Smith" }, - { firstName: "Jane", lastName: "Doe" }, - { firstName: "Alice", lastName: "Smith" }, - ], - }) - - const { rows } = await config.api.viewV2.search(view.id, { - query: {}, - }) - - expect(rows).toHaveLength(1) - expect(rows[0].count).toEqual(3) - }) - - it("should be able to filter rows on the view itself", async () => { - const table = await config.api.table.save( - saveTableRequest({ - schema: { - quantity: { - type: FieldType.NUMBER, - name: "quantity", - }, - price: { - type: FieldType.NUMBER, - name: "price", - }, - }, - }) - ) - const view = await config.api.viewV2.create({ tableId: table._id!, name: generator.guid(), - type: ViewV2Type.CALCULATION, queryUI: { groups: [ { filters: [ { operator: BasicOperator.EQUAL, - field: "quantity", - value: 1, + field: "user", + value: "{{ [user].[_id] }}", }, ], }, ], }, schema: { - sum: { + user: { visible: true, - calculationType: CalculationType.SUM, - field: "price", }, }, }) - await 
config.api.row.bulkImport(table._id!, { - rows: [ - { - quantity: 1, - price: 1, - }, - { - quantity: 1, - price: 2, - }, - { - quantity: 2, - price: 10, - }, - ], - }) - - const { rows } = await config.api.viewV2.search(view.id, { - query: {}, - }) - expect(rows).toHaveLength(1) - expect(rows[0].sum).toEqual(3) - }) - - it("should be able to filter on group by fields", async () => { - const table = await config.api.table.save( - saveTableRequest({ - schema: { - quantity: { - type: FieldType.NUMBER, - name: "quantity", - }, - price: { - type: FieldType.NUMBER, - name: "price", - }, - }, - }) - ) - - const view = await config.api.viewV2.create({ - tableId: table._id!, - name: generator.guid(), - type: ViewV2Type.CALCULATION, - schema: { - quantity: { visible: true }, - sum: { - visible: true, - calculationType: CalculationType.SUM, - field: "price", - }, - }, - }) - - await config.api.row.bulkImport(table._id!, { - rows: [ - { - quantity: 1, - price: 1, - }, - { - quantity: 1, - price: 2, - }, - { - quantity: 2, - price: 10, - }, - ], - }) - const { rows } = await config.api.viewV2.search(view.id, { query: { equal: { - quantity: 1, + user: "{{ [user].[_id] }}", }, }, }) expect(rows).toHaveLength(1) - expect(rows[0].sum).toEqual(3) + expect(rows[0].user._id).toEqual(config.getUser()._id) }) - it("should be able to sort by group by field", async () => { - const table = await config.api.table.save( - saveTableRequest({ - schema: { - quantity: { - type: FieldType.NUMBER, - name: "quantity", - }, - price: { - type: FieldType.NUMBER, - name: "price", - }, - }, - }) - ) - - const view = await config.api.viewV2.create({ - tableId: table._id!, - name: generator.guid(), - type: ViewV2Type.CALCULATION, - schema: { - quantity: { visible: true }, - sum: { - visible: true, - calculationType: CalculationType.SUM, - field: "price", - }, - }, - }) - - await config.api.row.bulkImport(table._id!, { - rows: [ - { - quantity: 1, - price: 1, - }, - { - quantity: 1, - price: 2, - }, - { - quantity: 2, - price: 10, - }, - ], - }) - - const { rows } = await config.api.viewV2.search(view.id, { - query: {}, - sort: "quantity", - sortOrder: SortOrder.DESCENDING, - }) - - expect(rows).toEqual([ - expect.objectContaining({ quantity: 2, sum: 10 }), - expect.objectContaining({ quantity: 1, sum: 3 }), - ]) - }) - - it("should be able to sort by a calculation", async () => { - const table = await config.api.table.save( - saveTableRequest({ - schema: { - quantity: { - type: FieldType.NUMBER, - name: "quantity", - }, - price: { - type: FieldType.NUMBER, - name: "price", - }, - }, - }) - ) - - await config.api.row.bulkImport(table._id!, { - rows: [ - { - quantity: 1, - price: 1, - }, - { - quantity: 1, - price: 2, - }, - { - quantity: 2, - price: 10, - }, - ], - }) - - const view = await config.api.viewV2.create({ - tableId: table._id!, - name: generator.guid(), - type: ViewV2Type.CALCULATION, - schema: { - quantity: { visible: true }, - sum: { - visible: true, - calculationType: CalculationType.SUM, - field: "price", - }, - }, - }) - - const { rows } = await config.api.viewV2.search(view.id, { - query: {}, - sort: "sum", - sortOrder: SortOrder.DESCENDING, - }) - - expect(rows).toEqual([ - expect.objectContaining({ quantity: 2, sum: 10 }), - expect.objectContaining({ quantity: 1, sum: 3 }), - ]) - }) - }) - - it("should not need required fields to be present", async () => { - const table = await config.api.table.save( - saveTableRequest({ - schema: { - name: { - name: "name", - type: FieldType.STRING, - constraints: { - 
presence: true, - }, - }, - age: { - name: "age", - type: FieldType.NUMBER, - }, - }, - }) - ) - - await Promise.all([ - config.api.row.save(table._id!, { name: "Steve", age: 30 }), - config.api.row.save(table._id!, { name: "Jane", age: 31 }), - ]) - - const view = await config.api.viewV2.create({ - tableId: table._id!, - name: generator.guid(), - type: ViewV2Type.CALCULATION, - schema: { - sum: { - visible: true, - calculationType: CalculationType.SUM, - field: "age", - }, - }, - }) - - const response = await config.api.viewV2.search(view.id, { - query: {}, - }) - - expect(response.rows).toHaveLength(1) - expect(response.rows[0].sum).toEqual(61) - }) - - it("should be able to filter on a single user field in both the view query and search query", async () => { - const table = await config.api.table.save( - saveTableRequest({ - schema: { - user: { - name: "user", - type: FieldType.BB_REFERENCE_SINGLE, - subtype: BBReferenceFieldSubType.USER, - }, - }, - }) - ) - - await config.api.row.save(table._id!, { - user: config.getUser()._id, - }) - - const view = await config.api.viewV2.create({ - tableId: table._id!, - name: generator.guid(), - queryUI: { - groups: [ - { - filters: [ - { - operator: BasicOperator.EQUAL, - field: "user", - value: "{{ [user].[_id] }}", + describe("search operators", () => { + let table: Table + beforeEach(async () => { + table = await config.api.table.save( + saveTableRequest({ + schema: { + string: { name: "string", type: FieldType.STRING }, + longform: { name: "longform", type: FieldType.LONGFORM }, + options: { + name: "options", + type: FieldType.OPTIONS, + constraints: { inclusion: ["a", "b", "c"] }, }, - ], - }, - ], - }, - schema: { - user: { - visible: true, - }, - }, - }) - - const { rows } = await config.api.viewV2.search(view.id, { - query: { - equal: { - user: "{{ [user].[_id] }}", - }, - }, - }) - - expect(rows).toHaveLength(1) - expect(rows[0].user._id).toEqual(config.getUser()._id) - }) - - describe("search operators", () => { - let table: Table - beforeEach(async () => { - table = await config.api.table.save( - saveTableRequest({ - schema: { - string: { name: "string", type: FieldType.STRING }, - longform: { name: "longform", type: FieldType.LONGFORM }, - options: { - name: "options", - type: FieldType.OPTIONS, - constraints: { inclusion: ["a", "b", "c"] }, - }, - array: { - name: "array", - type: FieldType.ARRAY, - constraints: { - type: JsonFieldSubType.ARRAY, - inclusion: ["a", "b", "c"], - }, - }, - number: { name: "number", type: FieldType.NUMBER }, - bigint: { name: "bigint", type: FieldType.BIGINT }, - datetime: { name: "datetime", type: FieldType.DATETIME }, - boolean: { name: "boolean", type: FieldType.BOOLEAN }, - user: { - name: "user", - type: FieldType.BB_REFERENCE_SINGLE, - subtype: BBReferenceFieldSubType.USER, - }, - users: { - name: "users", - type: FieldType.BB_REFERENCE, - subtype: BBReferenceFieldSubType.USER, - constraints: { - type: JsonFieldSubType.ARRAY, + array: { + name: "array", + type: FieldType.ARRAY, + constraints: { + type: JsonFieldSubType.ARRAY, + inclusion: ["a", "b", "c"], + }, + }, + number: { name: "number", type: FieldType.NUMBER }, + bigint: { name: "bigint", type: FieldType.BIGINT }, + datetime: { name: "datetime", type: FieldType.DATETIME }, + boolean: { name: "boolean", type: FieldType.BOOLEAN }, + user: { + name: "user", + type: FieldType.BB_REFERENCE_SINGLE, + subtype: BBReferenceFieldSubType.USER, + }, + users: { + name: "users", + type: FieldType.BB_REFERENCE, + subtype: BBReferenceFieldSubType.USER, + 
constraints: { + type: JsonFieldSubType.ARRAY, + }, }, }, + }) + ) + }) + + interface TestCase { + name: string + query: UISearchFilter | (() => UISearchFilter) + insert: Row[] | (() => Row[]) + expected: Row[] | (() => Row[]) + searchOpts?: Partial + } + + function simpleQuery(...filters: LegacyFilter[]): UISearchFilter { + return { groups: [{ filters }] } + } + + const testCases: TestCase[] = [ + { + name: "empty query return all", + insert: [{ string: "foo" }], + query: { + onEmptyFilter: EmptyFilterOption.RETURN_ALL, }, - }) - ) - }) - - interface TestCase { - name: string - query: UISearchFilter | (() => UISearchFilter) - insert: Row[] | (() => Row[]) - expected: Row[] | (() => Row[]) - searchOpts?: Partial - } - - function simpleQuery(...filters: LegacyFilter[]): UISearchFilter { - return { groups: [{ filters }] } - } - - const testCases: TestCase[] = [ - { - name: "empty query return all", - insert: [{ string: "foo" }], - query: { - onEmptyFilter: EmptyFilterOption.RETURN_ALL, + expected: [{ string: "foo" }], }, - expected: [{ string: "foo" }], - }, - { - name: "empty query return none", - insert: [{ string: "foo" }], - query: { - onEmptyFilter: EmptyFilterOption.RETURN_NONE, + { + name: "empty query return none", + insert: [{ string: "foo" }], + query: { + onEmptyFilter: EmptyFilterOption.RETURN_NONE, + }, + expected: [], }, - expected: [], - }, - { - name: "simple string search", - insert: [{ string: "foo" }], - query: simpleQuery({ - operator: BasicOperator.EQUAL, - field: "string", - value: "foo", - }), - expected: [{ string: "foo" }], - }, - { - name: "non matching string search", - insert: [{ string: "foo" }], - query: simpleQuery({ - operator: BasicOperator.EQUAL, - field: "string", - value: "bar", - }), - expected: [], - }, - { - name: "allOr", - insert: [{ string: "bar" }, { string: "foo" }], - query: simpleQuery( - { + { + name: "simple string search", + insert: [{ string: "foo" }], + query: simpleQuery({ operator: BasicOperator.EQUAL, field: "string", value: "foo", - }, - { + }), + expected: [{ string: "foo" }], + }, + { + name: "non matching string search", + insert: [{ string: "foo" }], + query: simpleQuery({ operator: BasicOperator.EQUAL, field: "string", value: "bar", + }), + expected: [], + }, + { + name: "allOr", + insert: [{ string: "bar" }, { string: "foo" }], + query: simpleQuery( + { + operator: BasicOperator.EQUAL, + field: "string", + value: "foo", + }, + { + operator: BasicOperator.EQUAL, + field: "string", + value: "bar", + }, + { + operator: "allOr", + } + ), + searchOpts: { + sort: "string", + sortOrder: SortOrder.ASCENDING, }, - { - operator: "allOr", - } - ), - searchOpts: { - sort: "string", - sortOrder: SortOrder.ASCENDING, + expected: [{ string: "bar" }, { string: "foo" }], }, - expected: [{ string: "bar" }, { string: "foo" }], - }, - { - name: "can find rows with fuzzy search", - insert: [{ string: "foo" }, { string: "bar" }], - query: simpleQuery({ - operator: BasicOperator.FUZZY, - field: "string", - value: "fo", - }), - expected: [{ string: "foo" }], - }, - { - name: "can find nothing with fuzzy search", - insert: [{ string: "foo" }, { string: "bar" }], - query: simpleQuery({ - operator: BasicOperator.FUZZY, - field: "string", - value: "baz", - }), - expected: [], - }, - { - name: "can find numeric rows", - insert: [{ number: 1 }, { number: 2 }], - query: simpleQuery({ - operator: BasicOperator.EQUAL, - field: "number", - value: 1, - }), - expected: [{ number: 1 }], - }, - { - name: "can find numeric values with rangeHigh", - insert: [{ number: 1 
}, { number: 2 }, { number: 3 }], - query: simpleQuery({ - operator: "rangeHigh", - field: "number", - value: 2, - }), - searchOpts: { - sort: "number", - sortOrder: SortOrder.ASCENDING, + { + name: "can find rows with fuzzy search", + insert: [{ string: "foo" }, { string: "bar" }], + query: simpleQuery({ + operator: BasicOperator.FUZZY, + field: "string", + value: "fo", + }), + expected: [{ string: "foo" }], }, - expected: [{ number: 1 }, { number: 2 }], - }, - { - name: "can find numeric values with rangeLow", - insert: [{ number: 1 }, { number: 2 }, { number: 3 }], - query: simpleQuery({ - operator: "rangeLow", - field: "number", - value: 2, - }), - searchOpts: { - sort: "number", - sortOrder: SortOrder.ASCENDING, + { + name: "can find nothing with fuzzy search", + insert: [{ string: "foo" }, { string: "bar" }], + query: simpleQuery({ + operator: BasicOperator.FUZZY, + field: "string", + value: "baz", + }), + expected: [], }, - expected: [{ number: 2 }, { number: 3 }], - }, - { - name: "can find numeric values with full range", - insert: [{ number: 1 }, { number: 2 }, { number: 3 }], - query: simpleQuery( - { + { + name: "can find numeric rows", + insert: [{ number: 1 }, { number: 2 }], + query: simpleQuery({ + operator: BasicOperator.EQUAL, + field: "number", + value: 1, + }), + expected: [{ number: 1 }], + }, + { + name: "can find numeric values with rangeHigh", + insert: [{ number: 1 }, { number: 2 }, { number: 3 }], + query: simpleQuery({ operator: "rangeHigh", field: "number", value: 2, + }), + searchOpts: { + sort: "number", + sortOrder: SortOrder.ASCENDING, }, - { + expected: [{ number: 1 }, { number: 2 }], + }, + { + name: "can find numeric values with rangeLow", + insert: [{ number: 1 }, { number: 2 }, { number: 3 }], + query: simpleQuery({ operator: "rangeLow", field: "number", value: 2, - } - ), - expected: [{ number: 2 }], - }, - { - name: "can find longform values", - insert: [{ longform: "foo" }, { longform: "bar" }], - query: simpleQuery({ - operator: BasicOperator.EQUAL, - field: "longform", - value: "foo", - }), - expected: [{ longform: "foo" }], - }, - { - name: "can find options values", - insert: [{ options: "a" }, { options: "b" }], - query: simpleQuery({ - operator: BasicOperator.EQUAL, - field: "options", - value: "a", - }), - expected: [{ options: "a" }], - }, - { - name: "can find array values", - insert: [ - // Number field here is just to guarantee order. 
- { number: 1, array: ["a"] }, - { number: 2, array: ["b"] }, - { number: 3, array: ["a", "c"] }, - ], - query: simpleQuery({ - operator: ArrayOperator.CONTAINS, - field: "array", - value: "a", - }), - searchOpts: { - sort: "number", - sortOrder: SortOrder.ASCENDING, - }, - expected: [{ array: ["a"] }, { array: ["a", "c"] }], - }, - { - name: "can find bigint values", - insert: [{ bigint: "1" }, { bigint: "2" }], - query: simpleQuery({ - operator: BasicOperator.EQUAL, - field: "bigint", - type: FieldType.BIGINT, - value: "1", - }), - expected: [{ bigint: "1" }], - }, - { - name: "can find datetime values", - insert: [ - { datetime: "2021-01-01T00:00:00.000Z" }, - { datetime: "2021-01-02T00:00:00.000Z" }, - ], - query: simpleQuery({ - operator: BasicOperator.EQUAL, - field: "datetime", - type: FieldType.DATETIME, - value: "2021-01-01", - }), - expected: [{ datetime: "2021-01-01T00:00:00.000Z" }], - }, - { - name: "can find boolean values", - insert: [{ boolean: true }, { boolean: false }], - query: simpleQuery({ - operator: BasicOperator.EQUAL, - field: "boolean", - value: true, - }), - expected: [{ boolean: true }], - }, - { - name: "can find user values", - insert: () => [{ user: config.getUser() }], - query: () => - simpleQuery({ - operator: BasicOperator.EQUAL, - field: "user", - value: config.getUser()._id, }), - expected: () => [ - { - user: expect.objectContaining({ _id: config.getUser()._id }), + searchOpts: { + sort: "number", + sortOrder: SortOrder.ASCENDING, }, - ], - }, - { - name: "can find users values", - insert: () => [{ users: [config.getUser()] }], - query: () => - simpleQuery({ - operator: ArrayOperator.CONTAINS, - field: "users", - value: [config.getUser()._id], + expected: [{ number: 2 }, { number: 3 }], + }, + { + name: "can find numeric values with full range", + insert: [{ number: 1 }, { number: 2 }, { number: 3 }], + query: simpleQuery( + { + operator: "rangeHigh", + field: "number", + value: 2, + }, + { + operator: "rangeLow", + field: "number", + value: 2, + } + ), + expected: [{ number: 2 }], + }, + { + name: "can find longform values", + insert: [{ longform: "foo" }, { longform: "bar" }], + query: simpleQuery({ + operator: BasicOperator.EQUAL, + field: "longform", + value: "foo", }), - expected: () => [ - { - users: [ - expect.objectContaining({ _id: config.getUser()._id }), + expected: [{ longform: "foo" }], + }, + { + name: "can find options values", + insert: [{ options: "a" }, { options: "b" }], + query: simpleQuery({ + operator: BasicOperator.EQUAL, + field: "options", + value: "a", + }), + expected: [{ options: "a" }], + }, + { + name: "can find array values", + insert: [ + // Number field here is just to guarantee order. 
+ { number: 1, array: ["a"] }, + { number: 2, array: ["b"] }, + { number: 3, array: ["a", "c"] }, + ], + query: simpleQuery({ + operator: ArrayOperator.CONTAINS, + field: "array", + value: "a", + }), + searchOpts: { + sort: "number", + sortOrder: SortOrder.ASCENDING, + }, + expected: [{ array: ["a"] }, { array: ["a", "c"] }], + }, + { + name: "can find bigint values", + insert: [{ bigint: "1" }, { bigint: "2" }], + query: simpleQuery({ + operator: BasicOperator.EQUAL, + field: "bigint", + type: FieldType.BIGINT, + value: "1", + }), + expected: [{ bigint: "1" }], + }, + { + name: "can find datetime values", + insert: [ + { datetime: "2021-01-01T00:00:00.000Z" }, + { datetime: "2021-01-02T00:00:00.000Z" }, + ], + query: simpleQuery({ + operator: BasicOperator.EQUAL, + field: "datetime", + type: FieldType.DATETIME, + value: "2021-01-01", + }), + expected: [{ datetime: "2021-01-01T00:00:00.000Z" }], + }, + { + name: "can find boolean values", + insert: [{ boolean: true }, { boolean: false }], + query: simpleQuery({ + operator: BasicOperator.EQUAL, + field: "boolean", + value: true, + }), + expected: [{ boolean: true }], + }, + { + name: "can find user values", + insert: () => [{ user: config.getUser() }], + query: () => + simpleQuery({ + operator: BasicOperator.EQUAL, + field: "user", + value: config.getUser()._id, + }), + expected: () => [ + { + user: expect.objectContaining({ + _id: config.getUser()._id, + }), + }, + ], + }, + { + name: "can find users values", + insert: () => [{ users: [config.getUser()] }], + query: () => + simpleQuery({ + operator: ArrayOperator.CONTAINS, + field: "users", + value: [config.getUser()._id], + }), + expected: () => [ + { + users: [ + expect.objectContaining({ _id: config.getUser()._id }), + ], + }, + ], + }, + { + name: "can handle logical operator any", + insert: [{ string: "bar" }, { string: "foo" }], + query: { + groups: [ + { + logicalOperator: UILogicalOperator.ANY, + filters: [ + { + operator: BasicOperator.EQUAL, + field: "string", + value: "foo", + }, + { + operator: BasicOperator.EQUAL, + field: "string", + value: "bar", + }, + ], + }, ], }, - ], - }, - { - name: "can handle logical operator any", - insert: [{ string: "bar" }, { string: "foo" }], - query: { - groups: [ - { - logicalOperator: UILogicalOperator.ANY, - filters: [ - { - operator: BasicOperator.EQUAL, - field: "string", - value: "foo", - }, - { - operator: BasicOperator.EQUAL, - field: "string", - value: "bar", - }, - ], - }, - ], - }, - searchOpts: { - sort: "string", - sortOrder: SortOrder.ASCENDING, - }, - expected: [{ string: "bar" }, { string: "foo" }], - }, - { - name: "can handle logical operator all", - insert: [ - { string: "bar", number: 1 }, - { string: "foo", number: 2 }, - ], - query: { - groups: [ - { - logicalOperator: UILogicalOperator.ALL, - filters: [ - { - operator: BasicOperator.EQUAL, - field: "string", - value: "foo", - }, - { - operator: BasicOperator.EQUAL, - field: "number", - value: 2, - }, - ], - }, - ], - }, - searchOpts: { - sort: "string", - sortOrder: SortOrder.ASCENDING, - }, - expected: [{ string: "foo", number: 2 }], - }, - { - name: "overrides allOr with logical operators", - insert: [ - { string: "bar", number: 1 }, - { string: "foo", number: 1 }, - ], - query: { - groups: [ - { - logicalOperator: UILogicalOperator.ALL, - filters: [ - { operator: "allOr" }, - { - operator: BasicOperator.EQUAL, - field: "string", - value: "foo", - }, - { - operator: BasicOperator.EQUAL, - field: "number", - value: 1, - }, - ], - }, - ], - }, - searchOpts: { - sort: 
"string", - sortOrder: SortOrder.ASCENDING, - }, - expected: [{ string: "foo", number: 1 }], - }, - ] - - it.each(testCases)( - "$name", - async ({ query, insert, expected, searchOpts }) => { - // Some values can't be specified outside of a test (e.g. getting - // config.getUser(), it won't be initialised), so we use functions - // in those cases. - if (typeof insert === "function") { - insert = insert() - } - if (typeof expected === "function") { - expected = expected() - } - if (typeof query === "function") { - query = query() - } - - await config.api.row.bulkImport(table._id!, { rows: insert }) - - const view = await config.api.viewV2.create({ - tableId: table._id!, - name: generator.guid(), - queryUI: query, - schema: { - string: { visible: true }, - longform: { visible: true }, - options: { visible: true }, - array: { visible: true }, - number: { visible: true }, - bigint: { visible: true }, - datetime: { visible: true }, - boolean: { visible: true }, - user: { visible: true }, - users: { visible: true }, + searchOpts: { + sort: "string", + sortOrder: SortOrder.ASCENDING, }, - }) + expected: [{ string: "bar" }, { string: "foo" }], + }, + { + name: "can handle logical operator all", + insert: [ + { string: "bar", number: 1 }, + { string: "foo", number: 2 }, + ], + query: { + groups: [ + { + logicalOperator: UILogicalOperator.ALL, + filters: [ + { + operator: BasicOperator.EQUAL, + field: "string", + value: "foo", + }, + { + operator: BasicOperator.EQUAL, + field: "number", + value: 2, + }, + ], + }, + ], + }, + searchOpts: { + sort: "string", + sortOrder: SortOrder.ASCENDING, + }, + expected: [{ string: "foo", number: 2 }], + }, + { + name: "overrides allOr with logical operators", + insert: [ + { string: "bar", number: 1 }, + { string: "foo", number: 1 }, + ], + query: { + groups: [ + { + logicalOperator: UILogicalOperator.ALL, + filters: [ + { operator: "allOr" }, + { + operator: BasicOperator.EQUAL, + field: "string", + value: "foo", + }, + { + operator: BasicOperator.EQUAL, + field: "number", + value: 1, + }, + ], + }, + ], + }, + searchOpts: { + sort: "string", + sortOrder: SortOrder.ASCENDING, + }, + expected: [{ string: "foo", number: 1 }], + }, + ] - const { rows } = await config.api.viewV2.search(view.id, { - query: {}, - ...searchOpts, - }) - expect(rows).toEqual( - expected.map(r => expect.objectContaining(r)) - ) - } - ) - }) - }) + it.each(testCases)( + "$name", + async ({ query, insert, expected, searchOpts }) => { + // Some values can't be specified outside of a test (e.g. getting + // config.getUser(), it won't be initialised), so we use functions + // in those cases. 
+ if (typeof insert === "function") { + insert = insert() + } + if (typeof expected === "function") { + expected = expected() + } + if (typeof query === "function") { + query = query() + } - describe("permissions", () => { - beforeEach(async () => { - await Promise.all( - Array.from({ length: 10 }, () => - config.api.row.save(table._id!, {}) + await config.api.row.bulkImport(table._id!, { rows: insert }) + + const view = await config.api.viewV2.create({ + tableId: table._id!, + name: generator.guid(), + queryUI: query, + schema: { + string: { visible: true }, + longform: { visible: true }, + options: { visible: true }, + array: { visible: true }, + number: { visible: true }, + bigint: { visible: true }, + datetime: { visible: true }, + boolean: { visible: true }, + user: { visible: true }, + users: { visible: true }, + }, + }) + + const { rows } = await config.api.viewV2.search(view.id, { + query: {}, + ...searchOpts, + }) + expect(rows).toEqual( + expected.map(r => expect.objectContaining(r)) + ) + } ) - ) - }) - - it("does not allow public users to fetch by default", async () => { - await config.publish() - await config.api.viewV2.publicSearch(view.id, undefined, { - status: 401, }) }) - it("allow public users to fetch when permissions are explicit", async () => { - await config.api.permission.add({ - roleId: roles.BUILTIN_ROLE_IDS.PUBLIC, - level: PermissionLevel.READ, - resourceId: view.id, + describe("permissions", () => { + beforeEach(async () => { + await Promise.all( + Array.from({ length: 10 }, () => + config.api.row.save(table._id!, {}) + ) + ) }) - await config.publish() - const response = await config.api.viewV2.publicSearch(view.id) - - expect(response.rows).toHaveLength(10) - }) - - it("allow public users to fetch when permissions are inherited", async () => { - await config.api.permission.add({ - roleId: roles.BUILTIN_ROLE_IDS.PUBLIC, - level: PermissionLevel.READ, - resourceId: table._id!, + it("does not allow public users to fetch by default", async () => { + await config.publish() + await config.api.viewV2.publicSearch(view.id, undefined, { + status: 401, + }) }) - await config.api.permission.revoke({ - roleId: roles.BUILTIN_ROLE_IDS.PUBLIC, // Don't think this matters since we are revoking the permission - level: PermissionLevel.READ, - resourceId: view.id, + + it("allow public users to fetch when permissions are explicit", async () => { + await config.api.permission.add({ + roleId: roles.BUILTIN_ROLE_IDS.PUBLIC, + level: PermissionLevel.READ, + resourceId: view.id, + }) + await config.publish() + + const response = await config.api.viewV2.publicSearch(view.id) + + expect(response.rows).toHaveLength(10) }) - await config.publish() - const response = await config.api.viewV2.publicSearch(view.id) + it("allow public users to fetch when permissions are inherited", async () => { + await config.api.permission.add({ + roleId: roles.BUILTIN_ROLE_IDS.PUBLIC, + level: PermissionLevel.READ, + resourceId: table._id!, + }) + await config.api.permission.revoke({ + roleId: roles.BUILTIN_ROLE_IDS.PUBLIC, // Don't think this matters since we are revoking the permission + level: PermissionLevel.READ, + resourceId: view.id, + }) + await config.publish() - expect(response.rows).toHaveLength(10) - }) + const response = await config.api.viewV2.publicSearch(view.id) - it("respects inherited permissions, not allowing not public views from public tables", async () => { - await config.api.permission.add({ - roleId: roles.BUILTIN_ROLE_IDS.PUBLIC, - level: PermissionLevel.READ, - resourceId: 
table._id!, + expect(response.rows).toHaveLength(10) }) - await config.api.permission.add({ - roleId: roles.BUILTIN_ROLE_IDS.POWER, - level: PermissionLevel.READ, - resourceId: view.id, - }) - await config.publish() - await config.api.viewV2.publicSearch(view.id, undefined, { - status: 401, + it("respects inherited permissions, not allowing not public views from public tables", async () => { + await config.api.permission.add({ + roleId: roles.BUILTIN_ROLE_IDS.PUBLIC, + level: PermissionLevel.READ, + resourceId: table._id!, + }) + await config.api.permission.add({ + roleId: roles.BUILTIN_ROLE_IDS.POWER, + level: PermissionLevel.READ, + resourceId: view.id, + }) + await config.publish() + + await config.api.viewV2.publicSearch(view.id, undefined, { + status: 401, + }) }) }) }) - }) - } -) + } + ) +} diff --git a/packages/server/src/automations/tests/executeQuery.spec.ts b/packages/server/src/automations/tests/executeQuery.spec.ts index 12bbe892a0..2d65be6e58 100644 --- a/packages/server/src/automations/tests/executeQuery.spec.ts +++ b/packages/server/src/automations/tests/executeQuery.spec.ts @@ -7,71 +7,74 @@ import { import { Knex } from "knex" import { generator } from "@budibase/backend-core/tests" -datasourceDescribe( - { - name: "execute query action", - exclude: [DatabaseName.MONGODB, DatabaseName.SQS], - }, - ({ config, dsProvider }) => { - let tableName: string - let client: Knex - let datasource: Datasource - let query: Query +const descriptions = datasourceDescribe({ + exclude: [DatabaseName.MONGODB, DatabaseName.SQS], +}) - beforeAll(async () => { - const ds = await dsProvider() - datasource = ds.datasource! - client = ds.client! - }) +if (descriptions.length) { + describe.each(descriptions)( + "execute query action ($dbName)", + ({ config, dsProvider }) => { + let tableName: string + let client: Knex + let datasource: Datasource + let query: Query - beforeEach(async () => { - tableName = generator.guid() - await client.schema.createTable(tableName, table => { - table.string("a") - table.integer("b") + beforeAll(async () => { + const ds = await dsProvider() + datasource = ds.datasource! + client = ds.client! 
}) - await client(tableName).insert({ a: "string", b: 1 }) - query = await setup.saveTestQuery(config, client, tableName, datasource) - }) - afterEach(async () => { - await client.schema.dropTable(tableName) - }) + beforeEach(async () => { + tableName = generator.guid() + await client.schema.createTable(tableName, table => { + table.string("a") + table.integer("b") + }) + await client(tableName).insert({ a: "string", b: 1 }) + query = await setup.saveTestQuery(config, client, tableName, datasource) + }) - it("should be able to execute a query", async () => { - let res = await setup.runStep( - config, - setup.actions.EXECUTE_QUERY.stepId, - { - query: { queryId: query._id }, - } - ) - expect(res.response).toEqual([{ a: "string", b: 1 }]) - expect(res.success).toEqual(true) - }) + afterEach(async () => { + await client.schema.dropTable(tableName) + }) - it("should handle a null query value", async () => { - let res = await setup.runStep( - config, - setup.actions.EXECUTE_QUERY.stepId, - { - query: null, - } - ) - expect(res.response.message).toEqual("Invalid inputs") - expect(res.success).toEqual(false) - }) + it("should be able to execute a query", async () => { + let res = await setup.runStep( + config, + setup.actions.EXECUTE_QUERY.stepId, + { + query: { queryId: query._id }, + } + ) + expect(res.response).toEqual([{ a: "string", b: 1 }]) + expect(res.success).toEqual(true) + }) - it("should handle an error executing a query", async () => { - let res = await setup.runStep( - config, - setup.actions.EXECUTE_QUERY.stepId, - { - query: { queryId: "wrong_id" }, - } - ) - expect(res.response).toBeDefined() - expect(res.success).toEqual(false) - }) - } -) + it("should handle a null query value", async () => { + let res = await setup.runStep( + config, + setup.actions.EXECUTE_QUERY.stepId, + { + query: null, + } + ) + expect(res.response.message).toEqual("Invalid inputs") + expect(res.success).toEqual(false) + }) + + it("should handle an error executing a query", async () => { + let res = await setup.runStep( + config, + setup.actions.EXECUTE_QUERY.stepId, + { + query: { queryId: "wrong_id" }, + } + ) + expect(res.response).toBeDefined() + expect(res.success).toEqual(false) + }) + } + ) +} diff --git a/packages/server/src/automations/tests/scenarios/scenarios.spec.ts b/packages/server/src/automations/tests/scenarios/scenarios.spec.ts index dcfd4a4341..45b251f4c1 100644 --- a/packages/server/src/automations/tests/scenarios/scenarios.spec.ts +++ b/packages/server/src/automations/tests/scenarios/scenarios.spec.ts @@ -433,9 +433,10 @@ describe("Automation Scenarios", () => { }) }) -datasourceDescribe( - { name: "", only: [DatabaseName.MYSQL] }, - ({ config, dsProvider }) => { +const descriptions = datasourceDescribe({ only: [DatabaseName.MYSQL] }) + +if (descriptions.length) { + describe.each(descriptions)("/rows ($dbName)", ({ config, dsProvider }) => { let datasource: Datasource let client: Knex @@ -531,5 +532,5 @@ datasourceDescribe( ) }) }) - } -) + }) +} diff --git a/packages/server/src/integration-test/mysql.spec.ts b/packages/server/src/integration-test/mysql.spec.ts index 8edf6a0190..9cf7242e24 100644 --- a/packages/server/src/integration-test/mysql.spec.ts +++ b/packages/server/src/integration-test/mysql.spec.ts @@ -10,119 +10,123 @@ function uniqueTableName(length?: number): string { .substring(0, length || 10) } -datasourceDescribe( - { - name: "Integration compatibility with mysql search_path", - only: [DatabaseName.MYSQL], - }, - ({ config, dsProvider }) => { - let rawDatasource: 
Datasource - let datasource: Datasource - let client: Knex +const mainDescriptions = datasourceDescribe({ only: [DatabaseName.MYSQL] }) - const database = generator.guid() - const database2 = generator.guid() +if (mainDescriptions.length) { + describe.each(mainDescriptions)( + "/Integration compatibility with mysql search_path ($dbName)", + ({ config, dsProvider }) => { + let rawDatasource: Datasource + let datasource: Datasource + let client: Knex - beforeAll(async () => { - const ds = await dsProvider() - rawDatasource = ds.rawDatasource! - datasource = ds.datasource! - client = ds.client! + const database = generator.guid() + const database2 = generator.guid() - await client.raw(`CREATE DATABASE \`${database}\`;`) - await client.raw(`CREATE DATABASE \`${database2}\`;`) + beforeAll(async () => { + const ds = await dsProvider() + rawDatasource = ds.rawDatasource! + datasource = ds.datasource! + client = ds.client! - rawDatasource.config!.database = database - datasource = await config.api.datasource.create(rawDatasource) - }) + await client.raw(`CREATE DATABASE \`${database}\`;`) + await client.raw(`CREATE DATABASE \`${database2}\`;`) - afterAll(async () => { - await client.raw(`DROP DATABASE \`${database}\`;`) - await client.raw(`DROP DATABASE \`${database2}\`;`) - }) - - it("discovers tables from any schema in search path", async () => { - await client.schema.createTable(`${database}.table1`, table => { - table.increments("id1").primary() + rawDatasource.config!.database = database + datasource = await config.api.datasource.create(rawDatasource) }) - const res = await config.api.datasource.info(datasource) - expect(res.tableNames).toBeDefined() - expect(res.tableNames).toEqual(expect.arrayContaining(["table1"])) - }) - it("does not mix columns from different tables", async () => { - const repeated_table_name = "table_same_name" - await client.schema.createTable( - `${database}.${repeated_table_name}`, - table => { - table.increments("id").primary() - table.string("val1") - } - ) - await client.schema.createTable( - `${database2}.${repeated_table_name}`, - table => { - table.increments("id2").primary() - table.string("val2") - } - ) - - const res = await config.api.datasource.fetchSchema({ - datasourceId: datasource._id!, - tablesFilter: [repeated_table_name], + afterAll(async () => { + await client.raw(`DROP DATABASE \`${database}\`;`) + await client.raw(`DROP DATABASE \`${database2}\`;`) }) - expect(res.datasource.entities![repeated_table_name].schema).toBeDefined() - const schema = res.datasource.entities![repeated_table_name].schema - expect(Object.keys(schema).sort()).toEqual(["id", "val1"]) - }) - } -) -datasourceDescribe( - { - name: "POST /api/datasources/:datasourceId/schema", - only: [DatabaseName.MYSQL], - }, - ({ config, dsProvider }) => { - let datasource: Datasource - let client: Knex + it("discovers tables from any schema in search path", async () => { + await client.schema.createTable(`${database}.table1`, table => { + table.increments("id1").primary() + }) + const res = await config.api.datasource.info(datasource) + expect(res.tableNames).toBeDefined() + expect(res.tableNames).toEqual(expect.arrayContaining(["table1"])) + }) - beforeAll(async () => { - const ds = await dsProvider() - datasource = ds.datasource! - client = ds.client! 
- }) - - let tableName: string - beforeEach(async () => { - tableName = uniqueTableName() - }) - - afterEach(async () => { - await client.schema.dropTableIfExists(tableName) - }) - - it("recognises enum columns as options", async () => { - const enumColumnName = "status" - - await client.schema.createTable(tableName, table => { - table.increments("order_id").primary() - table.string("customer_name", 100).notNullable() - table.enum( - enumColumnName, - ["pending", "processing", "shipped", "delivered", "cancelled"], - { useNative: true, enumName: `${tableName}_${enumColumnName}` } + it("does not mix columns from different tables", async () => { + const repeated_table_name = "table_same_name" + await client.schema.createTable( + `${database}.${repeated_table_name}`, + table => { + table.increments("id").primary() + table.string("val1") + } ) + await client.schema.createTable( + `${database2}.${repeated_table_name}`, + table => { + table.increments("id2").primary() + table.string("val2") + } + ) + + const res = await config.api.datasource.fetchSchema({ + datasourceId: datasource._id!, + tablesFilter: [repeated_table_name], + }) + expect( + res.datasource.entities![repeated_table_name].schema + ).toBeDefined() + const schema = res.datasource.entities![repeated_table_name].schema + expect(Object.keys(schema).sort()).toEqual(["id", "val1"]) }) + } + ) - const res = await config.api.datasource.fetchSchema({ - datasourceId: datasource._id!, - }) + const descriptions = datasourceDescribe({ only: [DatabaseName.MYSQL] }) - const table = res.datasource.entities![tableName] + if (descriptions.length) { + describe.each(descriptions)( + "POST /api/datasources/:datasourceId/schema ($dbName)", + ({ config, dsProvider }) => { + let datasource: Datasource + let client: Knex - expect(table).toBeDefined() - expect(table.schema[enumColumnName].type).toEqual(FieldType.OPTIONS) - }) + beforeAll(async () => { + const ds = await dsProvider() + datasource = ds.datasource! + client = ds.client! 
+ }) + + let tableName: string + beforeEach(async () => { + tableName = uniqueTableName() + }) + + afterEach(async () => { + await client.schema.dropTableIfExists(tableName) + }) + + it("recognises enum columns as options", async () => { + const enumColumnName = "status" + + await client.schema.createTable(tableName, table => { + table.increments("order_id").primary() + table.string("customer_name", 100).notNullable() + table.enum( + enumColumnName, + ["pending", "processing", "shipped", "delivered", "cancelled"], + { useNative: true, enumName: `${tableName}_${enumColumnName}` } + ) + }) + + const res = await config.api.datasource.fetchSchema({ + datasourceId: datasource._id!, + }) + + const table = res.datasource.entities![tableName] + + expect(table).toBeDefined() + expect(table.schema[enumColumnName].type).toEqual(FieldType.OPTIONS) + }) + } + ) } -) +} diff --git a/packages/server/src/integration-test/postgres.spec.ts b/packages/server/src/integration-test/postgres.spec.ts index 7ef6b9a968..4f63579ba1 100644 --- a/packages/server/src/integration-test/postgres.spec.ts +++ b/packages/server/src/integration-test/postgres.spec.ts @@ -8,283 +8,292 @@ import { } from "../integrations/tests/utils" import { Knex } from "knex" -datasourceDescribe( - { name: "postgres integrations", only: [DatabaseName.POSTGRES] }, - ({ config, dsProvider }) => { - let datasource: Datasource - let client: Knex +const mainDescriptions = datasourceDescribe({ only: [DatabaseName.POSTGRES] }) - beforeAll(async () => { - const ds = await dsProvider() - datasource = ds.datasource! - client = ds.client! - }) +if (mainDescriptions.length) { + describe.each(mainDescriptions)( + "/postgres integrations", + ({ config, dsProvider }) => { + let datasource: Datasource + let client: Knex - afterAll(config.end) - - describe("POST /api/datasources/:datasourceId/schema", () => { - let tableName: string - - beforeEach(async () => { - tableName = generator.guid().replaceAll("-", "").substring(0, 10) + beforeAll(async () => { + const ds = await dsProvider() + datasource = ds.datasource! + client = ds.client! 
}) - afterEach(async () => { - await client.schema.dropTableIfExists(tableName) - }) + afterAll(config.end) - it("recognises when a table has no primary key", async () => { - await client.schema.createTable(tableName, table => { - table.increments("id", { primaryKey: false }) + describe("POST /api/datasources/:datasourceId/schema", () => { + let tableName: string + + beforeEach(async () => { + tableName = generator.guid().replaceAll("-", "").substring(0, 10) }) - const response = await config.api.datasource.fetchSchema({ - datasourceId: datasource._id!, + afterEach(async () => { + await client.schema.dropTableIfExists(tableName) }) - expect(response.errors).toEqual({ - [tableName]: "Table must have a primary key.", - }) - }) + it("recognises when a table has no primary key", async () => { + await client.schema.createTable(tableName, table => { + table.increments("id", { primaryKey: false }) + }) - it("recognises when a table is using a reserved column name", async () => { - await client.schema.createTable(tableName, table => { - table.increments("_id").primary() - }) + const response = await config.api.datasource.fetchSchema({ + datasourceId: datasource._id!, + }) - const response = await config.api.datasource.fetchSchema({ - datasourceId: datasource._id!, - }) - - expect(response.errors).toEqual({ - [tableName]: "Table contains invalid columns.", - }) - }) - - it("recognises enum columns as options", async () => { - const tableName = `orders_${generator - .guid() - .replaceAll("-", "") - .substring(0, 6)}` - - await client.schema.createTable(tableName, table => { - table.increments("order_id").primary() - table.string("customer_name").notNullable() - table.enum("status", ["pending", "processing", "shipped"], { - useNative: true, - enumName: `${tableName}_status`, + expect(response.errors).toEqual({ + [tableName]: "Table must have a primary key.", }) }) - const response = await config.api.datasource.fetchSchema({ - datasourceId: datasource._id!, + it("recognises when a table is using a reserved column name", async () => { + await client.schema.createTable(tableName, table => { + table.increments("_id").primary() + }) + + const response = await config.api.datasource.fetchSchema({ + datasourceId: datasource._id!, + }) + + expect(response.errors).toEqual({ + [tableName]: "Table contains invalid columns.", + }) }) - const table = response.datasource.entities?.[tableName] + it("recognises enum columns as options", async () => { + const tableName = `orders_${generator + .guid() + .replaceAll("-", "") + .substring(0, 6)}` - expect(table).toBeDefined() - expect(table?.schema["status"].type).toEqual(FieldType.OPTIONS) - }) - }) + await client.schema.createTable(tableName, table => { + table.increments("order_id").primary() + table.string("customer_name").notNullable() + table.enum("status", ["pending", "processing", "shipped"], { + useNative: true, + enumName: `${tableName}_status`, + }) + }) - describe("check custom column types", () => { - beforeAll(async () => { - await client.schema.createTable("binaryTable", table => { - table.binary("id").primary() - table.string("column1") - table.integer("column2") + const response = await config.api.datasource.fetchSchema({ + datasourceId: datasource._id!, + }) + + const table = response.datasource.entities?.[tableName] + + expect(table).toBeDefined() + expect(table?.schema["status"].type).toEqual(FieldType.OPTIONS) }) }) - it("should handle binary columns", async () => { - const response = await config.api.datasource.fetchSchema({ - datasourceId: 
datasource._id!, + describe("check custom column types", () => { + beforeAll(async () => { + await client.schema.createTable("binaryTable", table => { + table.binary("id").primary() + table.string("column1") + table.integer("column2") + }) }) - expect(response.datasource.entities).toBeDefined() - const table = response.datasource.entities?.["binaryTable"] - expect(table).toBeDefined() - expect(table?.schema.id.externalType).toBe("bytea") - const row = await config.api.row.save(table?._id!, { - id: "1111", - column1: "hello", - column2: 222, - }) - expect(row._id).toBeDefined() - const decoded = decodeURIComponent(row._id!).replace(/'/g, '"') - expect(JSON.parse(decoded)[0]).toBe("1111") - }) - }) - describe("check fetching null/not null table", () => { - beforeAll(async () => { - await client.schema.createTable("nullableTable", table => { - table.increments("order_id").primary() - table.integer("order_number").notNullable() + it("should handle binary columns", async () => { + const response = await config.api.datasource.fetchSchema({ + datasourceId: datasource._id!, + }) + expect(response.datasource.entities).toBeDefined() + const table = response.datasource.entities?.["binaryTable"] + expect(table).toBeDefined() + expect(table?.schema.id.externalType).toBe("bytea") + const row = await config.api.row.save(table?._id!, { + id: "1111", + column1: "hello", + column2: 222, + }) + expect(row._id).toBeDefined() + const decoded = decodeURIComponent(row._id!).replace(/'/g, '"') + expect(JSON.parse(decoded)[0]).toBe("1111") }) }) - it("should be able to change the table to allow nullable and refetch this", async () => { - const response = await config.api.datasource.fetchSchema({ - datasourceId: datasource._id!, - }) - const entities = response.datasource.entities - expect(entities).toBeDefined() - const nullableTable = entities?.["nullableTable"] - expect(nullableTable).toBeDefined() - expect( - nullableTable?.schema["order_number"].constraints?.presence - ).toEqual(true) - - // need to perform these calls raw to the DB so that the external state of the DB differs to what Budibase - // is aware of - therefore we can try to fetch and make sure BB updates correctly - await client.schema.alterTable("nullableTable", table => { - table.setNullable("order_number") + describe("check fetching null/not null table", () => { + beforeAll(async () => { + await client.schema.createTable("nullableTable", table => { + table.increments("order_id").primary() + table.integer("order_number").notNullable() + }) }) - const responseAfter = await config.api.datasource.fetchSchema({ - datasourceId: datasource._id!, + it("should be able to change the table to allow nullable and refetch this", async () => { + const response = await config.api.datasource.fetchSchema({ + datasourceId: datasource._id!, + }) + const entities = response.datasource.entities + expect(entities).toBeDefined() + const nullableTable = entities?.["nullableTable"] + expect(nullableTable).toBeDefined() + expect( + nullableTable?.schema["order_number"].constraints?.presence + ).toEqual(true) + + // need to perform these calls raw to the DB so that the external state of the DB differs to what Budibase + // is aware of - therefore we can try to fetch and make sure BB updates correctly + await client.schema.alterTable("nullableTable", table => { + table.setNullable("order_number") + }) + + const responseAfter = await config.api.datasource.fetchSchema({ + datasourceId: datasource._id!, + }) + const entitiesAfter = responseAfter.datasource.entities + 
expect(entitiesAfter).toBeDefined() + const nullableTableAfter = entitiesAfter?.["nullableTable"] + expect(nullableTableAfter).toBeDefined() + expect( + nullableTableAfter?.schema["order_number"].constraints?.presence + ).toBeUndefined() }) - const entitiesAfter = responseAfter.datasource.entities - expect(entitiesAfter).toBeDefined() - const nullableTableAfter = entitiesAfter?.["nullableTable"] - expect(nullableTableAfter).toBeDefined() - expect( - nullableTableAfter?.schema["order_number"].constraints?.presence - ).toBeUndefined() }) - }) - describe("money field 💰", () => { - const tableName = "moneytable" - let table: Table + describe("money field 💰", () => { + const tableName = "moneytable" + let table: Table - beforeAll(async () => { - await client.raw(` + beforeAll(async () => { + await client.raw(` CREATE TABLE ${tableName} ( id serial PRIMARY KEY, price money ) `) - const response = await config.api.datasource.fetchSchema({ - datasourceId: datasource._id!, - }) - table = response.datasource.entities![tableName] - }) - - it("should be able to import a money field", async () => { - expect(table).toBeDefined() - expect(table?.schema.price.type).toBe(FieldType.NUMBER) - }) - - it("should be able to search a money field", async () => { - await config.api.row.bulkImport(table._id!, { - rows: [{ price: 200 }, { price: 300 }], + const response = await config.api.datasource.fetchSchema({ + datasourceId: datasource._id!, + }) + table = response.datasource.entities![tableName] }) - const { rows } = await config.api.row.search(table._id!, { - query: { - equal: { - price: 200, + it("should be able to import a money field", async () => { + expect(table).toBeDefined() + expect(table?.schema.price.type).toBe(FieldType.NUMBER) + }) + + it("should be able to search a money field", async () => { + await config.api.row.bulkImport(table._id!, { + rows: [{ price: 200 }, { price: 300 }], + }) + + const { rows } = await config.api.row.search(table._id!, { + query: { + equal: { + price: 200, + }, }, - }, + }) + expect(rows).toHaveLength(1) + expect(rows[0].price).toBe("200.00") + }) + + it("should be able to update a money field", async () => { + let row = await config.api.row.save(table._id!, { price: 200 }) + expect(row.price).toBe("200.00") + + row = await config.api.row.save(table._id!, { ...row, price: 300 }) + expect(row.price).toBe("300.00") + + row = await config.api.row.save(table._id!, { + ...row, + price: "400.00", + }) + expect(row.price).toBe("400.00") }) - expect(rows).toHaveLength(1) - expect(rows[0].price).toBe("200.00") }) + } + ) - it("should be able to update a money field", async () => { - let row = await config.api.row.save(table._id!, { price: 200 }) - expect(row.price).toBe("200.00") + const descriptions = datasourceDescribe({ only: [DatabaseName.POSTGRES] }) - row = await config.api.row.save(table._id!, { ...row, price: 300 }) - expect(row.price).toBe("300.00") + if (descriptions.length) { + describe.each(descriptions)( + "Integration compatibility with postgres search_path", + ({ config, dsProvider }) => { + let datasource: Datasource + let client: Knex + let schema1: string + let schema2: string - row = await config.api.row.save(table._id!, { ...row, price: "400.00" }) - expect(row.price).toBe("400.00") - }) - }) + beforeEach(async () => { + const ds = await dsProvider() + datasource = ds.datasource! + const rawDatasource = ds.rawDatasource! 
+ + schema1 = generator.guid().replaceAll("-", "") + schema2 = generator.guid().replaceAll("-", "") + + client = await knexClient(rawDatasource) + + await client.schema.createSchema(schema1) + await client.schema.createSchema(schema2) + + rawDatasource.config!.schema = `${schema1}, ${schema2}` + + client = await knexClient(rawDatasource) + datasource = await config.api.datasource.create(rawDatasource) + }) + + afterEach(async () => { + await client.schema.dropSchema(schema1, true) + await client.schema.dropSchema(schema2, true) + }) + + it("discovers tables from any schema in search path", async () => { + await client.schema.createTable(`${schema1}.table1`, table => { + table.increments("id1").primary() + }) + + await client.schema.createTable(`${schema2}.table2`, table => { + table.increments("id2").primary() + }) + + const response = await config.api.datasource.info(datasource) + expect(response.tableNames).toBeDefined() + expect(response.tableNames).toEqual( + expect.arrayContaining(["table1", "table2"]) + ) + }) + + it("does not mix columns from different tables", async () => { + const repeated_table_name = "table_same_name" + + await client.schema.createTable( + `${schema1}.${repeated_table_name}`, + table => { + table.increments("id").primary() + table.string("val1") + } + ) + + await client.schema.createTable( + `${schema2}.${repeated_table_name}`, + table => { + table.increments("id2").primary() + table.string("val2") + } + ) + + const response = await config.api.datasource.fetchSchema({ + datasourceId: datasource._id!, + tablesFilter: [repeated_table_name], + }) + expect( + response.datasource.entities?.[repeated_table_name].schema + ).toBeDefined() + const schema = + response.datasource.entities?.[repeated_table_name].schema + expect(Object.keys(schema || {}).sort()).toEqual(["id", "val1"]) + }) + } + ) } -) - -datasourceDescribe( - { - name: "Integration compatibility with postgres search_path", - only: [DatabaseName.POSTGRES], - }, - ({ config, dsProvider }) => { - let datasource: Datasource - let client: Knex - let schema1: string - let schema2: string - - beforeEach(async () => { - const ds = await dsProvider() - datasource = ds.datasource! - const rawDatasource = ds.rawDatasource! 
- - schema1 = generator.guid().replaceAll("-", "") - schema2 = generator.guid().replaceAll("-", "") - - client = await knexClient(rawDatasource) - - await client.schema.createSchema(schema1) - await client.schema.createSchema(schema2) - - rawDatasource.config!.schema = `${schema1}, ${schema2}` - - client = await knexClient(rawDatasource) - datasource = await config.api.datasource.create(rawDatasource) - }) - - afterEach(async () => { - await client.schema.dropSchema(schema1, true) - await client.schema.dropSchema(schema2, true) - }) - - it("discovers tables from any schema in search path", async () => { - await client.schema.createTable(`${schema1}.table1`, table => { - table.increments("id1").primary() - }) - - await client.schema.createTable(`${schema2}.table2`, table => { - table.increments("id2").primary() - }) - - const response = await config.api.datasource.info(datasource) - expect(response.tableNames).toBeDefined() - expect(response.tableNames).toEqual( - expect.arrayContaining(["table1", "table2"]) - ) - }) - - it("does not mix columns from different tables", async () => { - const repeated_table_name = "table_same_name" - - await client.schema.createTable( - `${schema1}.${repeated_table_name}`, - table => { - table.increments("id").primary() - table.string("val1") - } - ) - - await client.schema.createTable( - `${schema2}.${repeated_table_name}`, - table => { - table.increments("id2").primary() - table.string("val2") - } - ) - - const response = await config.api.datasource.fetchSchema({ - datasourceId: datasource._id!, - tablesFilter: [repeated_table_name], - }) - expect( - response.datasource.entities?.[repeated_table_name].schema - ).toBeDefined() - const schema = response.datasource.entities?.[repeated_table_name].schema - expect(Object.keys(schema || {}).sort()).toEqual(["id", "val1"]) - }) - } -) +} diff --git a/packages/server/src/sdk/app/rows/search/tests/search.spec.ts b/packages/server/src/sdk/app/rows/search/tests/search.spec.ts index 78e5ffa30b..b424c3707d 100644 --- a/packages/server/src/sdk/app/rows/search/tests/search.spec.ts +++ b/packages/server/src/sdk/app/rows/search/tests/search.spec.ts @@ -19,202 +19,206 @@ import { tableForDatasource } from "../../../../../tests/utilities/structures" // These test cases are only for things that cannot be tested through the API // (e.g. limiting searches to returning specific fields). If it's possible to // test through the API, it should be done there instead. -datasourceDescribe( - { name: "search sdk (%s)", exclude: [DatabaseName.MONGODB] }, - ({ config, dsProvider, isInternal }) => { - let datasource: Datasource | undefined - let table: Table +const descriptions = datasourceDescribe({ exclude: [DatabaseName.MONGODB] }) - beforeAll(async () => { - const ds = await dsProvider() - datasource = ds.datasource - }) +if (descriptions.length) { + describe.each(descriptions)( + "search sdk ($dbName)", + ({ config, dsProvider, isInternal }) => { + let datasource: Datasource | undefined + let table: Table - beforeEach(async () => { - const idFieldSchema: NumberFieldMetadata | AutoColumnFieldMetadata = - isInternal - ? 
{ - name: "id", - type: FieldType.AUTO, - subtype: AutoFieldSubType.AUTO_ID, - autocolumn: true, - } - : { - name: "id", - type: FieldType.NUMBER, - autocolumn: true, - } - - table = await config.api.table.save( - tableForDatasource(datasource, { - primary: ["id"], - schema: { - id: idFieldSchema, - name: { - name: "name", - type: FieldType.STRING, - }, - surname: { - name: "surname", - type: FieldType.STRING, - }, - age: { - name: "age", - type: FieldType.NUMBER, - }, - address: { - name: "address", - type: FieldType.STRING, - }, - }, - }) - ) - - for (let i = 0; i < 10; i++) { - await config.api.row.save(table._id!, { - name: generator.first(), - surname: generator.last(), - age: generator.age(), - address: generator.address(), - }) - } - }) - - afterAll(async () => { - config.end() - }) - - it("querying by fields will always return data attribute columns", async () => { - await config.doInContext(config.appId, async () => { - const { rows } = await search({ - tableId: table._id!, - query: {}, - fields: ["name", "age"], - }) - - expect(rows).toHaveLength(10) - for (const row of rows) { - const keys = Object.keys(row) - expect(keys).toContain("name") - expect(keys).toContain("age") - expect(keys).not.toContain("surname") - expect(keys).not.toContain("address") - } + beforeAll(async () => { + const ds = await dsProvider() + datasource = ds.datasource }) - }) - !isInternal && - it("will decode _id in oneOf query", async () => { - await config.doInContext(config.appId, async () => { - const result = await search({ - tableId: table._id!, - query: { - oneOf: { - _id: ["%5B1%5D", "%5B4%5D", "%5B8%5D"], + beforeEach(async () => { + const idFieldSchema: NumberFieldMetadata | AutoColumnFieldMetadata = + isInternal + ? { + name: "id", + type: FieldType.AUTO, + subtype: AutoFieldSubType.AUTO_ID, + autocolumn: true, + } + : { + name: "id", + type: FieldType.NUMBER, + autocolumn: true, + } + + table = await config.api.table.save( + tableForDatasource(datasource, { + primary: ["id"], + schema: { + id: idFieldSchema, + name: { + name: "name", + type: FieldType.STRING, + }, + surname: { + name: "surname", + type: FieldType.STRING, + }, + age: { + name: "age", + type: FieldType.NUMBER, + }, + address: { + name: "address", + type: FieldType.STRING, }, }, }) + ) - expect(result.rows).toHaveLength(3) - expect(result.rows.map(row => row.id)).toEqual( - expect.arrayContaining([1, 4, 8]) - ) - }) - }) - - it("does not allow accessing hidden fields", async () => { - await config.doInContext(config.appId, async () => { - await config.api.table.save({ - ...table, - schema: { - ...table.schema, - name: { - ...table.schema.name, - visible: true, - }, - age: { - ...table.schema.age, - visible: false, - }, - }, - }) - const result = await search({ - tableId: table._id!, - query: {}, - }) - expect(result.rows).toHaveLength(10) - for (const row of result.rows) { - const keys = Object.keys(row) - expect(keys).toContain("name") - expect(keys).toContain("surname") - expect(keys).toContain("address") - expect(keys).not.toContain("age") + for (let i = 0; i < 10; i++) { + await config.api.row.save(table._id!, { + name: generator.first(), + surname: generator.last(), + age: generator.age(), + address: generator.address(), + }) } }) - }) - it("does not allow accessing hidden fields even if requested", async () => { - await config.doInContext(config.appId, async () => { - await config.api.table.save({ - ...table, - schema: { - ...table.schema, - name: { - ...table.schema.name, - visible: true, - }, - age: { - 
...table.schema.age, - visible: false, - }, - }, - }) - const result = await search({ - tableId: table._id!, - query: {}, - fields: ["name", "age"], - }) - expect(result.rows).toHaveLength(10) - for (const row of result.rows) { - const keys = Object.keys(row) - expect(keys).toContain("name") - expect(keys).not.toContain("age") - expect(keys).not.toContain("surname") - expect(keys).not.toContain("address") - } + afterAll(async () => { + config.end() }) - }) - it.each([ - [["id", "name", "age"], 3], - [["name", "age"], 10], - ])( - "cannot query by non search fields (fields: %s)", - async (queryFields, expectedRows) => { + it("querying by fields will always return data attribute columns", async () => { await config.doInContext(config.appId, async () => { const { rows } = await search({ tableId: table._id!, - query: { - $or: { - conditions: [ - { - $and: { - conditions: [ - { range: { id: { low: 2, high: 4 } } }, - { range: { id: { low: 3, high: 5 } } }, - ], - }, - }, - { equal: { id: 7 } }, - ], - }, - }, - fields: queryFields, + query: {}, + fields: ["name", "age"], }) - expect(rows).toHaveLength(expectedRows) + expect(rows).toHaveLength(10) + for (const row of rows) { + const keys = Object.keys(row) + expect(keys).toContain("name") + expect(keys).toContain("age") + expect(keys).not.toContain("surname") + expect(keys).not.toContain("address") + } }) - } - ) - } -) + }) + + !isInternal && + it("will decode _id in oneOf query", async () => { + await config.doInContext(config.appId, async () => { + const result = await search({ + tableId: table._id!, + query: { + oneOf: { + _id: ["%5B1%5D", "%5B4%5D", "%5B8%5D"], + }, + }, + }) + + expect(result.rows).toHaveLength(3) + expect(result.rows.map(row => row.id)).toEqual( + expect.arrayContaining([1, 4, 8]) + ) + }) + }) + + it("does not allow accessing hidden fields", async () => { + await config.doInContext(config.appId, async () => { + await config.api.table.save({ + ...table, + schema: { + ...table.schema, + name: { + ...table.schema.name, + visible: true, + }, + age: { + ...table.schema.age, + visible: false, + }, + }, + }) + const result = await search({ + tableId: table._id!, + query: {}, + }) + expect(result.rows).toHaveLength(10) + for (const row of result.rows) { + const keys = Object.keys(row) + expect(keys).toContain("name") + expect(keys).toContain("surname") + expect(keys).toContain("address") + expect(keys).not.toContain("age") + } + }) + }) + + it("does not allow accessing hidden fields even if requested", async () => { + await config.doInContext(config.appId, async () => { + await config.api.table.save({ + ...table, + schema: { + ...table.schema, + name: { + ...table.schema.name, + visible: true, + }, + age: { + ...table.schema.age, + visible: false, + }, + }, + }) + const result = await search({ + tableId: table._id!, + query: {}, + fields: ["name", "age"], + }) + expect(result.rows).toHaveLength(10) + for (const row of result.rows) { + const keys = Object.keys(row) + expect(keys).toContain("name") + expect(keys).not.toContain("age") + expect(keys).not.toContain("surname") + expect(keys).not.toContain("address") + } + }) + }) + + it.each([ + [["id", "name", "age"], 3], + [["name", "age"], 10], + ])( + "cannot query by non search fields (fields: %s)", + async (queryFields, expectedRows) => { + await config.doInContext(config.appId, async () => { + const { rows } = await search({ + tableId: table._id!, + query: { + $or: { + conditions: [ + { + $and: { + conditions: [ + { range: { id: { low: 2, high: 4 } } }, + { range: { id: { low: 
3, high: 5 } } }, + ], + }, + }, + { equal: { id: 7 } }, + ], + }, + }, + fields: queryFields, + }) + + expect(rows).toHaveLength(expectedRows) + }) + } + ) + } + ) +} From 19416b46507ef3b52fcaeeee7b9a0b65149d2113 Mon Sep 17 00:00:00 2001 From: mike12345567 Date: Tue, 19 Nov 2024 14:41:07 +0000 Subject: [PATCH 04/16] Fix for row action. --- packages/server/src/api/routes/tests/rowAction.spec.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/server/src/api/routes/tests/rowAction.spec.ts b/packages/server/src/api/routes/tests/rowAction.spec.ts index dabcd11b73..76046c06ea 100644 --- a/packages/server/src/api/routes/tests/rowAction.spec.ts +++ b/packages/server/src/api/routes/tests/rowAction.spec.ts @@ -978,7 +978,7 @@ describe("/rowsActions", () => { }) const descriptions = datasourceDescribe({ - exclude: [DatabaseName.SQS, DatabaseName.POSTGRES], + only: [DatabaseName.SQS, DatabaseName.POSTGRES], }) if (descriptions.length) { From b95960b01f9a9d8cafdb155379d5b29dfd5feeca Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Wed, 20 Nov 2024 11:43:46 +0000 Subject: [PATCH 05/16] Enable verbose test output in CI, as well as the github-actions reporter. --- .github/workflows/budibase_ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/budibase_ci.yml b/.github/workflows/budibase_ci.yml index 0447b528f0..aa7efc13a8 100644 --- a/.github/workflows/budibase_ci.yml +++ b/.github/workflows/budibase_ci.yml @@ -211,7 +211,7 @@ jobs: fi cd packages/server - yarn test --filter $FILTER --passWithNoTests + yarn test --filter $FILTER --verbose --reporters=default --reporters=github-actions check-pro-submodule: runs-on: ubuntu-latest From f30d56210f029d10445a6398f725c323dadf1ad0 Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Wed, 20 Nov 2024 11:51:32 +0000 Subject: [PATCH 06/16] Break a test on purpose to check reporter. --- packages/server/src/api/routes/tests/row.spec.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/server/src/api/routes/tests/row.spec.ts b/packages/server/src/api/routes/tests/row.spec.ts index 5bdd341beb..95d44a86ba 100644 --- a/packages/server/src/api/routes/tests/row.spec.ts +++ b/packages/server/src/api/routes/tests/row.spec.ts @@ -565,7 +565,7 @@ datasourceDescribe( const row = await config.api.row.save(table._id!, { food: ["orange"], }) - expect(row.food).toEqual(["orange"]) + expect(row.food).toEqual(["orangefdsfsd"]) }) it("resets back to its default value when empty", async () => { From 8d5a67b6b52528031f88776d8eb2d411939a34a2 Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Wed, 20 Nov 2024 12:07:07 +0000 Subject: [PATCH 07/16] Add reporters to rest of jest tests. 
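
For context: the --reporters flags wired into CI below are the command-line form of Jest's reporter configuration, using Jest's built-in "github-actions" reporter to surface test failures as workflow annotations. The same effect could instead be pinned in a package's Jest config. A minimal sketch, assuming a TypeScript Jest config file (illustrative only, not part of this change):

  // jest.config.ts (illustrative sketch): CI passes the equivalent flags on
  // the command line instead of hard-coding the reporters here.
  import type { Config } from "jest"

  const config: Config = {
    // "github-actions" is Jest's built-in reporter that emits GitHub workflow
    // annotations for failing tests; "default" keeps the usual console output.
    reporters: ["default", "github-actions"],
    verbose: true,
  }

  export default config

Keeping the reporters on the CLI, as this patch does, limits the GitHub annotations to CI runs and leaves local test output unchanged.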
--- .github/workflows/budibase_ci.yml | 14 +++++--- .../server/src/api/routes/tests/row.spec.ts | 2 +- scripts/run-affected.js | 34 ------------------- 3 files changed, 10 insertions(+), 40 deletions(-) delete mode 100755 scripts/run-affected.js diff --git a/.github/workflows/budibase_ci.yml b/.github/workflows/budibase_ci.yml index aa7efc13a8..1739070717 100644 --- a/.github/workflows/budibase_ci.yml +++ b/.github/workflows/budibase_ci.yml @@ -114,9 +114,9 @@ jobs: - name: Test run: | if ${{ env.ONLY_AFFECTED_TASKS }}; then - yarn test --ignore=@budibase/worker --ignore=@budibase/server --since=${{ env.NX_BASE_BRANCH }} + yarn test --ignore=@budibase/worker --ignore=@budibase/server --since=${{ env.NX_BASE_BRANCH }} -- --verbose --reporters=default --reporters=github-actions else - yarn test --ignore=@budibase/worker --ignore=@budibase/server + yarn test --ignore=@budibase/worker --ignore=@budibase/server --verbose --reporters=default --reporters=github-actions fi test-worker: @@ -138,11 +138,15 @@ jobs: - name: Test worker run: | if ${{ env.ONLY_AFFECTED_TASKS }}; then - node scripts/run-affected.js --task=test --scope=@budibase/worker --since=${{ env.NX_BASE_BRANCH }} - else - yarn test --scope=@budibase/worker + AFFECTED=$(yarn --silent nx show projects --affected -t test --base=${{ env.NX_BASE_BRANCH }} -p @budibase/worker) + if [ -z "$AFFECTED" ]; then + echo "No affected tests to run" + exit 0 + fi fi + yarn test --scope=@budibase/worker --verbose --reporters=default --reporters=github-actions + test-server: runs-on: ubuntu-latest strategy: diff --git a/packages/server/src/api/routes/tests/row.spec.ts b/packages/server/src/api/routes/tests/row.spec.ts index 95d44a86ba..5bdd341beb 100644 --- a/packages/server/src/api/routes/tests/row.spec.ts +++ b/packages/server/src/api/routes/tests/row.spec.ts @@ -565,7 +565,7 @@ datasourceDescribe( const row = await config.api.row.save(table._id!, { food: ["orange"], }) - expect(row.food).toEqual(["orangefdsfsd"]) + expect(row.food).toEqual(["orange"]) }) it("resets back to its default value when empty", async () => { diff --git a/scripts/run-affected.js b/scripts/run-affected.js deleted file mode 100755 index 97f79bb463..0000000000 --- a/scripts/run-affected.js +++ /dev/null @@ -1,34 +0,0 @@ -/*** - * Running lerna with since and scope is not working as expected. - * For example, running the command `yarn test --scope=@budibase/worker --since=master`, with changes only on `@budibase/backend-core` will not work as expected, as it does not analyse the dependencies properly. The actual `@budibase/worker` task will not be triggered. - * - * This script is using `lerna ls` to detect all the affected projects from a given commit, and if the scoped package is affected, the actual command will be executed. - * - * The current version of the script only supports a single project in the scope. - */ - -const { execSync } = require("child_process") - -const argv = require("yargs").demandOption(["task", "since", "scope"]).argv - -const { task, since, scope } = argv - -const affectedPackages = execSync( - `yarn --silent nx show projects --affected -t ${task} --base=${since} --json`, - { - encoding: "utf-8", - } -) - -const packages = JSON.parse(affectedPackages) - -const isAffected = packages.includes(scope) - -if (isAffected) { - console.log(`${scope} is affected. Running task "${task}"`) - execSync(`yarn ${task} --scope=${scope}`, { - stdio: "inherit", - }) -} else { - console.log(`${scope} is not affected. 
Skipping task "${task}"`) -} From b02e48fa099a4135a12ad7d7ee37c81cb8fe3483 Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Wed, 20 Nov 2024 12:10:39 +0000 Subject: [PATCH 08/16] Attempt to fix CI. --- .github/workflows/budibase_ci.yml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.github/workflows/budibase_ci.yml b/.github/workflows/budibase_ci.yml index 1739070717..5decb660c1 100644 --- a/.github/workflows/budibase_ci.yml +++ b/.github/workflows/budibase_ci.yml @@ -116,7 +116,7 @@ jobs: if ${{ env.ONLY_AFFECTED_TASKS }}; then yarn test --ignore=@budibase/worker --ignore=@budibase/server --since=${{ env.NX_BASE_BRANCH }} -- --verbose --reporters=default --reporters=github-actions else - yarn test --ignore=@budibase/worker --ignore=@budibase/server --verbose --reporters=default --reporters=github-actions + yarn test --ignore=@budibase/worker --ignore=@budibase/server -- --verbose --reporters=default --reporters=github-actions fi test-worker: @@ -145,7 +145,8 @@ jobs: fi fi - yarn test --scope=@budibase/worker --verbose --reporters=default --reporters=github-actions + cd packages/worker + yarn test --verbose --reporters=default --reporters=github-actions test-server: runs-on: ubuntu-latest From d3da6c08a921e64d705c3aec54c7b15c4bd2b104 Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Wed, 20 Nov 2024 12:12:21 +0000 Subject: [PATCH 09/16] Attempt to fix CI. --- .github/workflows/budibase_ci.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/budibase_ci.yml b/.github/workflows/budibase_ci.yml index 5decb660c1..cda2c7aa2d 100644 --- a/.github/workflows/budibase_ci.yml +++ b/.github/workflows/budibase_ci.yml @@ -114,9 +114,9 @@ jobs: - name: Test run: | if ${{ env.ONLY_AFFECTED_TASKS }}; then - yarn test --ignore=@budibase/worker --ignore=@budibase/server --since=${{ env.NX_BASE_BRANCH }} -- --verbose --reporters=default --reporters=github-actions + yarn test --ignore=@budibase/worker --ignore=@budibase/server --since=${{ env.NX_BASE_BRANCH }} -- -- --verbose --reporters=default --reporters=github-actions else - yarn test --ignore=@budibase/worker --ignore=@budibase/server -- --verbose --reporters=default --reporters=github-actions + yarn test --ignore=@budibase/worker --ignore=@budibase/server -- -- --verbose --reporters=default --reporters=github-actions fi test-worker: From d3d187e9aec7adb6284a945b3860b0ec2cdeef6b Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Wed, 20 Nov 2024 12:25:48 +0000 Subject: [PATCH 10/16] Fix test-libraries yarn invocation. --- .github/workflows/budibase_ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/budibase_ci.yml b/.github/workflows/budibase_ci.yml index cda2c7aa2d..c7630e07bd 100644 --- a/.github/workflows/budibase_ci.yml +++ b/.github/workflows/budibase_ci.yml @@ -116,7 +116,7 @@ jobs: if ${{ env.ONLY_AFFECTED_TASKS }}; then yarn test --ignore=@budibase/worker --ignore=@budibase/server --since=${{ env.NX_BASE_BRANCH }} -- -- --verbose --reporters=default --reporters=github-actions else - yarn test --ignore=@budibase/worker --ignore=@budibase/server -- -- --verbose --reporters=default --reporters=github-actions + yarn test -- -- --ignore=@budibase/worker --ignore=@budibase/server --verbose --reporters=default --reporters=github-actions fi test-worker: From 6495a84ff9e582fe4b4cead1aa7ecc879ccd3aea Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Wed, 20 Nov 2024 12:34:18 +0000 Subject: [PATCH 11/16] Fix test-libraries yarn invocation. 
--- .github/workflows/budibase_ci.yml | 4 ++-- package.json | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/budibase_ci.yml b/.github/workflows/budibase_ci.yml index c7630e07bd..732a295aae 100644 --- a/.github/workflows/budibase_ci.yml +++ b/.github/workflows/budibase_ci.yml @@ -114,9 +114,9 @@ jobs: - name: Test run: | if ${{ env.ONLY_AFFECTED_TASKS }}; then - yarn test --ignore=@budibase/worker --ignore=@budibase/server --since=${{ env.NX_BASE_BRANCH }} -- -- --verbose --reporters=default --reporters=github-actions + yarn test -- --ignore=@budibase/worker --ignore=@budibase/server --since=${{ env.NX_BASE_BRANCH }} -- --verbose --reporters=default --reporters=github-actions else - yarn test -- -- --ignore=@budibase/worker --ignore=@budibase/server --verbose --reporters=default --reporters=github-actions + yarn test -- --ignore=@budibase/worker --ignore=@budibase/server -- --verbose --reporters=default --reporters=github-actions fi test-worker: diff --git a/package.json b/package.json index 07e0010bf2..af39a4048d 100644 --- a/package.json +++ b/package.json @@ -57,7 +57,7 @@ "dev:server": "yarn run kill-server && lerna run --stream dev --scope @budibase/worker --scope @budibase/server", "dev:built": "yarn run kill-all && cd packages/server && yarn dev:stack:up && cd ../../ && lerna run --stream dev:built", "dev:docker": "./scripts/devDocker.sh", - "test": "lerna run --concurrency 1 --stream test --stream", + "test": "lerna run --concurrency 1 --stream test", "test:containers:kill": "./scripts/killTestcontainers.sh", "lint:eslint": "eslint packages --max-warnings=0", "lint:prettier": "prettier --check \"packages/**/*.{js,ts,svelte}\" && prettier --write \"examples/**/*.{js,ts,svelte}\"", From 8944b0908b5e264beacb41178a1a71270a56a68c Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Wed, 20 Nov 2024 12:37:36 +0000 Subject: [PATCH 12/16] Create failing test in test-libraries --- .../backend-core/src/users/test/db.spec.ts | 21 +------------------ 1 file changed, 1 insertion(+), 20 deletions(-) diff --git a/packages/backend-core/src/users/test/db.spec.ts b/packages/backend-core/src/users/test/db.spec.ts index 3e29d6673c..a279fa3780 100644 --- a/packages/backend-core/src/users/test/db.spec.ts +++ b/packages/backend-core/src/users/test/db.spec.ts @@ -36,26 +36,7 @@ describe("UserDB", () => { describe("save", () => { describe("create", () => { it("creating a new user will persist it", async () => { - const email = generator.email({}) - const user: User = structures.users.user({ - email, - tenantId: config.getTenantId(), - }) - - await config.doInTenant(async () => { - const saveUserResponse = await db.save(user) - - const persistedUser = await db.getUserByEmail(email) - expect(persistedUser).toEqual({ - ...user, - _id: saveUserResponse._id, - _rev: expect.stringMatching(/^1-\w+/), - password: expect.not.stringMatching(user.password!), - status: UserStatus.ACTIVE, - createdAt: Date.now(), - updatedAt: new Date().toISOString(), - }) - }) + throw new Error("oh no") }) it("the same email cannot be used twice in the same tenant", async () => { From cf57c7a0d08d3ee7887a81cf59d4c2c1ffb34861 Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Wed, 20 Nov 2024 12:37:47 +0000 Subject: [PATCH 13/16] Revert failing test. 
--- .../backend-core/src/users/test/db.spec.ts | 21 ++++++++++++++++++- 1 file changed, 20 insertions(+), 1 deletion(-) diff --git a/packages/backend-core/src/users/test/db.spec.ts b/packages/backend-core/src/users/test/db.spec.ts index a279fa3780..3e29d6673c 100644 --- a/packages/backend-core/src/users/test/db.spec.ts +++ b/packages/backend-core/src/users/test/db.spec.ts @@ -36,7 +36,26 @@ describe("UserDB", () => { describe("save", () => { describe("create", () => { it("creating a new user will persist it", async () => { - throw new Error("oh no") + const email = generator.email({}) + const user: User = structures.users.user({ + email, + tenantId: config.getTenantId(), + }) + + await config.doInTenant(async () => { + const saveUserResponse = await db.save(user) + + const persistedUser = await db.getUserByEmail(email) + expect(persistedUser).toEqual({ + ...user, + _id: saveUserResponse._id, + _rev: expect.stringMatching(/^1-\w+/), + password: expect.not.stringMatching(user.password!), + status: UserStatus.ACTIVE, + createdAt: Date.now(), + updatedAt: new Date().toISOString(), + }) + }) }) it("the same email cannot be used twice in the same tenant", async () => { From 63aa1ec0bf40e767c3f21f0a00270fa384d7b86a Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Wed, 20 Nov 2024 12:50:07 +0000 Subject: [PATCH 14/16] Create failing test in test-libraries --- packages/backend-core/src/users/test/db.spec.ts | 1 + 1 file changed, 1 insertion(+) diff --git a/packages/backend-core/src/users/test/db.spec.ts b/packages/backend-core/src/users/test/db.spec.ts index 3e29d6673c..8fce11c7f1 100644 --- a/packages/backend-core/src/users/test/db.spec.ts +++ b/packages/backend-core/src/users/test/db.spec.ts @@ -48,6 +48,7 @@ describe("UserDB", () => { const persistedUser = await db.getUserByEmail(email) expect(persistedUser).toEqual({ ...user, + ohno: "what", _id: saveUserResponse._id, _rev: expect.stringMatching(/^1-\w+/), password: expect.not.stringMatching(user.password!), From 3b439e1228767bdbdb0d11b55e8bcadd28ac632e Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Wed, 20 Nov 2024 13:00:26 +0000 Subject: [PATCH 15/16] Don't prefix lerna output to allow github actions annotations through. --- .github/workflows/budibase_ci.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/budibase_ci.yml b/.github/workflows/budibase_ci.yml index 732a295aae..f1ed6eb87e 100644 --- a/.github/workflows/budibase_ci.yml +++ b/.github/workflows/budibase_ci.yml @@ -114,9 +114,9 @@ jobs: - name: Test run: | if ${{ env.ONLY_AFFECTED_TASKS }}; then - yarn test -- --ignore=@budibase/worker --ignore=@budibase/server --since=${{ env.NX_BASE_BRANCH }} -- --verbose --reporters=default --reporters=github-actions + yarn test -- --ignore=@budibase/worker --ignore=@budibase/server --no-prefix --since=${{ env.NX_BASE_BRANCH }} -- --verbose --reporters=default --reporters=github-actions else - yarn test -- --ignore=@budibase/worker --ignore=@budibase/server -- --verbose --reporters=default --reporters=github-actions + yarn test -- --ignore=@budibase/worker --ignore=@budibase/server --no-prefix -- --verbose --reporters=default --reporters=github-actions fi test-worker: From f25c24dac586da5866f5e768ab6454ead7e1d5fa Mon Sep 17 00:00:00 2001 From: Sam Rose Date: Wed, 20 Nov 2024 13:06:56 +0000 Subject: [PATCH 16/16] Fix test. 
--- packages/backend-core/src/users/test/db.spec.ts | 1 - 1 file changed, 1 deletion(-) diff --git a/packages/backend-core/src/users/test/db.spec.ts b/packages/backend-core/src/users/test/db.spec.ts index 8fce11c7f1..3e29d6673c 100644 --- a/packages/backend-core/src/users/test/db.spec.ts +++ b/packages/backend-core/src/users/test/db.spec.ts @@ -48,7 +48,6 @@ describe("UserDB", () => { const persistedUser = await db.getUserByEmail(email) expect(persistedUser).toEqual({ ...user, - ohno: "what", _id: saveUserResponse._id, _rev: expect.stringMatching(/^1-\w+/), password: expect.not.stringMatching(user.password!),