// budibase/packages/server/src/api/routes/tests/row.spec.ts
import * as setup from "./utilities"
import { datasourceDescribe } from "../../../integrations/tests/utils"
import tk from "timekeeper"
import emitter from "../../../../src/events"
import { outputProcessing } from "../../../utilities/rowProcessor"
import {
context,
setEnv,
InternalTable,
tenancy,
utils,
} from "@budibase/backend-core"
import { quotas } from "@budibase/pro"
import {
AIOperationEnum,
AutoFieldSubType,
Datasource,
DeleteRow,
FieldSchema,
FieldType,
BBReferenceFieldSubType,
FormulaType,
INTERNAL_TABLE_SOURCE_ID,
QuotaUsageType,
RelationshipType,
Row,
SaveTableRequest,
StaticQuotaName,
Table,
TableSourceType,
UpdatedRowEventEmitter,
TableSchema,
JsonFieldSubType,
RowExportFormat,
RelationSchemaField,
FormulaResponseType,
} from "@budibase/types"
import { generator, mocks } from "@budibase/backend-core/tests"
import _, { merge } from "lodash"
import * as uuid from "uuid"
import { Knex } from "knex"
import { InternalTables } from "../../../db/utils"
import { withEnv } from "../../../environment"
import { JsTimeoutError } from "@budibase/string-templates"
import { isDate } from "../../../utilities"
import nock from "nock"
import { mockChatGPTResponse } from "../../../tests/utilities/mocks/openai"
const timestamp = new Date("2023-01-26T11:48:57.597Z").toISOString()
tk.freeze(timestamp)
interface WaitOptions {
name: string
matchFn?: (event: any) => boolean
}
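// Attaches a one-off listener for the named emitter event, runs the callback,
// and resolves with the first event that passes the optional matchFn filter.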
async function waitForEvent(
opts: WaitOptions,
callback: () => Promise<void>
): Promise<any> {
const p = new Promise((resolve: any) => {
const listener = (event: any) => {
if (opts.matchFn && !opts.matchFn(event)) {
return
}
resolve(event)
emitter.off(opts.name, listener)
}
emitter.on(opts.name, listener)
})
await callback()
return await p
}
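// Encodes a JS snippet as the base64-wrapped {{ js "..." }} binding format
// understood by the templating layer.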
function encodeJS(binding: string) {
return `{{ js "${Buffer.from(binding).toString("base64")}"}}`
}
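// One parameterised suite is generated per configured datasource: the internal
// DB plus each external SQL flavour that supports full ("plus") table features.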
const descriptions = datasourceDescribe({ plus: true })
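// If no datasources are configured in this environment, the whole file is skipped.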
if (descriptions.length) {
describe.each(descriptions)(
"/rows ($dbName)",
({ config, dsProvider, isInternal, isMSSQL, isOracle }) => {
let table: Table
let datasource: Datasource | undefined
let client: Knex | undefined
beforeAll(async () => {
const ds = await dsProvider()
datasource = ds.datasource
client = ds.client
mocks.licenses.useCloudFree()
})
afterAll(async () => {
setup.afterAll()
})
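// Builds a SaveTableRequest aimed at whichever datasource this suite runs
// against, defaulting to an auto-increment "id" primary column unless an
// override supplies its own primary key.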
function saveTableRequest(
// We omit the name field here because it's generated in the function with a
// high likelihood to be unique. Tests should not have any reason to control
// the table name they're writing to.
...overrides: Partial<Omit<SaveTableRequest, "name">>[]
): SaveTableRequest {
const defaultSchema: TableSchema = {
id: {
type: FieldType.NUMBER,
name: "id",
autocolumn: true,
constraints: {
presence: true,
},
},
}
for (const override of overrides) {
if (override.primary) {
delete defaultSchema.id
}
}
const req: SaveTableRequest = {
name: uuid.v4().substring(0, 10),
type: "table",
sourceType: datasource
? TableSourceType.EXTERNAL
: TableSourceType.INTERNAL,
sourceId: datasource ? datasource._id! : INTERNAL_TABLE_SOURCE_ID,
primary: ["id"],
schema: defaultSchema,
}
const merged = merge(req, ...overrides)
return merged
}
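// Like saveTableRequest, but pre-populates "name" and "description" string
// columns and uses "name" as the primary display column.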
function defaultTable(
// We omit the name field here because it's generated in the function with a
// high likelihood to be unique. Tests should not have any reason to control
// the table name they're writing to.
...overrides: Partial<Omit<SaveTableRequest, "name">>[]
): SaveTableRequest {
return saveTableRequest(
{
primaryDisplay: "name",
schema: {
name: {
type: FieldType.STRING,
name: "name",
constraints: {
type: "string",
},
},
description: {
type: FieldType.STRING,
name: "description",
constraints: {
type: "string",
},
},
},
},
...overrides
)
}
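// Zeroes the static row quota so each test starts from a known usage baseline.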
const resetRowUsage = async () => {
await config.doInContext(
undefined,
async () =>
await quotas.setUsage(
0,
StaticQuotaName.ROWS,
QuotaUsageType.STATIC
)
)
}
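// Reads the current static row quota usage for the test tenant.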
const getRowUsage = async () => {
const { total } = await config.doInContext(undefined, () =>
quotas.getCurrentUsageValues(
QuotaUsageType.STATIC,
StaticQuotaName.ROWS
)
)
return total
}
const assertRowUsage = async (expected: number) => {
const usage = await getRowUsage()
// Because our quota tracking is not perfect, we allow a 10% margin of
// error. This is to account for the fact that parallel writes can result
// in some quota updates getting lost. We don't have any need to solve this
// right now, so we just allow for some error.
if (expected === 0) {
expect(usage).toEqual(0)
return
}
expect(usage).toBeGreaterThan(expected * 0.9)
expect(usage).toBeLessThan(expected * 1.1)
}
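// Metadata fields automatically present on internal rows; external (SQL) rows
// carry no such audit fields.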
const defaultRowFields = isInternal
? {
type: "row",
createdAt: timestamp,
updatedAt: timestamp,
}
: undefined
beforeAll(async () => {
table = await config.api.table.save(defaultTable())
})
beforeEach(async () => {
await resetRowUsage()
})
describe("create", () => {
it("creates a new row successfully", async () => {
const rowUsage = await getRowUsage()
const row = await config.api.row.save(table._id!, {
name: "Test Contact",
})
expect(row.name).toEqual("Test Contact")
expect(row._rev).toBeDefined()
await assertRowUsage(isInternal ? rowUsage + 1 : rowUsage)
})
it("fails to create a row for a table that does not exist", async () => {
const rowUsage = await getRowUsage()
await config.api.row.save("1234567", {}, { status: 404 })
await assertRowUsage(rowUsage)
})
it("fails to create a row if required fields are missing", async () => {
const rowUsage = await getRowUsage()
const table = await config.api.table.save(
saveTableRequest({
schema: {
required: {
type: FieldType.STRING,
name: "required",
constraints: {
type: "string",
presence: true,
},
},
},
})
)
await config.api.row.save(
table._id!,
{},
{
status: 500,
body: {
validationErrors: {
required: ["can't be blank"],
},
},
}
)
await assertRowUsage(rowUsage)
})
isInternal &&
it("increment row autoId per create row request", async () => {
const rowUsage = await getRowUsage()
const newTable = await config.api.table.save(
saveTableRequest({
schema: {
"Row ID": {
name: "Row ID",
type: FieldType.NUMBER,
subtype: AutoFieldSubType.AUTO_ID,
icon: "ri-magic-line",
autocolumn: true,
constraints: {
type: "number",
presence: true,
numericality: {
greaterThanOrEqualTo: "",
lessThanOrEqualTo: "",
},
},
},
},
})
)
let previousId = 0
for (let i = 0; i < 10; i++) {
const row = await config.api.row.save(newTable._id!, {})
expect(row["Row ID"]).toBeGreaterThan(previousId)
previousId = row["Row ID"]
}
await assertRowUsage(isInternal ? rowUsage + 10 : rowUsage)
})
isInternal &&
it("should increment auto ID correctly when creating rows in parallel", async () => {
const table = await config.api.table.save(
saveTableRequest({
schema: {
"Row ID": {
name: "Row ID",
type: FieldType.NUMBER,
subtype: AutoFieldSubType.AUTO_ID,
icon: "ri-magic-line",
autocolumn: true,
constraints: {
type: "number",
presence: true,
numericality: {
greaterThanOrEqualTo: "",
lessThanOrEqualTo: "",
},
},
},
},
})
)
const sequence = Array(50)
.fill(0)
.map((_, i) => i + 1)
// This block of code is simulating users creating auto ID rows at the
// same time. It's expected that this operation will sometimes return
// a document conflict error (409), but the idea is to retry in those
// situations. The code below does this a large number of times with
// small, random delays between them to try and get through the list
// as quickly as possible.
await Promise.all(
sequence.map(async () => {
const attempts = 30
for (let attempt = 0; attempt < attempts; attempt++) {
try {
await config.api.row.save(table._id!, {})
return
} catch (e) {
await new Promise(r => setTimeout(r, Math.random() * 50))
}
}
throw new Error(
`Failed to create row after ${attempts} attempts`
)
})
)
const rows = await config.api.row.fetch(table._id!)
expect(rows).toHaveLength(50)
// The main purpose of this test is to ensure that even under pressure,
// we maintain data integrity. An auto ID column should hand out
// monotonically increasing unique integers no matter what.
const ids = rows.map(r => r["Row ID"])
expect(ids).toEqual(expect.arrayContaining(sequence))
})
isInternal &&
it("doesn't allow creating in user table", async () => {
const response = await config.api.row.save(
InternalTable.USER_METADATA,
{
firstName: "Joe",
lastName: "Joe",
email: "joe@joe.com",
roles: {},
},
{ status: 400 }
)
expect(response.message).toBe("Cannot create new user entry.")
})
it("should not mis-parse date string out of JSON", async () => {
const table = await config.api.table.save(
saveTableRequest({
schema: {
name: {
type: FieldType.STRING,
name: "name",
},
},
})
)
const row = await config.api.row.save(table._id!, {
name: `{ "foo": "2023-01-26T11:48:57.000Z" }`,
})
expect(row.name).toEqual(`{ "foo": "2023-01-26T11:48:57.000Z" }`)
})
describe("default values", () => {
let table: Table
describe("string column", () => {
beforeAll(async () => {
table = await config.api.table.save(
saveTableRequest({
schema: {
description: {
name: "description",
type: FieldType.STRING,
default: "default description",
},
},
})
)
})
it("creates a new row with a default value successfully", async () => {
const row = await config.api.row.save(table._id!, {})
expect(row.description).toEqual("default description")
})
it("does not use default value if value specified", async () => {
const row = await config.api.row.save(table._id!, {
description: "specified description",
})
expect(row.description).toEqual("specified description")
})
it("uses the default value if value is null", async () => {
const row = await config.api.row.save(table._id!, {
description: null,
})
expect(row.description).toEqual("default description")
})
it("uses the default value if value is undefined", async () => {
const row = await config.api.row.save(table._id!, {
description: undefined,
})
expect(row.description).toEqual("default description")
})
})
describe("number column", () => {
beforeAll(async () => {
table = await config.api.table.save(
saveTableRequest({
schema: {
age: {
name: "age",
type: FieldType.NUMBER,
default: "25",
},
},
})
)
})
it("creates a new row with a default value successfully", async () => {
const row = await config.api.row.save(table._id!, {})
expect(row.age).toEqual(25)
})
it("does not use default value if value specified", async () => {
const row = await config.api.row.save(table._id!, {
age: 30,
})
expect(row.age).toEqual(30)
})
})
describe("date column", () => {
it("creates a row with a default value successfully", async () => {
const table = await config.api.table.save(
saveTableRequest({
schema: {
date: {
name: "date",
type: FieldType.DATETIME,
default: "2023-01-26T11:48:57.000Z",
},
},
})
)
const row = await config.api.row.save(table._id!, {})
expect(row.date).toEqual("2023-01-26T11:48:57.000Z")
})
it("gives an error if the default value is invalid", async () => {
const table = await config.api.table.save(
saveTableRequest({
schema: {
date: {
name: "date",
type: FieldType.DATETIME,
default: "invalid",
},
},
})
)
await config.api.row.save(
table._id!,
{},
{
status: 400,
body: {
message: `Invalid default value for field 'date' - Invalid date value: "invalid"`,
},
}
)
})
})
describe("options column", () => {
beforeAll(async () => {
table = await config.api.table.save(
saveTableRequest({
schema: {
status: {
name: "status",
type: FieldType.OPTIONS,
default: "requested",
constraints: {
inclusion: ["requested", "approved"],
},
},
},
})
)
})
it("creates a new row with a default value successfully", async () => {
const row = await config.api.row.save(table._id!, {})
expect(row.status).toEqual("requested")
})
it("does not use default value if value specified", async () => {
const row = await config.api.row.save(table._id!, {
status: "approved",
})
expect(row.status).toEqual("approved")
})
})
describe("array column", () => {
beforeAll(async () => {
table = await config.api.table.save(
saveTableRequest({
schema: {
food: {
name: "food",
type: FieldType.ARRAY,
default: ["apple", "orange"],
constraints: {
type: JsonFieldSubType.ARRAY,
inclusion: ["apple", "orange", "banana"],
},
},
},
})
)
})
it("creates a new row with a default value successfully", async () => {
const row = await config.api.row.save(table._id!, {})
expect(row.food).toEqual(["apple", "orange"])
})
it("creates a new row with a default value when given an empty list", async () => {
const row = await config.api.row.save(table._id!, { food: [] })
expect(row.food).toEqual(["apple", "orange"])
})
it("does not use default value if value specified", async () => {
const row = await config.api.row.save(table._id!, {
food: ["orange"],
})
expect(row.food).toEqual(["orange"])
})
it("resets back to its default value when empty", async () => {
let row = await config.api.row.save(table._id!, {
food: ["orange"],
})
row = await config.api.row.save(table._id!, { ...row, food: [] })
expect(row.food).toEqual(["apple", "orange"])
})
})
describe("user column", () => {
beforeAll(async () => {
table = await config.api.table.save(
saveTableRequest({
schema: {
user: {
name: "user",
type: FieldType.BB_REFERENCE_SINGLE,
subtype: BBReferenceFieldSubType.USER,
default: "{{ [Current User]._id }}",
},
},
})
)
})
it("creates a new row with a default value successfully", async () => {
const row = await config.api.row.save(table._id!, {})
expect(row.user._id).toEqual(config.getUser()._id)
})
it("does not use default value if value specified", async () => {
const id = `us_${utils.newid()}`
await config.createUser({ _id: id })
const row = await config.api.row.save(table._id!, {
user: id,
})
expect(row.user._id).toEqual(id)
})
})
describe("multi-user column", () => {
beforeAll(async () => {
table = await config.api.table.save(
saveTableRequest({
schema: {
users: {
name: "users",
type: FieldType.BB_REFERENCE,
subtype: BBReferenceFieldSubType.USER,
default: ["{{ [Current User]._id }}"],
},
},
})
)
})
it("creates a new row with a default value successfully", async () => {
const row = await config.api.row.save(table._id!, {})
expect(row.users).toHaveLength(1)
expect(row.users[0]._id).toEqual(config.getUser()._id)
})
it("does not use default value if value specified", async () => {
const id = `us_${utils.newid()}`
await config.createUser({ _id: id })
const row = await config.api.row.save(table._id!, {
users: [id],
})
expect(row.users).toHaveLength(1)
expect(row.users[0]._id).toEqual(id)
})
})
describe("boolean column", () => {
beforeAll(async () => {
table = await config.api.table.save(
saveTableRequest({
schema: {
active: {
name: "active",
type: FieldType.BOOLEAN,
default: "true",
},
},
})
)
})
it("creates a new row with a default value successfully", async () => {
const row = await config.api.row.save(table._id!, {})
expect(row.active).toEqual(true)
})
it("does not use default value if value specified", async () => {
const row = await config.api.row.save(table._id!, {
active: false,
})
expect(row.active).toEqual(false)
})
})
describe("bigint column", () => {
beforeAll(async () => {
table = await config.api.table.save(
saveTableRequest({
schema: {
bigNumber: {
name: "bigNumber",
type: FieldType.BIGINT,
default: "1234567890",
},
},
})
)
})
it("creates a new row with a default value successfully", async () => {
const row = await config.api.row.save(table._id!, {})
expect(row.bigNumber).toEqual("1234567890")
})
it("does not use default value if value specified", async () => {
const row = await config.api.row.save(table._id!, {
bigNumber: "9876543210",
})
expect(row.bigNumber).toEqual("9876543210")
})
})
describe("bindings", () => {
describe("string column", () => {
beforeAll(async () => {
table = await config.api.table.save(
saveTableRequest({
schema: {
description: {
name: "description",
type: FieldType.STRING,
default: `{{ date now "YYYY-MM-DDTHH:mm:ss" }}`,
},
},
})
)
})
it("can use bindings in default values", async () => {
const row = await config.api.row.save(table._id!, {})
expect(row.description).toMatch(
/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}/
)
})
it("does not use default value if value specified", async () => {
const row = await config.api.row.save(table._id!, {
description: "specified description",
})
expect(row.description).toEqual("specified description")
})
it("can bind the current user", async () => {
const table = await config.api.table.save(
saveTableRequest({
schema: {
user: {
name: "user",
type: FieldType.STRING,
default: `{{ [Current User]._id }}`,
},
},
})
)
const row = await config.api.row.save(table._id!, {})
expect(row.user).toEqual(config.getUser()._id)
})
it("cannot access current user password", async () => {
const table = await config.api.table.save(
saveTableRequest({
schema: {
user: {
name: "user",
type: FieldType.STRING,
default: `{{ user.password }}`,
},
},
})
)
const row = await config.api.row.save(table._id!, {})
// For some reason it's null for internal tables, and undefined for
// external.
expect(row.user == null).toBe(true)
})
})
describe("number column", () => {
beforeAll(async () => {
table = await config.api.table.save(
saveTableRequest({
schema: {
age: {
name: "age",
type: FieldType.NUMBER,
default: `{{ sum 10 10 5 }}`,
},
},
})
)
})
it("can use bindings in default values", async () => {
const row = await config.api.row.save(table._id!, {})
expect(row.age).toEqual(25)
})
describe("invalid default value", () => {
beforeAll(async () => {
table = await config.api.table.save(
saveTableRequest({
schema: {
age: {
name: "age",
type: FieldType.NUMBER,
default: `{{ capitalize "invalid" }}`,
},
},
})
)
})
it("throws an error when invalid default value", async () => {
await config.api.row.save(
table._id!,
{},
{
status: 400,
body: {
message:
"Invalid default value for field 'age' - Invalid number value \"Invalid\"",
},
}
)
})
})
})
})
})
describe("relations to same table", () => {
let relatedRows: Row[]
beforeAll(async () => {
const relatedTable = await config.api.table.save(
defaultTable({
schema: {
name: { name: "name", type: FieldType.STRING },
},
})
)
const relatedTableId = relatedTable._id!
table = await config.api.table.save(
defaultTable({
schema: {
name: { name: "name", type: FieldType.STRING },
related1: {
type: FieldType.LINK,
name: "related1",
fieldName: "main1",
tableId: relatedTableId,
relationshipType: RelationshipType.MANY_TO_MANY,
},
related2: {
type: FieldType.LINK,
name: "related2",
fieldName: "main2",
tableId: relatedTableId,
relationshipType: RelationshipType.MANY_TO_MANY,
},
},
})
)
relatedRows = await Promise.all([
config.api.row.save(relatedTableId, { name: "foo" }),
config.api.row.save(relatedTableId, { name: "bar" }),
config.api.row.save(relatedTableId, { name: "baz" }),
config.api.row.save(relatedTableId, { name: "boo" }),
])
})
it("can create rows with both relationships", async () => {
const row = await config.api.row.save(table._id!, {
name: "test",
related1: [relatedRows[0]._id!],
related2: [relatedRows[1]._id!],
})
expect(row).toEqual(
expect.objectContaining({
name: "test",
related1: [
{
_id: relatedRows[0]._id,
primaryDisplay: relatedRows[0].name,
},
],
related2: [
{
_id: relatedRows[1]._id,
primaryDisplay: relatedRows[1].name,
},
],
})
)
})
it("can create rows with no relationships", async () => {
const row = await config.api.row.save(table._id!, {
name: "test",
})
expect(row.related1).toBeUndefined()
expect(row.related2).toBeUndefined()
})
it("can create rows with only one relationships field", async () => {
const row = await config.api.row.save(table._id!, {
name: "test",
related1: [],
related2: [relatedRows[1]._id!],
})
expect(row).toEqual(
expect.objectContaining({
name: "test",
related2: [
{
_id: relatedRows[1]._id,
primaryDisplay: relatedRows[1].name,
},
],
})
)
expect(row.related1).toBeUndefined()
})
})
})
describe("get", () => {
it("reads an existing row successfully", async () => {
const existing = await config.api.row.save(table._id!, {})
const res = await config.api.row.get(table._id!, existing._id!)
expect(res).toEqual({
...existing,
...defaultRowFields,
})
})
it("returns 404 when row does not exist", async () => {
const table = await config.api.table.save(defaultTable())
await config.api.row.save(table._id!, {})
await config.api.row.get(table._id!, "1234567", {
status: 404,
})
})
isInternal &&
it("can search row from user table", async () => {
const res = await config.api.row.get(
InternalTables.USER_METADATA,
config.userMetadataId!
)
expect(res).toEqual({
...config.getUser(),
_id: config.userMetadataId!,
_rev: expect.any(String),
roles: undefined,
roleId: "ADMIN",
tableId: InternalTables.USER_METADATA,
})
})
})
describe("fetch", () => {
it("fetches all rows for given tableId", async () => {
const table = await config.api.table.save(defaultTable())
const rows = await Promise.all([
config.api.row.save(table._id!, {}),
config.api.row.save(table._id!, {}),
])
const res = await config.api.row.fetch(table._id!)
expect(res.map(r => r._id)).toEqual(
expect.arrayContaining(rows.map(r => r._id))
)
})
it("returns 404 when table does not exist", async () => {
await config.api.row.fetch("1234567", { status: 404 })
})
})
describe("update", () => {
it("updates an existing row successfully", async () => {
const existing = await config.api.row.save(table._id!, {})
const rowUsage = await getRowUsage()
const res = await config.api.row.save(table._id!, {
_id: existing._id,
_rev: existing._rev,
name: "Updated Name",
})
expect(res.name).toEqual("Updated Name")
await assertRowUsage(rowUsage)
})
!isInternal &&
it("can update a row on an external table with a primary key", async () => {
const tableName = uuid.v4().substring(0, 10)
await client!.schema.createTable(tableName, table => {
table.increments("id").primary()
table.string("name")
})
const res = await config.api.datasource.fetchSchema({
datasourceId: datasource!._id!,
})
const table = res.datasource.entities![tableName]
const row = await config.api.row.save(table._id!, {
id: 1,
name: "Row 1",
})
const updatedRow = await config.api.row.save(table._id!, {
_id: row._id!,
name: "Row 1 Updated",
2024-10-25 17:01:25 +02:00
})
expect(updatedRow.name).toEqual("Row 1 Updated")
const rows = await config.api.row.fetch(table._id!)
expect(rows).toHaveLength(1)
})
describe("relations to same table", () => {
let relatedRows: Row[]
beforeAll(async () => {
const relatedTable = await config.api.table.save(
defaultTable({
schema: {
name: { name: "name", type: FieldType.STRING },
},
})
)
const relatedTableId = relatedTable._id!
table = await config.api.table.save(
defaultTable({
schema: {
name: { name: "name", type: FieldType.STRING },
related1: {
type: FieldType.LINK,
name: "related1",
fieldName: "main1",
tableId: relatedTableId,
relationshipType: RelationshipType.MANY_TO_MANY,
},
related2: {
type: FieldType.LINK,
name: "related2",
fieldName: "main2",
tableId: relatedTableId,
relationshipType: RelationshipType.MANY_TO_MANY,
},
},
})
)
relatedRows = await Promise.all([
config.api.row.save(relatedTableId, { name: "foo" }),
config.api.row.save(relatedTableId, { name: "bar" }),
config.api.row.save(relatedTableId, { name: "baz" }),
config.api.row.save(relatedTableId, { name: "boo" }),
])
})
it("can edit rows with both relationships", async () => {
let row = await config.api.row.save(table._id!, {
name: "test",
related1: [relatedRows[0]._id!],
related2: [relatedRows[1]._id!],
})
row = await config.api.row.save(table._id!, {
...row,
related1: [relatedRows[0]._id!, relatedRows[1]._id!],
related2: [relatedRows[2]._id!],
})
expect(row).toEqual(
expect.objectContaining({
name: "test",
related1: expect.arrayContaining([
{
_id: relatedRows[0]._id,
primaryDisplay: relatedRows[0].name,
},
{
_id: relatedRows[1]._id,
primaryDisplay: relatedRows[1].name,
},
]),
related2: [
{
_id: relatedRows[2]._id,
primaryDisplay: relatedRows[2].name,
},
],
})
)
})
it("can drop existing relationship", async () => {
let row = await config.api.row.save(table._id!, {
name: "test",
related1: [relatedRows[0]._id!],
related2: [relatedRows[1]._id!],
})
row = await config.api.row.save(table._id!, {
...row,
related1: [],
related2: [relatedRows[2]._id!],
})
expect(row).toEqual(
expect.objectContaining({
name: "test",
related2: [
{
_id: relatedRows[2]._id,
primaryDisplay: relatedRows[2].name,
},
],
})
)
expect(row.related1).toBeUndefined()
})
it("can drop both relationships", async () => {
let row = await config.api.row.save(table._id!, {
name: "test",
related1: [relatedRows[0]._id!],
related2: [relatedRows[1]._id!],
})
row = await config.api.row.save(table._id!, {
...row,
related1: [],
related2: [],
})
expect(row).toEqual(
expect.objectContaining({
name: "test",
})
)
expect(row.related1).toBeUndefined()
expect(row.related2).toBeUndefined()
})
})
})
describe("patch", () => {
let otherTable: Table
beforeAll(async () => {
table = await config.api.table.save(defaultTable())
otherTable = await config.api.table.save(
defaultTable({
schema: {
relationship: {
name: "relationship",
relationshipType: RelationshipType.ONE_TO_MANY,
type: FieldType.LINK,
tableId: table._id!,
fieldName: "relationship",
},
},
})
)
})
it("should update only the fields that are supplied", async () => {
const existing = await config.api.row.save(table._id!, {})
const rowUsage = await getRowUsage()
const row = await config.api.row.patch(table._id!, {
_id: existing._id!,
_rev: existing._rev!,
tableId: table._id!,
name: "Updated Name",
})
expect(row.name).toEqual("Updated Name")
expect(row.description).toEqual(existing.description)
const savedRow = await config.api.row.get(table._id!, row._id!)
expect(savedRow.description).toEqual(existing.description)
expect(savedRow.name).toEqual("Updated Name")
await assertRowUsage(rowUsage)
})
it("should update only the fields that are supplied and emit the correct oldRow", async () => {
let beforeRow = await config.api.row.save(table._id!, {
name: "test",
description: "test",
})
const opts = {
name: "row:update",
matchFn: (event: UpdatedRowEventEmitter) =>
event.row._id === beforeRow._id,
}
const event = await waitForEvent(opts, async () => {
await config.api.row.patch(table._id!, {
_id: beforeRow._id!,
_rev: beforeRow._rev!,
tableId: table._id!,
name: "Updated Name",
})
})
expect(event.oldRow).toBeDefined()
expect(event.oldRow.name).toEqual("test")
expect(event.row.name).toEqual("Updated Name")
expect(event.oldRow.description).toEqual(beforeRow.description)
expect(event.row.description).toEqual(beforeRow.description)
})
it("should throw an error when given improper types", async () => {
const existing = await config.api.row.save(table._id!, {})
const rowUsage = await getRowUsage()
await config.api.row.patch(
table._id!,
{
_id: existing._id!,
_rev: existing._rev!,
tableId: table._id!,
name: 1,
},
{ status: 400 }
)
await assertRowUsage(rowUsage)
})
it("should not overwrite links if those links are not set", async () => {
let linkField: FieldSchema = {
type: FieldType.LINK,
name: "",
fieldName: "",
constraints: {
type: "array",
presence: false,
},
relationshipType: RelationshipType.ONE_TO_MANY,
tableId: InternalTable.USER_METADATA,
}
let table = await config.api.table.save({
name: "TestTable",
type: "table",
sourceType: TableSourceType.INTERNAL,
sourceId: INTERNAL_TABLE_SOURCE_ID,
schema: {
user1: { ...linkField, name: "user1", fieldName: "user1" },
user2: { ...linkField, name: "user2", fieldName: "user2" },
},
})
let user1 = await config.createUser()
let user2 = await config.createUser()
let row = await config.api.row.save(table._id!, {
user1: [{ _id: user1._id }],
user2: [{ _id: user2._id }],
})
let getResp = await config.api.row.get(table._id!, row._id!)
expect(getResp.user1[0]._id).toEqual(user1._id)
expect(getResp.user2[0]._id).toEqual(user2._id)
let patchResp = await config.api.row.patch(table._id!, {
_id: row._id!,
_rev: row._rev!,
tableId: table._id!,
user1: [{ _id: user2._id }],
})
expect(patchResp.user1[0]._id).toEqual(user2._id)
expect(patchResp.user2[0]._id).toEqual(user2._id)
getResp = await config.api.row.get(table._id!, row._id!)
expect(getResp.user1[0]._id).toEqual(user2._id)
expect(getResp.user2[0]._id).toEqual(user2._id)
})
it("should be able to remove a relationship from many side", async () => {
const row = await config.api.row.save(otherTable._id!, {
name: "test",
description: "test",
})
const row2 = await config.api.row.save(otherTable._id!, {
name: "test",
description: "test",
})
const { _id } = await config.api.row.save(table._id!, {
relationship: [{ _id: row._id }, { _id: row2._id }],
})
const relatedRow = await config.api.row.get(table._id!, _id!, {
status: 200,
})
expect(relatedRow.relationship.length).toEqual(2)
await config.api.row.save(table._id!, {
...relatedRow,
relationship: [{ _id: row._id }],
})
const afterRelatedRow = await config.api.row.get(table._id!, _id!, {
status: 200,
})
expect(afterRelatedRow.relationship.length).toEqual(1)
expect(afterRelatedRow.relationship[0]._id).toEqual(row._id)
})
it("should be able to update relationships when both columns are same name", async () => {
let row = await config.api.row.save(table._id!, {
name: "test",
description: "test",
})
let row2 = await config.api.row.save(otherTable._id!, {
name: "test",
description: "test",
relationship: [row._id],
})
row = await config.api.row.get(table._id!, row._id!)
expect(row.relationship.length).toBe(1)
const resp = await config.api.row.patch(table._id!, {
_id: row._id!,
_rev: row._rev!,
tableId: row.tableId!,
name: "test2",
relationship: [row2._id],
})
expect(resp.relationship.length).toBe(1)
})
it("should be able to keep linked data when updating from views that trims links from the main table", async () => {
let row = await config.api.row.save(table._id!, {
name: "main",
description: "main description",
})
const row2 = await config.api.row.save(otherTable._id!, {
name: "link",
description: "link description",
relationship: [row._id],
})
const view = await config.api.viewV2.create({
tableId: table._id!,
name: "view",
schema: {
name: { visible: true },
},
})
const resp = await config.api.row.patch(view.id, {
_id: row._id!,
_rev: row._rev!,
tableId: row.tableId!,
name: "test2",
relationship: [row2._id],
})
expect(resp.relationship).toBeUndefined()
const updatedRow = await config.api.row.get(table._id!, row._id!)
expect(updatedRow.relationship.length).toBe(1)
})
it("should be able to keep linked data when updating from views that trims links from the foreign table", async () => {
let row = await config.api.row.save(table._id!, {
name: "main",
description: "main description",
})
const row2 = await config.api.row.save(otherTable._id!, {
name: "link",
description: "link description",
relationship: [row._id],
})
const view = await config.api.viewV2.create({
tableId: otherTable._id!,
name: "view",
})
await config.api.row.patch(view.id, {
_id: row2._id!,
_rev: row2._rev!,
tableId: row2.tableId!,
})
const updatedRow = await config.api.row.get(table._id!, row._id!)
expect(updatedRow.relationship.length).toBe(1)
})
!isInternal &&
// MSSQL needs a setting called IDENTITY_INSERT to be set to ON to allow writing
// to identity columns. This is not something Budibase does currently.
!isMSSQL &&
it("should support updating fields that are part of a composite key", async () => {
const tableRequest = saveTableRequest({
primary: ["number", "string"],
schema: {
string: {
type: FieldType.STRING,
name: "string",
},
number: {
type: FieldType.NUMBER,
name: "number",
},
},
})
delete tableRequest.schema.id
const table = await config.api.table.save(tableRequest)
const stringValue = generator.word()
// MySQL and MariaDB auto-increment fields have a minimum value of 1. If
// you try to save a row with a value of 0 it will use 1 instead.
const naturalValue = generator.integer({ min: 1, max: 1000 })
const existing = await config.api.row.save(table._id!, {
string: stringValue,
number: naturalValue,
})
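// External row _ids are the URL-encoded primary key values, i.e. [<number>,'<string>'].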
expect(existing._id).toEqual(
`%5B${naturalValue}%2C'${stringValue}'%5D`
)
const row = await config.api.row.patch(table._id!, {
_id: existing._id!,
_rev: existing._rev!,
tableId: table._id!,
string: stringValue,
number: 1500,
})
expect(row._id).toEqual(`%5B${"1500"}%2C'${stringValue}'%5D`)
})
})
describe("destroy", () => {
beforeAll(async () => {
table = await config.api.table.save(defaultTable())
})
it("should be able to delete a row", async () => {
const createdRow = await config.api.row.save(table._id!, {})
const rowUsage = await getRowUsage()
const res = await config.api.row.bulkDelete(table._id!, {
rows: [createdRow],
})
expect(res[0]._id).toEqual(createdRow._id)
await assertRowUsage(isInternal ? rowUsage - 1 : rowUsage)
})
it("should be able to delete a row with ID only", async () => {
const createdRow = await config.api.row.save(table._id!, {})
const rowUsage = await getRowUsage()
const res = await config.api.row.bulkDelete(table._id!, {
rows: [createdRow._id!],
})
expect(res[0]._id).toEqual(createdRow._id)
expect(res[0].tableId).toEqual(table._id!)
await assertRowUsage(isInternal ? rowUsage - 1 : rowUsage)
})
it("should be able to bulk delete rows, including a row that doesn't exist", async () => {
const createdRow = await config.api.row.save(table._id!, {})
const createdRow2 = await config.api.row.save(table._id!, {})
const res = await config.api.row.bulkDelete(table._id!, {
rows: [createdRow, createdRow2, { _id: "9999999" }],
})
expect(res.map(r => r._id)).toEqual(
expect.arrayContaining([createdRow._id, createdRow2._id])
)
expect(res.length).toEqual(2)
})
describe("relations to same table", () => {
let relatedRows: Row[]
beforeAll(async () => {
const relatedTable = await config.api.table.save(
defaultTable({
schema: {
name: { name: "name", type: FieldType.STRING },
},
})
)
const relatedTableId = relatedTable._id!
table = await config.api.table.save(
defaultTable({
schema: {
name: { name: "name", type: FieldType.STRING },
related1: {
type: FieldType.LINK,
name: "related1",
fieldName: "main1",
tableId: relatedTableId,
relationshipType: RelationshipType.MANY_TO_MANY,
},
related2: {
type: FieldType.LINK,
name: "related2",
fieldName: "main2",
tableId: relatedTableId,
relationshipType: RelationshipType.MANY_TO_MANY,
},
},
})
)
relatedRows = await Promise.all([
config.api.row.save(relatedTableId, { name: "foo" }),
config.api.row.save(relatedTableId, { name: "bar" }),
config.api.row.save(relatedTableId, { name: "baz" }),
config.api.row.save(relatedTableId, { name: "boo" }),
])
})
it("can delete rows with both relationships", async () => {
const row = await config.api.row.save(table._id!, {
name: "test",
related1: [relatedRows[0]._id!],
related2: [relatedRows[1]._id!],
})
await config.api.row.delete(table._id!, { _id: row._id! })
await config.api.row.get(table._id!, row._id!, { status: 404 })
})
it("can delete rows with empty relationships", async () => {
const row = await config.api.row.save(table._id!, {
name: "test",
related1: [],
related2: [],
})
await config.api.row.delete(table._id!, { _id: row._id! })
await config.api.row.get(table._id!, row._id!, { status: 404 })
})
})
})
describe("validate", () => {
beforeAll(async () => {
table = await config.api.table.save(defaultTable())
})
it("should return no errors on valid row", async () => {
const rowUsage = await getRowUsage()
const res = await config.api.row.validate(table._id!, {
name: "ivan",
})
expect(res.valid).toBe(true)
expect(Object.keys(res.errors)).toEqual([])
await assertRowUsage(rowUsage)
})
it("should errors on invalid row", async () => {
const rowUsage = await getRowUsage()
const res = await config.api.row.validate(table._id!, { name: 1 })
if (isInternal) {
expect(res.valid).toBe(false)
expect(Object.keys(res.errors)).toEqual(["name"])
} else {
// Validation for external is not implemented, so it will always return valid
expect(res.valid).toBe(true)
expect(Object.keys(res.errors)).toEqual([])
}
await assertRowUsage(rowUsage)
})
})
describe("bulkDelete", () => {
beforeAll(async () => {
table = await config.api.table.save(defaultTable())
})
it("should be able to delete a bulk set of rows", async () => {
const row1 = await config.api.row.save(table._id!, {})
const row2 = await config.api.row.save(table._id!, {})
const rowUsage = await getRowUsage()
const res = await config.api.row.bulkDelete(table._id!, {
rows: [row1, row2],
})
expect(res.length).toEqual(2)
await config.api.row.get(table._id!, row1._id!, { status: 404 })
await assertRowUsage(isInternal ? rowUsage - 2 : rowUsage)
})
it("should be able to delete a variety of row set types", async () => {
const [row1, row2, row3] = await Promise.all([
config.api.row.save(table._id!, {}),
config.api.row.save(table._id!, {}),
config.api.row.save(table._id!, {}),
])
const rowUsage = await getRowUsage()
const res = await config.api.row.bulkDelete(table._id!, {
rows: [row1, row2._id!, { _id: row3._id }],
})
expect(res.length).toEqual(3)
await config.api.row.get(table._id!, row1._id!, { status: 404 })
await assertRowUsage(isInternal ? rowUsage - 3 : rowUsage)
})
it("should accept a valid row object and delete the row", async () => {
const row1 = await config.api.row.save(table._id!, {})
const rowUsage = await getRowUsage()
const res = await config.api.row.delete(table._id!, row1 as DeleteRow)
expect(res.id).toEqual(row1._id)
await config.api.row.get(table._id!, row1._id!, { status: 404 })
await assertRowUsage(isInternal ? rowUsage - 1 : rowUsage)
})
it.each([{ not: "valid" }, { rows: 123 }, "invalid"])(
"should ignore malformed/invalid delete request: %s",
async (request: any) => {
const rowUsage = await getRowUsage()
await config.api.row.delete(table._id!, request, {
status: 400,
body: {
message: "Invalid delete rows request",
},
})
await assertRowUsage(rowUsage)
}
)
})
describe("bulkImport", () => {
isInternal &&
it("should update Auto ID field after bulk import", async () => {
const table = await config.api.table.save(
saveTableRequest({
primary: ["autoId"],
schema: {
autoId: {
name: "autoId",
type: FieldType.NUMBER,
subtype: AutoFieldSubType.AUTO_ID,
autocolumn: true,
constraints: {
type: "number",
presence: false,
},
},
},
})
)
let row = await config.api.row.save(table._id!, {})
expect(row.autoId).toEqual(1)
await config.api.row.bulkImport(table._id!, {
rows: [{ autoId: 2 }],
})
row = await config.api.row.save(table._id!, {})
expect(row.autoId).toEqual(3)
})
isInternal &&
it("should reject bulkImporting relationship fields", async () => {
const table1 = await config.api.table.save(saveTableRequest())
const table2 = await config.api.table.save(
saveTableRequest({
schema: {
relationship: {
name: "relationship",
type: FieldType.LINK,
tableId: table1._id!,
relationshipType: RelationshipType.ONE_TO_MANY,
fieldName: "relationship",
},
},
})
)
const table1Row1 = await config.api.row.save(table1._id!, {})
await config.api.row.bulkImport(
table2._id!,
{
rows: [{ relationship: [table1Row1._id!] }],
},
{
status: 400,
body: {
message:
'Can\'t bulk import relationship fields for internal databases, found value in field "relationship"',
},
}
)
})
it("should be able to bulkImport rows", async () => {
const table = await config.api.table.save(
saveTableRequest({
schema: {
name: {
type: FieldType.STRING,
name: "name",
},
description: {
type: FieldType.STRING,
name: "description",
},
},
})
)
const rowUsage = await getRowUsage()
await config.api.row.bulkImport(table._id!, {
rows: [
{
name: "Row 1",
description: "Row 1 description",
},
{
name: "Row 2",
description: "Row 2 description",
},
],
})
const rows = await config.api.row.fetch(table._id!)
expect(rows.length).toEqual(2)
rows.sort((a, b) => a.name.localeCompare(b.name))
expect(rows[0].name).toEqual("Row 1")
expect(rows[0].description).toEqual("Row 1 description")
expect(rows[1].name).toEqual("Row 2")
expect(rows[1].description).toEqual("Row 2 description")
await assertRowUsage(isInternal ? rowUsage + 2 : rowUsage)
})
isInternal &&
it("should be able to update existing rows on bulkImport", async () => {
const table = await config.api.table.save(
saveTableRequest({
schema: {
name: {
type: FieldType.STRING,
name: "name",
},
description: {
type: FieldType.STRING,
name: "description",
},
},
})
)
const existingRow = await config.api.row.save(table._id!, {
name: "Existing row",
description: "Existing description",
})
const rowUsage = await getRowUsage()
await config.api.row.bulkImport(table._id!, {
rows: [
{
name: "Row 1",
description: "Row 1 description",
},
{ ...existingRow, name: "Updated existing row" },
{
name: "Row 2",
description: "Row 2 description",
},
],
identifierFields: ["_id"],
})
const rows = await config.api.row.fetch(table._id!)
expect(rows.length).toEqual(3)
rows.sort((a, b) => a.name.localeCompare(b.name))
expect(rows[0].name).toEqual("Row 1")
expect(rows[0].description).toEqual("Row 1 description")
expect(rows[1].name).toEqual("Row 2")
expect(rows[1].description).toEqual("Row 2 description")
expect(rows[2].name).toEqual("Updated existing row")
expect(rows[2].description).toEqual("Existing description")
await assertRowUsage(rowUsage + 2)
})
isInternal &&
it("should create new rows if not identifierFields are provided", async () => {
const table = await config.api.table.save(
saveTableRequest({
schema: {
name: {
type: FieldType.STRING,
name: "name",
},
description: {
type: FieldType.STRING,
name: "description",
},
},
})
)
const existingRow = await config.api.row.save(table._id!, {
name: "Existing row",
description: "Existing description",
})
const rowUsage = await getRowUsage()
await config.api.row.bulkImport(table._id!, {
rows: [
{
name: "Row 1",
description: "Row 1 description",
},
{ ...existingRow, name: "Updated existing row" },
{
name: "Row 2",
description: "Row 2 description",
},
],
})
const rows = await config.api.row.fetch(table._id!)
expect(rows.length).toEqual(4)
rows.sort((a, b) => a.name.localeCompare(b.name))
expect(rows[0].name).toEqual("Existing row")
expect(rows[0].description).toEqual("Existing description")
expect(rows[1].name).toEqual("Row 1")
expect(rows[1].description).toEqual("Row 1 description")
expect(rows[2].name).toEqual("Row 2")
expect(rows[2].description).toEqual("Row 2 description")
expect(rows[3].name).toEqual("Updated existing row")
expect(rows[3].description).toEqual("Existing description")
await assertRowUsage(rowUsage + 3)
})
// Upserting isn't yet supported in MSSQL / Oracle, see:
// https://github.com/knex/knex/pull/6050
!isMSSQL &&
!isOracle &&
it("should be able to update existing rows with bulkImport", async () => {
const table = await config.api.table.save(
saveTableRequest({
primary: ["userId"],
schema: {
userId: {
type: FieldType.NUMBER,
name: "userId",
constraints: {
presence: true,
},
},
name: {
type: FieldType.STRING,
name: "name",
},
description: {
type: FieldType.STRING,
name: "description",
},
},
})
)
const row1 = await config.api.row.save(table._id!, {
userId: 1,
name: "Row 1",
description: "Row 1 description",
})
const row2 = await config.api.row.save(table._id!, {
userId: 2,
name: "Row 2",
description: "Row 2 description",
})
await config.api.row.bulkImport(table._id!, {
identifierFields: ["userId"],
rows: [
{
userId: row1.userId,
name: "Row 1 updated",
description: "Row 1 description updated",
},
{
userId: row2.userId,
name: "Row 2 updated",
description: "Row 2 description updated",
},
{
userId: 3,
name: "Row 3",
description: "Row 3 description",
},
],
})
const rows = await config.api.row.fetch(table._id!)
expect(rows.length).toEqual(3)
rows.sort((a, b) => a.name.localeCompare(b.name))
expect(rows[0].name).toEqual("Row 1 updated")
expect(rows[0].description).toEqual("Row 1 description updated")
expect(rows[1].name).toEqual("Row 2 updated")
expect(rows[1].description).toEqual("Row 2 description updated")
expect(rows[2].name).toEqual("Row 3")
expect(rows[2].description).toEqual("Row 3 description")
})
// Upserting isn't yet supported in MSSQL or Oracle, see:
// https://github.com/knex/knex/pull/6050
!isMSSQL &&
!isOracle &&
!isInternal &&
it("should be able to update existing rows with composite primary keys with bulkImport", async () => {
const tableName = uuid.v4()
await client?.schema.createTable(tableName, table => {
table.integer("companyId")
table.integer("userId")
table.string("name")
table.string("description")
table.primary(["companyId", "userId"])
})
const resp = await config.api.datasource.fetchSchema({
datasourceId: datasource!._id!,
})
const table = resp.datasource.entities![tableName]
const row1 = await config.api.row.save(table._id!, {
companyId: 1,
userId: 1,
name: "Row 1",
description: "Row 1 description",
})
const row2 = await config.api.row.save(table._id!, {
companyId: 1,
userId: 2,
name: "Row 2",
description: "Row 2 description",
})
await config.api.row.bulkImport(table._id!, {
identifierFields: ["companyId", "userId"],
rows: [
{
companyId: 1,
userId: row1.userId,
name: "Row 1 updated",
description: "Row 1 description updated",
},
{
companyId: 1,
userId: row2.userId,
name: "Row 2 updated",
description: "Row 2 description updated",
},
{
companyId: 1,
userId: 3,
name: "Row 3",
description: "Row 3 description",
},
],
})
const rows = await config.api.row.fetch(table._id!)
expect(rows.length).toEqual(3)
rows.sort((a, b) => a.name.localeCompare(b.name))
expect(rows[0].name).toEqual("Row 1 updated")
expect(rows[0].description).toEqual("Row 1 description updated")
expect(rows[1].name).toEqual("Row 2 updated")
expect(rows[1].description).toEqual("Row 2 description updated")
expect(rows[2].name).toEqual("Row 3")
expect(rows[2].description).toEqual("Row 3 description")
})
// Upserting isn't yet supported in MSSQL/Oracle, see:
// https://github.com/knex/knex/pull/6050
!isMSSQL &&
!isOracle &&
!isInternal &&
it("should be able to update existing rows an autoID primary key", async () => {
const tableName = uuid.v4()
await client!.schema.createTable(tableName, table => {
table.increments("userId").primary()
table.string("name")
})
const resp = await config.api.datasource.fetchSchema({
datasourceId: datasource!._id!,
})
const table = resp.datasource.entities![tableName]
const row1 = await config.api.row.save(table._id!, {
name: "Clare",
})
const row2 = await config.api.row.save(table._id!, {
name: "Jeff",
})
await config.api.row.bulkImport(table._id!, {
identifierFields: ["userId"],
rows: [
{
userId: row1.userId,
name: "Clare updated",
},
{
userId: row2.userId,
name: "Jeff updated",
},
],
})
const rows = await config.api.row.fetch(table._id!)
expect(rows.length).toEqual(2)
rows.sort((a, b) => a.name.localeCompare(b.name))
expect(rows[0].name).toEqual("Clare updated")
expect(rows[1].name).toEqual("Jeff updated")
})
it("should reject bulkImport date only fields with wrong format", async () => {
const table = await config.api.table.save(
saveTableRequest({
schema: {
date: {
type: FieldType.DATETIME,
dateOnly: true,
name: "date",
},
},
})
)
await config.api.row.bulkImport(
table._id!,
{
rows: [
{
date: "01.02.2024",
},
],
},
{
status: 400,
body: {
message:
'Invalid format for field "date": "01.02.2024". Date-only fields must be in the format "YYYY-MM-DD".',
},
}
)
})
it("should reject bulkImport date time fields with wrong format", async () => {
const table = await config.api.table.save(
saveTableRequest({
schema: {
date: {
type: FieldType.DATETIME,
name: "date",
},
},
})
)
await config.api.row.bulkImport(
table._id!,
{
rows: [
{
date: "01.02.2024",
},
],
},
{
status: 400,
body: {
message:
'Invalid format for field "date": "01.02.2024". Datetime fields must be in ISO format, e.g. "YYYY-MM-DDTHH:MM:SSZ".',
},
}
)
})
it("should reject bulkImport time fields with wrong format", async () => {
const table = await config.api.table.save(
saveTableRequest({
schema: {
time: {
type: FieldType.DATETIME,
timeOnly: true,
name: "time",
},
},
})
)
await config.api.row.bulkImport(
table._id!,
{
rows: [
{
time: "3pm",
},
],
},
{
status: 400,
body: {
message:
'Invalid format for field "time": "3pm". Time-only fields must be in the format "HH:MM:SS".',
},
}
)
})
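// A hedged, illustrative helper (not used by the tests above) showing how a
// caller might normalise dates into the "YYYY-MM-DD" form bulkImport accepts
// for date-only fields; the "DD.MM.YYYY" input format is an assumption made
// purely for this example.
function exampleToDateOnly(value: string): string {
  // e.g. exampleToDateOnly("01.02.2024") === "2024-02-01"
  const [day, month, year] = value.split(".")
  return `${year}-${month.padStart(2, "0")}-${day.padStart(2, "0")}`
}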
})
describe("enrich", () => {
beforeAll(async () => {
table = await config.api.table.save(defaultTable())
})
it("should allow enriching some linked rows", async () => {
const { linkedTable, firstRow, secondRow } = await tenancy.doInTenant(
config.getTenantId(),
async () => {
const linkedTable = await config.api.table.save(
defaultTable({
schema: {
link: {
name: "link",
fieldName: "link",
type: FieldType.LINK,
relationshipType: RelationshipType.ONE_TO_MANY,
tableId: table._id!,
},
},
})
)
const firstRow = await config.api.row.save(table._id!, {
name: "Test Contact",
description: "original description",
})
const secondRow = await config.api.row.save(linkedTable._id!, {
name: "Test 2",
description: "og desc",
link: [{ _id: firstRow._id }],
})
return { linkedTable, firstRow, secondRow }
}
)
const rowUsage = await getRowUsage()
// test basic enrichment
const resBasic = await config.api.row.get(
linkedTable._id!,
secondRow._id!
)
expect(resBasic.link.length).toBe(1)
expect(resBasic.link[0]).toEqual({
_id: firstRow._id,
primaryDisplay: firstRow.name,
})
// test full enrichment
const resEnriched = await config.api.row.getEnriched(
linkedTable._id!,
secondRow._id!
)
expect(resEnriched.link.length).toBe(1)
expect(resEnriched.link[0]._id).toBe(firstRow._id)
expect(resEnriched.link[0].name).toBe("Test Contact")
expect(resEnriched.link[0].description).toBe("original description")
await assertRowUsage(rowUsage)
})
})
isInternal &&
describe("attachments and signatures", () => {
const coreAttachmentEnrichment = async (
schema: TableSchema,
field: string,
attachmentCfg: string | string[]
) => {
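// Saves a row whose attachment/signature field points at the given object
// store key(s), then runs outputProcessing in a self-hosted app context and
// asserts each entry has been enriched with a signed /files/signed/... URL.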
const testTable = await config.api.table.save(
defaultTable({
schema,
})
)
const attachmentToStoreKey = (attachmentId: string) => {
return {
key: `${config.getAppId()}/attachments/${attachmentId}`,
}
}
const draftRow = {
name: "test",
description: "test",
[field]:
typeof attachmentCfg === "string"
? attachmentToStoreKey(attachmentCfg)
: attachmentCfg.map(attachmentToStoreKey),
tableId: testTable._id,
}
const row = await config.api.row.save(testTable._id!, draftRow)
await withEnv({ SELF_HOSTED: "true" }, async () => {
return context.doInAppContext(config.getAppId(), async () => {
const enriched: Row[] = await outputProcessing(testTable, [row])
const [targetRow] = enriched
const attachmentEntries = Array.isArray(targetRow[field])
? targetRow[field]
: [targetRow[field]]
for (const entry of attachmentEntries) {
const attachmentId = entry.key.split("/").pop()
expect(entry.url.split("?")[0]).toBe(
`/files/signed/prod-budi-app-assets/${config.getProdAppId()}/attachments/${attachmentId}`
)
}
})
})
}
it("should allow enriching single attachment rows", async () => {
await coreAttachmentEnrichment(
{
attachment: {
type: FieldType.ATTACHMENT_SINGLE,
name: "attachment",
constraints: { presence: false },
},
},
"attachment",
`${uuid.v4()}.csv`
)
})
it("should allow enriching attachment list rows", async () => {
await coreAttachmentEnrichment(
{
attachments: {
type: FieldType.ATTACHMENTS,
name: "attachments",
constraints: { type: "array", presence: false },
},
},
"attachments",
[`${uuid.v4()}.csv`]
)
})
it("should allow enriching signature rows", async () => {
await coreAttachmentEnrichment(
{
signature: {
type: FieldType.SIGNATURE_SINGLE,
name: "signature",
constraints: { presence: false },
},
},
"signature",
`${uuid.v4()}.png`
)
})
})
describe("exportRows", () => {
beforeEach(async () => {
table = await config.api.table.save(defaultTable())
})
isInternal &&
it("should not export internal couchdb fields", async () => {
const existing = await config.api.row.save(table._id!, {
name: generator.guid(),
description: generator.paragraph(),
})
const res = await config.api.row.exportRows(table._id!, {
rows: [existing._id!],
})
const results = JSON.parse(res)
expect(results.length).toEqual(1)
const row = results[0]
expect(Object.keys(row)).toEqual(["_id", "name", "description"])
})
!isInternal &&
it("should allow exporting all columns", async () => {
const existing = await config.api.row.save(table._id!, {})
const res = await config.api.row.exportRows(table._id!, {
rows: [existing._id!],
})
const results = JSON.parse(res)
expect(results.length).toEqual(1)
const row = results[0]
// Ensure all original columns were exported
expect(Object.keys(row).length).toBe(Object.keys(existing).length)
Object.keys(existing).forEach(key => {
expect(row[key]).toEqual(existing[key])
})
})
it("should allow exporting without filtering", async () => {
const existing = await config.api.row.save(table._id!, {})
const res = await config.api.row.exportRows(table._id!)
const results = JSON.parse(res)
expect(results.length).toEqual(1)
const row = results[0]
expect(row._id).toEqual(existing._id)
})
it("should allow exporting only certain columns", async () => {
const existing = await config.api.row.save(table._id!, {})
const res = await config.api.row.exportRows(table._id!, {
rows: [existing._id!],
columns: ["_id"],
})
const results = JSON.parse(res)
expect(results.length).toEqual(1)
const row = results[0]
// Ensure only the _id column was exported
expect(Object.keys(row).length).toEqual(1)
expect(row._id).toEqual(existing._id)
})
it("should handle single quotes in row filtering", async () => {
const existing = await config.api.row.save(table._id!, {})
const res = await config.api.row.exportRows(table._id!, {
rows: [`['${existing._id!}']`],
})
const results = JSON.parse(res)
expect(results.length).toEqual(1)
const row = results[0]
expect(row._id).toEqual(existing._id)
})
it("should return an error if no table is found", async () => {
const existing = await config.api.row.save(table._id!, {})
await config.api.row.exportRows(
"1234567",
{ rows: [existing._id!] },
RowExportFormat.JSON,
{ status: 404 }
)
})
// MSSQL needs a setting called IDENTITY_INSERT to be set to ON to allow writing
// to identity columns. This is not something Budibase does currently (see the
// illustrative sketch after this test).
!isMSSQL &&
it("should handle filtering by composite primary keys", async () => {
const tableRequest = saveTableRequest({
primary: ["number", "string"],
schema: {
string: {
type: FieldType.STRING,
name: "string",
},
number: {
type: FieldType.NUMBER,
name: "number",
},
},
})
delete tableRequest.schema.id
const table = await config.api.table.save(tableRequest)
const toCreate = generator
.unique(() => generator.integer({ min: 0, max: 10000 }), 10)
.map(number => ({
number,
string: generator.word({ length: 30 }),
}))
const rows = await Promise.all(
toCreate.map(d => config.api.row.save(table._id!, d))
)
const res = await config.api.row.exportRows(table._id!, {
rows: _.sampleSize(rows, 3).map(r => r._id!),
})
const results = JSON.parse(res)
expect(results.length).toEqual(3)
})
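// Illustrative only: a sketch of what writing explicit values to an MSSQL
// identity column would involve if Budibase ever supported it. The helper is
// hypothetical and is not called anywhere in this suite.
async function exampleIdentityInsert(db: Knex, tableName: string, row: Row) {
  // IDENTITY_INSERT must be enabled for the specific table before an explicit
  // identity value can be written, and disabled again afterwards.
  await db.raw("SET IDENTITY_INSERT ?? ON", [tableName])
  try {
    await db(tableName).insert(row)
  } finally {
    await db.raw("SET IDENTITY_INSERT ?? OFF", [tableName])
  }
}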
describe("should allow exporting all column types", () => {
let tableId: string
let expectedRowData: Row
beforeAll(async () => {
const fullSchema = setup.structures.fullSchemaWithoutLinks({
allRequired: true,
})
const table = await config.api.table.save(
saveTableRequest({
...setup.structures.basicTable(),
schema: fullSchema,
primary: ["string"],
})
)
tableId = table._id!
const rowValues: Record<keyof typeof fullSchema, any> = {
[FieldType.STRING]: generator.guid(),
[FieldType.LONGFORM]: generator.paragraph(),
[FieldType.OPTIONS]: "option 2",
[FieldType.ARRAY]: ["options 2", "options 4"],
[FieldType.NUMBER]: generator.natural(),
[FieldType.BOOLEAN]: generator.bool(),
[FieldType.DATETIME]: generator.date().toISOString().slice(0, 10),
[FieldType.ATTACHMENTS]: [setup.structures.basicAttachment()],
[FieldType.ATTACHMENT_SINGLE]: setup.structures.basicAttachment(),
[FieldType.FORMULA]: undefined, // generated field
[FieldType.AUTO]: undefined, // generated field
[FieldType.AI]: "LLM Output",
[FieldType.JSON]: { name: generator.guid() },
[FieldType.INTERNAL]: generator.guid(),
[FieldType.BARCODEQR]: generator.guid(),
[FieldType.SIGNATURE_SINGLE]: setup.structures.basicAttachment(),
[FieldType.BIGINT]: generator.integer().toString(),
[FieldType.BB_REFERENCE]: [{ _id: config.getUser()._id }],
[FieldType.BB_REFERENCE_SINGLE]: { _id: config.getUser()._id },
}
const row = await config.api.row.save(table._id!, rowValues)
expectedRowData = {
_id: row._id,
[FieldType.STRING]: rowValues[FieldType.STRING],
[FieldType.LONGFORM]: rowValues[FieldType.LONGFORM],
[FieldType.OPTIONS]: rowValues[FieldType.OPTIONS],
[FieldType.ARRAY]: rowValues[FieldType.ARRAY],
[FieldType.NUMBER]: rowValues[FieldType.NUMBER],
[FieldType.BOOLEAN]: rowValues[FieldType.BOOLEAN],
[FieldType.DATETIME]: rowValues[FieldType.DATETIME],
[FieldType.ATTACHMENTS]: rowValues[FieldType.ATTACHMENTS].map(
(a: any) =>
expect.objectContaining({
...a,
url: expect.any(String),
})
),
[FieldType.ATTACHMENT_SINGLE]: expect.objectContaining({
...rowValues[FieldType.ATTACHMENT_SINGLE],
url: expect.any(String),
}),
[FieldType.FORMULA]: fullSchema[FieldType.FORMULA].formula,
[FieldType.AUTO]: expect.any(Number),
[FieldType.AI]: expect.any(String),
[FieldType.JSON]: rowValues[FieldType.JSON],
[FieldType.INTERNAL]: rowValues[FieldType.INTERNAL],
[FieldType.BARCODEQR]: rowValues[FieldType.BARCODEQR],
[FieldType.SIGNATURE_SINGLE]: expect.objectContaining({
...rowValues[FieldType.SIGNATURE_SINGLE],
url: expect.any(String),
}),
[FieldType.BIGINT]: rowValues[FieldType.BIGINT],
[FieldType.BB_REFERENCE]: rowValues[FieldType.BB_REFERENCE].map(
expect.objectContaining
),
[FieldType.BB_REFERENCE_SINGLE]: expect.objectContaining(
rowValues[FieldType.BB_REFERENCE_SINGLE]
),
}
})
it("as csv", async () => {
const exportedValue = await config.api.row.exportRows(
tableId,
{ query: {} },
RowExportFormat.CSV
)
const jsonResult = await config.api.table.csvToJson({
csvString: exportedValue,
})
const stringified = (value: string) => JSON.stringify(value)
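// Builds a matcher for a CSV cell holding a JSON-stringified object (wrapped
// in an array when isArray is true) whose given key must equal the expected
// value, while allowing any remaining properties.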
const matchingObject = (
key: string,
value: any,
isArray: boolean
) => {
const objectMatcher = `{"${key}":"${value[key]}".*?}`
if (isArray) {
return expect.stringMatching(
new RegExp(`^\\[${objectMatcher}\\]$`)
)
}
return expect.stringMatching(new RegExp(`^${objectMatcher}$`))
}
expect(jsonResult).toEqual([
{
...expectedRowData,
auto: expect.any(String),
array: stringified(expectedRowData["array"]),
attachment: matchingObject(
"key",
expectedRowData["attachment"][0].sample,
true
),
attachment_single: matchingObject(
"key",
expectedRowData["attachment_single"].sample,
false
),
boolean: stringified(expectedRowData["boolean"]),
json: stringified(expectedRowData["json"]),
number: stringified(expectedRowData["number"]),
signature_single: matchingObject(
"key",
expectedRowData["signature_single"].sample,
false
),
bb_reference: matchingObject(
"_id",
expectedRowData["bb_reference"][0].sample,
true
),
bb_reference_single: matchingObject(
"_id",
expectedRowData["bb_reference_single"].sample,
false
),
ai: "LLM Output",
},
])
})
it("as json", async () => {
const exportedValue = await config.api.row.exportRows(
tableId,
{ query: {} },
RowExportFormat.JSON
)
const json = JSON.parse(exportedValue)
expect(json).toEqual([expectedRowData])
})
it("as json with schema", async () => {
const exportedValue = await config.api.row.exportRows(
tableId,
{ query: {} },
RowExportFormat.JSON_WITH_SCHEMA
)
const json = JSON.parse(exportedValue)
expect(json).toEqual({
schema: expect.any(Object),
rows: [expectedRowData],
})
})
it("can handle csv-special characters in strings", async () => {
const badString = 'test":, wow", "test": "wow"'
const table = await config.api.table.save(
saveTableRequest({
schema: {
string: {
type: FieldType.STRING,
name: "string",
},
},
})
)
await config.api.row.save(table._id!, { string: badString })
const exportedValue = await config.api.row.exportRows(
table._id!,
{ query: {} },
RowExportFormat.CSV
)
const json = await config.api.table.csvToJson(
{
csvString: exportedValue,
},
{
status: 200,
}
)
expect(json).toHaveLength(1)
expect(json[0].string).toEqual(badString)
})
it("exported data can be re-imported", async () => {
// export all
const exportedValue = await config.api.row.exportRows(
tableId,
{ query: {} },
RowExportFormat.CSV
)
// import all twice
const rows = await config.api.table.csvToJson({
csvString: exportedValue,
})
await config.api.row.bulkImport(tableId, {
rows,
})
await config.api.row.bulkImport(tableId, {
rows,
})
const { rows: allRows } = await config.api.row.search(tableId)
const expectedRow = {
...expectedRowData,
_id: expect.any(String),
_rev: expect.any(String),
type: "row",
tableId: tableId,
createdAt: new Date().toISOString(),
updatedAt: new Date().toISOString(),
}
expect(allRows).toEqual([expectedRow, expectedRow, expectedRow])
})
})
})
let o2mTable: Table
let m2mTable: Table
beforeAll(async () => {
o2mTable = await config.api.table.save(defaultTable())
m2mTable = await config.api.table.save(defaultTable())
})
describe.each([
[
"relationship fields",
(): Record<string, FieldSchema> => ({
user: {
name: "user",
relationshipType: RelationshipType.ONE_TO_MANY,
type: FieldType.LINK,
tableId: o2mTable._id!,
fieldName: "fk_o2m",
},
users: {
name: "users",
relationshipType: RelationshipType.MANY_TO_MANY,
type: FieldType.LINK,
tableId: m2mTable._id!,
fieldName: "fk_m2m",
},
}),
(tableId: string) =>
config.api.row.save(tableId, {
name: uuid.v4(),
description: generator.paragraph(),
tableId,
}),
(row: Row) => ({
_id: row._id,
primaryDisplay: row.name,
}),
],
[
"bb reference fields",
(): Record<string, FieldSchema> => ({
user: {
name: "user",
type: FieldType.BB_REFERENCE,
subtype: BBReferenceFieldSubType.USER,
},
users: {
name: "users",
type: FieldType.BB_REFERENCE,
subtype: BBReferenceFieldSubType.USERS,
},
}),
() => config.createUser(),
(row: Row) => ({
_id: row._id,
primaryDisplay: row.email,
email: row.email,
firstName: row.firstName,
lastName: row.lastName,
}),
],
])("links - %s", (__, relSchema, dataGenerator, resultMapper) => {
let tableId: string
let o2mData: Row[]
let m2mData: Row[]
beforeAll(async () => {
const table = await config.api.table.save(
defaultTable({ schema: relSchema() })
)
tableId = table._id!
o2mData = [
await dataGenerator(o2mTable._id!),
await dataGenerator(o2mTable._id!),
await dataGenerator(o2mTable._id!),
await dataGenerator(o2mTable._id!),
]
m2mData = [
await dataGenerator(m2mTable._id!),
await dataGenerator(m2mTable._id!),
await dataGenerator(m2mTable._id!),
await dataGenerator(m2mTable._id!),
]
})
it("can save a row when relationship fields are empty", async () => {
const row = await config.api.row.save(tableId, {
name: "foo",
description: "bar",
})
expect(row).toEqual({
_id: expect.any(String),
_rev: expect.any(String),
id: isInternal ? undefined : expect.any(Number),
type: isInternal ? "row" : undefined,
name: "foo",
description: "bar",
tableId,
createdAt: isInternal ? new Date().toISOString() : undefined,
updatedAt: isInternal ? new Date().toISOString() : undefined,
})
})
it("can save a row with a single relationship field", async () => {
const user = _.sample(o2mData)!
const row = await config.api.row.save(tableId, {
name: "foo",
description: "bar",
user: [user],
})
expect(row).toEqual({
name: "foo",
description: "bar",
tableId,
user: [user].map(u => resultMapper(u)),
_id: expect.any(String),
_rev: expect.any(String),
id: isInternal ? undefined : expect.any(Number),
type: isInternal ? "row" : undefined,
[`fk_${o2mTable.name}_fk_o2m`]: isInternal ? undefined : user.id,
createdAt: isInternal ? new Date().toISOString() : undefined,
updatedAt: isInternal ? new Date().toISOString() : undefined,
})
})
it("can save a row with a multiple relationship field", async () => {
const selectedUsers = _.sampleSize(m2mData, 2)
const row = await config.api.row.save(tableId, {
name: "foo",
description: "bar",
users: selectedUsers,
})
expect(row).toEqual({
name: "foo",
description: "bar",
tableId,
users: expect.arrayContaining(
selectedUsers.map(u => resultMapper(u))
),
_id: expect.any(String),
_rev: expect.any(String),
id: isInternal ? undefined : expect.any(Number),
type: isInternal ? "row" : undefined,
createdAt: isInternal ? new Date().toISOString() : undefined,
updatedAt: isInternal ? new Date().toISOString() : undefined,
})
})
it("can retrieve rows with no populated relationships", async () => {
const row = await config.api.row.save(tableId, {
name: "foo",
description: "bar",
})
const retrieved = await config.api.row.get(tableId, row._id!)
expect(retrieved).toEqual({
name: "foo",
description: "bar",
tableId,
user: undefined,
users: undefined,
_id: row._id,
_rev: expect.any(String),
id: isInternal ? undefined : expect.any(Number),
...defaultRowFields,
})
})
it("can retrieve rows with populated relationships", async () => {
const user1 = _.sample(o2mData)!
const [user2, user3] = _.sampleSize(m2mData, 2)
const row = await config.api.row.save(tableId, {
name: "foo",
description: "bar",
users: [user2, user3],
user: [user1],
})
const retrieved = await config.api.row.get(tableId, row._id!)
expect(retrieved).toEqual({
name: "foo",
description: "bar",
tableId,
user: expect.arrayContaining([user1].map(u => resultMapper(u))),
users: expect.arrayContaining(
[user2, user3].map(u => resultMapper(u))
),
_id: row._id,
_rev: expect.any(String),
id: isInternal ? undefined : expect.any(Number),
[`fk_${o2mTable.name}_fk_o2m`]: isInternal ? undefined : user1.id,
...defaultRowFields,
})
})
it("can update an existing populated row", async () => {
const user = _.sample(o2mData)!
const [users1, users2, users3] = _.sampleSize(m2mData, 3)
const row = await config.api.row.save(tableId, {
name: "foo",
description: "bar",
users: [users1, users2],
})
const updatedRow = await config.api.row.save(tableId, {
...row,
user: [user],
users: [users3, users1],
})
expect(updatedRow).toEqual({
name: "foo",
description: "bar",
tableId,
user: expect.arrayContaining([user].map(u => resultMapper(u))),
users: expect.arrayContaining(
[users3, users1].map(u => resultMapper(u))
),
_id: row._id,
_rev: expect.any(String),
id: isInternal ? undefined : expect.any(Number),
type: isInternal ? "row" : undefined,
[`fk_${o2mTable.name}_fk_o2m`]: isInternal ? undefined : user.id,
createdAt: isInternal ? new Date().toISOString() : undefined,
updatedAt: isInternal ? new Date().toISOString() : undefined,
})
})
it("can wipe an existing populated relationships in row", async () => {
const [user1, user2] = _.sampleSize(m2mData, 2)
const row = await config.api.row.save(tableId, {
name: "foo",
description: "bar",
users: [user1, user2],
})
const updatedRow = await config.api.row.save(tableId, {
...row,
user: null,
users: null,
})
expect(updatedRow.user).toBeUndefined()
expect(updatedRow.users).toBeUndefined()
})
it("fetch all will populate the relationships", async () => {
const [user1] = _.sampleSize(o2mData, 1)
const [users1, users2, users3] = _.sampleSize(m2mData, 3)
const rows = [
{
name: generator.name(),
description: generator.name(),
users: [users1, users2],
},
{
name: generator.name(),
description: generator.name(),
user: [user1],
users: [users1, users3],
},
{
name: generator.name(),
description: generator.name(),
users: [users3],
},
]
await config.api.row.save(tableId, rows[0])
await config.api.row.save(tableId, rows[1])
await config.api.row.save(tableId, rows[2])
const res = await config.api.row.fetch(tableId)
expect(res).toEqual(
expect.arrayContaining(
rows.map(r => ({
name: r.name,
description: r.description,
tableId,
user: r.user?.map(u => resultMapper(u)),
users: r.users?.length
? expect.arrayContaining(r.users?.map(u => resultMapper(u)))
: undefined,
_id: expect.any(String),
_rev: expect.any(String),
id: isInternal ? undefined : expect.any(Number),
[`fk_${o2mTable.name}_fk_o2m`]:
isInternal || !r.user?.length ? undefined : r.user[0].id,
...defaultRowFields,
}))
)
)
})
it("search all will populate the relationships", async () => {
const [user1] = _.sampleSize(o2mData, 1)
const [users1, users2, users3] = _.sampleSize(m2mData, 3)
const rows = [
{
name: generator.name(),
description: generator.name(),
users: [users1, users2],
},
{
name: generator.name(),
description: generator.name(),
user: [user1],
users: [users1, users3],
},
{
name: generator.name(),
description: generator.name(),
users: [users3],
},
]
await config.api.row.save(tableId, rows[0])
await config.api.row.save(tableId, rows[1])
await config.api.row.save(tableId, rows[2])
const res = await config.api.row.search(tableId)
expect(res).toEqual({
rows: expect.arrayContaining(
rows.map(r => ({
name: r.name,
description: r.description,
tableId,
user: r.user?.map(u => resultMapper(u)),
users: r.users?.length
? expect.arrayContaining(r.users?.map(u => resultMapper(u)))
: undefined,
_id: expect.any(String),
_rev: expect.any(String),
id: isInternal ? undefined : expect.any(Number),
[`fk_${o2mTable.name}_fk_o2m`]:
isInternal || !r.user?.length ? undefined : r.user[0].id,
...defaultRowFields,
}))
),
...(isInternal
? {}
: {
hasNextPage: false,
}),
})
})
})
// Upserting isn't yet supported in MSSQL or Oracle, see:
// https://github.com/knex/knex/pull/6050
!isMSSQL &&
!isOracle &&
describe("relationships", () => {
let tableId: string
let viewId: string
let auxData: Row[] = []
beforeAll(async () => {
const aux2Table = await config.api.table.save(saveTableRequest())
const aux2Data = await config.api.row.save(aux2Table._id!, {})
const auxTable = await config.api.table.save(
saveTableRequest({
primaryDisplay: "name",
schema: {
name: {
name: "name",
type: FieldType.STRING,
constraints: { presence: true },
},
age: {
name: "age",
type: FieldType.NUMBER,
constraints: { presence: true },
},
address: {
name: "address",
type: FieldType.STRING,
constraints: { presence: true },
visible: false,
},
link: {
name: "link",
type: FieldType.LINK,
tableId: aux2Table._id!,
relationshipType: RelationshipType.MANY_TO_MANY,
fieldName: "fk_aux",
constraints: { presence: true },
},
formula: {
name: "formula",
type: FieldType.FORMULA,
formula: "{{ any }}",
constraints: { presence: true },
},
},
})
)
const auxTableId = auxTable._id!
for (const name of generator.unique(() => generator.name(), 10)) {
auxData.push(
await config.api.row.save(auxTableId, {
name,
age: generator.age(),
address: generator.address(),
link: [aux2Data],
})
)
}
const table = await config.api.table.save(
saveTableRequest({
schema: {
title: {
name: "title",
type: FieldType.STRING,
constraints: { presence: true },
},
relWithNoSchema: {
name: "relWithNoSchema",
relationshipType: RelationshipType.ONE_TO_MANY,
type: FieldType.LINK,
tableId: auxTableId,
fieldName: "fk_relWithNoSchema",
constraints: { presence: true },
},
relWithEmptySchema: {
name: "relWithEmptySchema",
relationshipType: RelationshipType.ONE_TO_MANY,
type: FieldType.LINK,
tableId: auxTableId,
fieldName: "fk_relWithEmptySchema",
constraints: { presence: true },
},
relWithFullSchema: {
name: "relWithFullSchema",
relationshipType: RelationshipType.ONE_TO_MANY,
type: FieldType.LINK,
tableId: auxTableId,
fieldName: "fk_relWithFullSchema",
constraints: { presence: true },
},
relWithHalfSchema: {
name: "relWithHalfSchema",
relationshipType: RelationshipType.ONE_TO_MANY,
type: FieldType.LINK,
tableId: auxTableId,
fieldName: "fk_relWithHalfSchema",
constraints: { presence: true },
},
relWithIllegalSchema: {
name: "relWithIllegalSchema",
relationshipType: RelationshipType.ONE_TO_MANY,
type: FieldType.LINK,
tableId: auxTableId,
fieldName: "fk_relWithIllegalSchema",
constraints: { presence: true },
},
},
})
)
tableId = table._id!
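// The view re-exposes the relationship columns with different configurations:
// no explicit column schema, an empty one, a full one, a partial one, and one
// referencing a column that does not exist on the related table.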
const view = await config.api.viewV2.create({
name: generator.guid(),
tableId,
schema: {
title: {
visible: true,
},
relWithNoSchema: {
visible: true,
},
relWithEmptySchema: {
visible: true,
columns: {},
},
relWithFullSchema: {
visible: true,
columns: Object.keys(auxTable.schema).reduce<
Record<string, RelationSchemaField>
>((acc, c) => ({ ...acc, [c]: { visible: true } }), {}),
},
relWithHalfSchema: {
visible: true,
columns: {
name: { visible: true },
age: { visible: false, readonly: true },
},
},
relWithIllegalSchema: {
visible: true,
columns: {
name: { visible: true },
address: { visible: true },
unexisting: { visible: true },
},
},
},
})
viewId = view.id
})
const testScenarios: [string, (row: Row) => Promise<Row> | Row][] = [
["get row", (row: Row) => config.api.row.get(viewId, row._id!)],
[
"from view search",
async (row: Row) => {
const { rows } = await config.api.viewV2.search(viewId)
return rows.find(r => r._id === row._id!)
},
],
["from original saved row", (row: Row) => row],
[
"from updated row",
(row: Row) => config.api.row.save(viewId, row),
],
]
it.each(testScenarios)(
"can retrieve rows with populated relationships (via %s)",
async (__, retrieveDelegate) => {
const otherRows = _.sampleSize(auxData, 5)
const row = await config.api.row.save(viewId, {
title: generator.word(),
relWithNoSchema: [otherRows[0]],
relWithEmptySchema: [otherRows[1]],
relWithFullSchema: [otherRows[2]],
relWithHalfSchema: [otherRows[3]],
relWithIllegalSchema: [otherRows[4]],
})
const retrieved = await retrieveDelegate(row)
expect(retrieved).toEqual(
expect.objectContaining({
title: row.title,
relWithNoSchema: [
{
_id: otherRows[0]._id,
primaryDisplay: otherRows[0].name,
},
],
relWithEmptySchema: [
{
_id: otherRows[1]._id,
primaryDisplay: otherRows[1].name,
},
],
relWithFullSchema: [
{
_id: otherRows[2]._id,
primaryDisplay: otherRows[2].name,
name: otherRows[2].name,
age: otherRows[2].age,
id: otherRows[2].id,
},
],
relWithHalfSchema: [
{
_id: otherRows[3]._id,
primaryDisplay: otherRows[3].name,
name: otherRows[3].name,
},
],
relWithIllegalSchema: [
{
_id: otherRows[4]._id,
primaryDisplay: otherRows[4].name,
name: otherRows[4].name,
},
],
})
)
}
)
it.each([
[
"from table fetch",
async (row: Row) => {
const rows = await config.api.row.fetch(tableId)
return rows.find(r => r._id === row._id!)
},
],
[
"from table search",
async (row: Row) => {
const { rows } = await config.api.row.search(tableId)
return rows.find(r => r._id === row._id!)
},
],
])(
"does not enrich when fetching from the table (via %s)",
async (__, retrieveDelegate) => {
const otherRows = _.sampleSize(auxData, 5)
const row = await config.api.row.save(viewId, {
title: generator.word(),
relWithNoSchema: [otherRows[0]],
relWithEmptySchema: [otherRows[1]],
relWithFullSchema: [otherRows[2]],
relWithHalfSchema: [otherRows[3]],
relWithIllegalSchema: [otherRows[4]],
})
const retrieved = await retrieveDelegate(row)
expect(retrieved).toEqual(
expect.objectContaining({
title: row.title,
relWithNoSchema: [
{
_id: otherRows[0]._id,
primaryDisplay: otherRows[0].name,
},
],
relWithEmptySchema: [
{
_id: otherRows[1]._id,
primaryDisplay: otherRows[1].name,
},
],
relWithFullSchema: [
{
_id: otherRows[2]._id,
primaryDisplay: otherRows[2].name,
},
],
relWithHalfSchema: [
{
_id: otherRows[3]._id,
primaryDisplay: otherRows[3].name,
},
],
relWithIllegalSchema: [
{
_id: otherRows[4]._id,
primaryDisplay: otherRows[4].name,
},
],
})
)
}
)
})
isInternal &&
describe("AI fields", () => {
let table: Table
let envCleanup: () => void
beforeAll(async () => {
mocks.licenses.useBudibaseAI()
mocks.licenses.useAICustomConfigs()
envCleanup = setEnv({
OPENAI_API_KEY: "sk-abcdefghijklmnopqrstuvwxyz1234567890abcd",
})
mockChatGPTResponse("Mock LLM Response")
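// All OpenAI traffic is intercepted here (and cleaned up via nock in
// afterAll), so the AI column resolves to the mocked string rather than a
// real LLM response.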
table = await config.api.table.save(
saveTableRequest({
schema: {
ai: {
name: "ai",
type: FieldType.AI,
operation: AIOperationEnum.PROMPT,
prompt: "Convert the following to German: '{{ product }}'",
},
product: {
name: "product",
type: FieldType.STRING,
},
},
})
)
await config.api.row.save(table._id!, {
product: generator.word(),
})
})
afterAll(() => {
nock.cleanAll()
envCleanup()
mocks.licenses.useCloudFree()
})
it("should be able to save a row with an AI column", async () => {
const { rows } = await config.api.row.search(table._id!)
expect(rows.length).toBe(1)
expect(rows[0].ai).toEqual("Mock LLM Response")
})
it("should be able to update a row with an AI column", async () => {
const { rows } = await config.api.row.search(table._id!)
expect(rows.length).toBe(1)
await config.api.row.save(table._id!, {
product: generator.word(),
...rows[0],
})
expect(rows.length).toBe(1)
expect(rows[0].ai).toEqual("Mock LLM Response")
})
})
describe("Formula fields", () => {
let table: Table
let otherTable: Table
let relatedRow: Row, mainRow: Row
beforeAll(async () => {
otherTable = await config.api.table.save(defaultTable())
table = await config.api.table.save(
saveTableRequest({
schema: {
links: {
name: "links",
fieldName: "links",
type: FieldType.LINK,
tableId: otherTable._id!,
relationshipType: RelationshipType.ONE_TO_MANY,
},
formula: {
name: "formula",
type: FieldType.FORMULA,
formula: "{{ links.0.name }}",
formulaType: FormulaType.DYNAMIC,
},
},
})
)
relatedRow = await config.api.row.save(otherTable._id!, {
name: generator.word(),
description: generator.paragraph(),
})
mainRow = await config.api.row.save(table._id!, {
name: generator.word(),
description: generator.paragraph(),
tableId: table._id!,
links: [relatedRow._id],
})
})
async function updateFormulaColumn(
formula: string,
opts?: {
responseType?: FormulaResponseType
formulaType?: FormulaType
}
) {
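// Re-saves the table with the formula column swapped out, so each test below
// can exercise a different formula, response type and formula type against
// the same saved rows.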
table = await config.api.table.save({
...table,
schema: {
...table.schema,
formula: {
name: "formula",
type: FieldType.FORMULA,
formula,
responseType: opts?.responseType,
formulaType: opts?.formulaType || FormulaType.DYNAMIC,
},
},
})
}
it("should be able to search for rows containing formulas", async () => {
const { rows } = await config.api.row.search(table._id!)
expect(rows.length).toBe(1)
expect(rows[0].links.length).toBe(1)
const row = rows[0]
expect(row.formula).toBe(relatedRow.name)
})
it("should coerce - number response type", async () => {
await updateFormulaColumn(encodeJS("return 1"), {
responseType: FieldType.NUMBER,
})
const { rows } = await config.api.row.search(table._id!)
expect(rows[0].formula).toBe(1)
})
it("should coerce - boolean response type", async () => {
await updateFormulaColumn(encodeJS("return true"), {
responseType: FieldType.BOOLEAN,
})
const { rows } = await config.api.row.search(table._id!)
expect(rows[0].formula).toBe(true)
})
it("should coerce - datetime response type", async () => {
await updateFormulaColumn(encodeJS("return new Date()"), {
responseType: FieldType.DATETIME,
})
const { rows } = await config.api.row.search(table._id!)
expect(isDate(rows[0].formula)).toBe(true)
})
it("should coerce - datetime with invalid value", async () => {
await updateFormulaColumn(encodeJS("return 'a'"), {
responseType: FieldType.DATETIME,
})
const { rows } = await config.api.row.search(table._id!)
expect(rows[0].formula).toBeUndefined()
})
it("should coerce handlebars", async () => {
await updateFormulaColumn("{{ add 1 1 }}", {
responseType: FieldType.NUMBER,
})
const { rows } = await config.api.row.search(table._id!)
expect(rows[0].formula).toBe(2)
})
it("should coerce handlebars to string (default)", async () => {
await updateFormulaColumn("{{ add 1 1 }}", {
responseType: FieldType.STRING,
})
const { rows } = await config.api.row.search(table._id!)
expect(rows[0].formula).toBe("2")
})
isInternal &&
it("should coerce a static handlebars formula", async () => {
await updateFormulaColumn(encodeJS("return 1"), {
responseType: FieldType.NUMBER,
formulaType: FormulaType.STATIC,
})
// save the row to store the static value
await config.api.row.save(table._id!, mainRow)
const { rows } = await config.api.row.search(table._id!)
expect(rows[0].formula).toBe(1)
})
})
describe("Formula JS protection", () => {
it("should time out JS execution if a single cell takes too long", async () => {
await withEnv({ JS_PER_INVOCATION_TIMEOUT_MS: 40 }, async () => {
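// JS_PER_INVOCATION_TIMEOUT_MS caps how long a single formula evaluation may
// run, so the infinite loop below should be cut off per cell.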
const js = encodeJS(
`
let i = 0;
while (true) {
i++;
}
return i;
`
)
const table = await config.api.table.save(
saveTableRequest({
schema: {
text: {
name: "text",
type: FieldType.STRING,
},
formula: {
name: "formula",
type: FieldType.FORMULA,
formula: js,
formulaType: FormulaType.DYNAMIC,
},
},
})
)
await config.api.row.save(table._id!, { text: "foo" })
const { rows } = await config.api.row.search(table._id!)
expect(rows).toHaveLength(1)
const row = rows[0]
expect(row.text).toBe("foo")
expect(row.formula).toBe("Timed out while executing JS")
})
})
it("should time out JS execution if a multiple cells take too long", async () => {
await withEnv(
{
JS_PER_INVOCATION_TIMEOUT_MS: 40,
JS_PER_REQUEST_TIMEOUT_MS: 80,
},
async () => {
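// JS_PER_REQUEST_TIMEOUT_MS is a shared budget for every cell in the
// response: only the first few rows actually execute (each hitting the
// per-invocation timeout) before the remaining formulas fail with a CPU
// time limit error.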
const js = encodeJS(
`
let i = 0;
while (true) {
i++;
}
return i;
`
)
const table = await config.api.table.save(
saveTableRequest({
schema: {
text: {
name: "text",
type: FieldType.STRING,
},
formula: {
name: "formula",
type: FieldType.FORMULA,
formula: js,
formulaType: FormulaType.DYNAMIC,
},
},
})
)
for (let i = 0; i < 10; i++) {
await config.api.row.save(table._id!, { text: "foo" })
}
// Run this test 3 times to make sure that there's no cross-request
// pollution of the execution time tracking.
for (let reqs = 0; reqs < 3; reqs++) {
const { rows } = await config.api.row.search(table._id!)
expect(rows).toHaveLength(10)
let i = 0
for (; i < 10; i++) {
const row = rows[i]
if (row.formula !== JsTimeoutError.message) {
break
}
}
// Given the execution times are not deterministic, we can't be sure
// of the exact number of rows that were executed before the timeout
// but it should absolutely be at least 1.
expect(i).toBeGreaterThan(0)
expect(i).toBeLessThan(5)
for (; i < 10; i++) {
const row = rows[i]
expect(row.text).toBe("foo")
expect(row.formula).toStartWith("CPU time limit exceeded ")
}
}
}
)
})
it("should not carry over context between formulas", async () => {
const js = encodeJS(`return $("[text]");`)
const table = await config.api.table.save(
saveTableRequest({
schema: {
text: {
name: "text",
type: FieldType.STRING,
},
formula: {
name: "formula",
type: FieldType.FORMULA,
formula: js,
formulaType: FormulaType.DYNAMIC,
},
},
})
)
for (let i = 0; i < 10; i++) {
await config.api.row.save(table._id!, { text: `foo${i}` })
}
const { rows } = await config.api.row.search(table._id!)
expect(rows).toHaveLength(10)
const formulaValues = rows.map(r => r.formula)
expect(formulaValues).toEqual(
expect.arrayContaining([
"foo0",
"foo1",
"foo2",
"foo3",
"foo4",
"foo5",
"foo6",
"foo7",
"foo8",
"foo9",
])
)
})
})
if (isInternal || isMSSQL) {
describe("Fields with spaces", () => {
let table: Table
let otherTable: Table
let relatedRow: Row
beforeAll(async () => {
otherTable = await config.api.table.save(defaultTable())
table = await config.api.table.save(
saveTableRequest({
schema: {
links: {
name: "links",
fieldName: "links",
type: FieldType.LINK,
tableId: otherTable._id!,
relationshipType: RelationshipType.ONE_TO_MANY,
},
"nameWithSpace ": {
name: "nameWithSpace ",
type: FieldType.STRING,
},
},
})
)
relatedRow = await config.api.row.save(otherTable._id!, {
name: generator.word(),
description: generator.paragraph(),
})
await config.api.row.save(table._id!, {
"nameWithSpace ": generator.word(),
tableId: table._id!,
links: [relatedRow._id],
})
})
it("Successfully returns rows that have spaces in their field names", async () => {
const { rows } = await config.api.row.search(table._id!)
expect(rows.length).toBe(1)
const row = rows[0]
expect(row["nameWithSpace "]).toBeDefined()
})
})
}
if (!isInternal && !isOracle) {
describe("bigint ids", () => {
let table1: Table, table2: Table
let table1Name: string, table2Name: string
beforeAll(async () => {
table1Name = `table1-${generator.guid().substring(0, 5)}`
await client!.schema.createTable(table1Name, table => {
table.bigInteger("table1Id").primary()
})
table2Name = `table2-${generator.guid().substring(0, 5)}`
await client!.schema.createTable(table2Name, table => {
table.bigInteger("table2Id").primary()
table
.bigInteger("table1Ref")
.references("table1Id")
.inTable(table1Name)
})
const resp = await config.api.datasource.fetchSchema({
datasourceId: datasource!._id!,
})
const tables = Object.values(resp.datasource.entities || {})
table1 = tables.find(t => t.name === table1Name)!
table2 = tables.find(t => t.name === table2Name)!
await config.api.datasource.addExistingRelationship({
one: {
tableId: table2._id!,
relationshipName: "one",
foreignKey: "table1Ref",
},
many: {
tableId: table1._id!,
relationshipName: "many",
primaryKey: "table1Id",
},
})
})
it("should be able to fetch rows with related bigint ids", async () => {
const row = await config.api.row.save(table1._id!, {
table1Id: "1",
})
await config.api.row.save(table2._id!, {
table2Id: "2",
table1Ref: row.table1Id,
})
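// The external row _id appears to be the URL-encoded array of primary key
// values (e.g. "%5B'1'%5D" decodes to ['1']), which keeps bigint keys as
// strings end to end.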
let resp = await config.api.row.search(table1._id!)
expect(resp.rows).toHaveLength(1)
expect(resp.rows[0]._id).toBe("%5B'1'%5D")
expect(resp.rows[0].many).toHaveLength(1)
expect(resp.rows[0].many[0]._id).toBe("%5B'2'%5D")
resp = await config.api.row.search(table2._id!)
expect(resp.rows).toHaveLength(1)
expect(resp.rows[0]._id).toBe("%5B'2'%5D")
expect(resp.rows[0].one).toHaveLength(1)
expect(resp.rows[0].one[0]._id).toBe("%5B'1'%5D")
})
})
}
}
)
}