Merge branch 'master' into view-calculation-readonly

This commit is contained in:
Sam Rose 2024-10-09 14:19:07 +01:00 committed by GitHub
commit 71fdae3383
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
24 changed files with 230 additions and 54 deletions

View File

@ -1,6 +1,6 @@
{ {
"$schema": "node_modules/lerna/schemas/lerna-schema.json", "$schema": "node_modules/lerna/schemas/lerna-schema.json",
"version": "2.32.12", "version": "2.32.14",
"npmClient": "yarn", "npmClient": "yarn",
"packages": [ "packages": [
"packages/*", "packages/*",

View File

@ -213,17 +213,21 @@ export class DatabaseImpl implements Database {
async getMultiple<T extends Document>( async getMultiple<T extends Document>(
ids: string[], ids: string[],
opts?: { allowMissing?: boolean } opts?: { allowMissing?: boolean; excludeDocs?: boolean }
): Promise<T[]> { ): Promise<T[]> {
// get unique // get unique
ids = [...new Set(ids)] ids = [...new Set(ids)]
const includeDocs = !opts?.excludeDocs
const response = await this.allDocs<T>({ const response = await this.allDocs<T>({
keys: ids, keys: ids,
include_docs: true, include_docs: includeDocs,
}) })
const rowUnavailable = (row: RowResponse<T>) => { const rowUnavailable = (row: RowResponse<T>) => {
// row is deleted - key lookup can return this // row is deleted - key lookup can return this
if (row.doc == null || ("deleted" in row.value && row.value.deleted)) { if (
(includeDocs && row.doc == null) ||
(row.value && "deleted" in row.value && row.value.deleted)
) {
return true return true
} }
return row.error === "not_found" return row.error === "not_found"
@ -237,7 +241,7 @@ export class DatabaseImpl implements Database {
const missingIds = missing.map(row => row.key).join(", ") const missingIds = missing.map(row => row.key).join(", ")
throw new Error(`Unable to get documents: ${missingIds}`) throw new Error(`Unable to get documents: ${missingIds}`)
} }
return rows.map(row => row.doc!) return rows.map(row => (includeDocs ? row.doc! : row.value))
} }
async remove(idOrDoc: string | Document, rev?: string) { async remove(idOrDoc: string | Document, rev?: string) {

View File

@ -24,6 +24,7 @@ import * as context from "../context"
import { getGlobalDB } from "../context" import { getGlobalDB } from "../context"
import { isCreator } from "./utils" import { isCreator } from "./utils"
import { UserDB } from "./db" import { UserDB } from "./db"
import { dataFilters } from "@budibase/shared-core"
type GetOpts = { cleanup?: boolean } type GetOpts = { cleanup?: boolean }
@ -262,10 +263,17 @@ export async function paginatedUsers({
userList = await bulkGetGlobalUsersById(query?.oneOf?._id, { userList = await bulkGetGlobalUsersById(query?.oneOf?._id, {
cleanup: true, cleanup: true,
}) })
} else if (query) {
// TODO: this should use SQS search, but the logic is built in the 'server' package. Using in-memory filtering to get this working in the meantime
const response = await db.allDocs<User>(
getGlobalUserParams(null, { ...opts, limit: undefined })
)
userList = response.rows.map(row => row.doc!)
userList = dataFilters.search(userList, { query, limit: opts.limit }).rows
} else { } else {
// no search, query allDocs // no search, query allDocs
const response = await db.allDocs(getGlobalUserParams(null, opts)) const response = await db.allDocs<User>(getGlobalUserParams(null, opts))
userList = response.rows.map((row: any) => row.doc) userList = response.rows.map(row => row.doc!)
} }
return pagination(userList, pageSize, { return pagination(userList, pageSize, {
paginate: true, paginate: true,

View File

@ -40,7 +40,7 @@ export const buildTableEndpoints = API => ({
sortType, sortType,
paginate, paginate,
}) => { }) => {
if (!tableId || !query) { if (!tableId) {
return { return {
rows: [], rows: [],
} }

View File

@ -31,7 +31,7 @@ function getDatasourceId(table: Table) {
return breakExternalTableId(table._id).datasourceId return breakExternalTableId(table._id).datasourceId
} }
export async function save( export async function updateTable(
ctx: UserCtx<SaveTableRequest, SaveTableResponse>, ctx: UserCtx<SaveTableRequest, SaveTableResponse>,
renaming?: RenameColumn renaming?: RenameColumn
) { ) {

View File

@ -102,18 +102,22 @@ export async function find(ctx: UserCtx<void, TableResponse>) {
export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) { export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
const appId = ctx.appId const appId = ctx.appId
const table = ctx.request.body const { rows, ...table } = ctx.request.body
const isImport = table.rows const isImport = rows
const renaming = ctx.request.body._rename const renaming = ctx.request.body._rename
const isCreate = !table._id
checkDefaultFields(table) checkDefaultFields(table)
const api = pickApi({ table }) let savedTable: Table
let savedTable = await api.save(ctx, renaming) if (isCreate) {
if (!table._id) { savedTable = await sdk.tables.create(table, rows, ctx.user._id)
savedTable = await sdk.tables.enrichViewSchemas(savedTable) savedTable = await sdk.tables.enrichViewSchemas(savedTable)
await events.table.created(savedTable) await events.table.created(savedTable)
} else { } else {
const api = pickApi({ table })
savedTable = await api.updateTable(ctx, renaming)
await events.table.updated(savedTable) await events.table.updated(savedTable)
} }
if (renaming) { if (renaming) {

View File

@ -12,7 +12,7 @@ import {
} from "@budibase/types" } from "@budibase/types"
import sdk from "../../../sdk" import sdk from "../../../sdk"
export async function save( export async function updateTable(
ctx: UserCtx<SaveTableRequest, SaveTableResponse>, ctx: UserCtx<SaveTableRequest, SaveTableResponse>,
renaming?: RenameColumn renaming?: RenameColumn
) { ) {
@ -25,19 +25,16 @@ export async function save(
sourceType: rest.sourceType || TableSourceType.INTERNAL, sourceType: rest.sourceType || TableSourceType.INTERNAL,
} }
const isImport = !!rows
if (!tableToSave.views) { if (!tableToSave.views) {
tableToSave.views = {} tableToSave.views = {}
} }
try { try {
const { table } = await sdk.tables.internal.save(tableToSave, { const { table } = await sdk.tables.internal.save(tableToSave, {
user: ctx.user, userId: ctx.user._id,
rowsToImport: rows, rowsToImport: rows,
tableId: ctx.request.body._id, tableId: ctx.request.body._id,
renaming, renaming,
isImport,
}) })
return table return table
@ -72,7 +69,7 @@ export async function bulkImport(
await handleDataImport(table, { await handleDataImport(table, {
importRows: rows, importRows: rows,
identifierFields, identifierFields,
user: ctx.user, userId: ctx.user._id,
}) })
return table return table
} }

View File

@ -41,7 +41,7 @@ describe("utils", () => {
const data = [{ name: "Alice" }, { name: "Bob" }, { name: "Claire" }] const data = [{ name: "Alice" }, { name: "Bob" }, { name: "Claire" }]
const result = await importToRows(data, table, config.user) const result = await importToRows(data, table, config.user?._id)
expect(result).toEqual([ expect(result).toEqual([
expect.objectContaining({ expect.objectContaining({
autoId: 1, autoId: 1,

View File

@ -18,7 +18,6 @@ import { quotas } from "@budibase/pro"
import { events, context, features } from "@budibase/backend-core" import { events, context, features } from "@budibase/backend-core"
import { import {
AutoFieldSubType, AutoFieldSubType,
ContextUser,
Datasource, Datasource,
Row, Row,
SourceName, SourceName,
@ -122,7 +121,7 @@ export function makeSureTableUpToDate(table: Table, tableToSave: Table) {
export async function importToRows( export async function importToRows(
data: Row[], data: Row[],
table: Table, table: Table,
user?: ContextUser, userId?: string,
opts?: { keepCouchId: boolean } opts?: { keepCouchId: boolean }
) { ) {
const originalTable = table const originalTable = table
@ -136,7 +135,7 @@ export async function importToRows(
// We use a reference to table here and update it after input processing, // We use a reference to table here and update it after input processing,
// so that we can auto increment auto IDs in imported data properly // so that we can auto increment auto IDs in imported data properly
const processed = await inputProcessing(user?._id, table, row, { const processed = await inputProcessing(userId, table, row, {
noAutoRelationships: true, noAutoRelationships: true,
}) })
row = processed row = processed
@ -167,11 +166,10 @@ export async function importToRows(
export async function handleDataImport( export async function handleDataImport(
table: Table, table: Table,
opts?: { identifierFields?: string[]; user?: ContextUser; importRows?: Row[] } opts?: { identifierFields?: string[]; userId?: string; importRows?: Row[] }
) { ) {
const schema = table.schema const schema = table.schema
const identifierFields = opts?.identifierFields || [] const identifierFields = opts?.identifierFields || []
const user = opts?.user
const importRows = opts?.importRows const importRows = opts?.importRows
if (!importRows || !isRows(importRows) || !isSchema(schema)) { if (!importRows || !isRows(importRows) || !isSchema(schema)) {
@ -181,7 +179,7 @@ export async function handleDataImport(
const db = context.getAppDB() const db = context.getAppDB()
const data = parse(importRows, table) const data = parse(importRows, table)
const finalData = await importToRows(data, table, user, { const finalData = await importToRows(data, table, opts?.userId, {
keepCouchId: identifierFields.includes("_id"), keepCouchId: identifierFields.includes("_id"),
}) })
@ -282,22 +280,22 @@ export function checkStaticTables(table: Table) {
class TableSaveFunctions { class TableSaveFunctions {
db: Database db: Database
user?: ContextUser userId?: string
oldTable?: Table oldTable?: Table
importRows?: Row[] importRows?: Row[]
rows: Row[] rows: Row[]
constructor({ constructor({
user, userId,
oldTable, oldTable,
importRows, importRows,
}: { }: {
user?: ContextUser userId?: string
oldTable?: Table oldTable?: Table
importRows?: Row[] importRows?: Row[]
}) { }) {
this.db = context.getAppDB() this.db = context.getAppDB()
this.user = user this.userId = userId
this.oldTable = oldTable this.oldTable = oldTable
this.importRows = importRows this.importRows = importRows
// any rows that need updating // any rows that need updating
@ -329,7 +327,7 @@ class TableSaveFunctions {
table = await handleSearchIndexes(table) table = await handleSearchIndexes(table)
table = await handleDataImport(table, { table = await handleDataImport(table, {
importRows: this.importRows, importRows: this.importRows,
user: this.user, userId: this.userId,
}) })
if (await features.flags.isEnabled("SQS")) { if (await features.flags.isEnabled("SQS")) {
await sdk.tables.sqs.addTable(table) await sdk.tables.sqs.addTable(table)

View File

@ -28,6 +28,7 @@ describe.each(
const config = setup.getConfig() const config = setup.getConfig()
const isOracle = dbName === DatabaseName.ORACLE const isOracle = dbName === DatabaseName.ORACLE
const isMsSQL = dbName === DatabaseName.SQL_SERVER const isMsSQL = dbName === DatabaseName.SQL_SERVER
const isPostgres = dbName === DatabaseName.POSTGRES
let rawDatasource: Datasource let rawDatasource: Datasource
let datasource: Datasource let datasource: Datasource
@ -47,6 +48,9 @@ describe.each(
transformer: "return data", transformer: "return data",
readable: true, readable: true,
} }
if (query.fields?.sql && typeof query.fields.sql !== "string") {
throw new Error("Unable to create with knex structure in 'sql' field")
}
return await config.api.query.save( return await config.api.query.save(
{ ...defaultQuery, ...query }, { ...defaultQuery, ...query },
expectations expectations
@ -207,6 +211,31 @@ describe.each(
expect(prodQuery.parameters).toBeUndefined() expect(prodQuery.parameters).toBeUndefined()
expect(prodQuery.schema).toBeDefined() expect(prodQuery.schema).toBeDefined()
}) })
isPostgres &&
it("should be able to handle a JSON aggregate with newlines", async () => {
const jsonStatement = `COALESCE(json_build_object('name', name),'{"name":{}}'::json)`
const query = await createQuery({
fields: {
sql: client("test_table")
.select([
"*",
client.raw(
`${jsonStatement} as json,\n${jsonStatement} as json2`
),
])
.toString(),
},
})
const res = await config.api.query.execute(
query._id!,
{},
{
status: 200,
}
)
expect(res).toBeDefined()
})
}) })
}) })

View File

@ -1846,7 +1846,7 @@ describe.each([
}) })
describe("exportRows", () => { describe("exportRows", () => {
beforeAll(async () => { beforeEach(async () => {
table = await config.api.table.save(defaultTable()) table = await config.api.table.save(defaultTable())
}) })
@ -1883,6 +1883,16 @@ describe.each([
}) })
}) })
it("should allow exporting without filtering", async () => {
const existing = await config.api.row.save(table._id!, {})
const res = await config.api.row.exportRows(table._id!)
const results = JSON.parse(res)
expect(results.length).toEqual(1)
const row = results[0]
expect(row._id).toEqual(existing._id)
})
it("should allow exporting only certain columns", async () => { it("should allow exporting only certain columns", async () => {
const existing = await config.api.row.save(table._id!, {}) const existing = await config.api.row.save(table._id!, {})
const res = await config.api.row.exportRows(table._id!, { const res = await config.api.row.exportRows(table._id!, {

View File

@ -187,7 +187,6 @@ describe.each([
if (isInMemory) { if (isInMemory) {
return dataFilters.search(_.cloneDeep(rows), { return dataFilters.search(_.cloneDeep(rows), {
...this.query, ...this.query,
tableId: tableOrViewId,
}) })
} else { } else {
return config.api.row.search(tableOrViewId, this.query) return config.api.row.search(tableOrViewId, this.query)

View File

@ -221,9 +221,15 @@ class LinkController {
link.id !== row._id && link.fieldName === linkedSchema.name link.id !== row._id && link.fieldName === linkedSchema.name
) )
// check all the related rows exist
const foundRecords = await this._db.getMultiple(
links.map(l => l.id),
{ allowMissing: true, excludeDocs: true }
)
// The 1 side of 1:N is already related to something else // The 1 side of 1:N is already related to something else
// You must remove the existing relationship // You must remove the existing relationship
if (links.length > 0) { if (foundRecords.length > 0) {
throw new Error( throw new Error(
`1:N Relationship Error: Record already linked to another.` `1:N Relationship Error: Record already linked to another.`
) )

View File

@ -41,7 +41,7 @@ if (types) {
types.setTypeParser(1184, (val: any) => val) // timestampz types.setTypeParser(1184, (val: any) => val) // timestampz
} }
const JSON_REGEX = /'{.*}'::json/s const JSON_REGEX = /'{\s*.*?\s*}'::json/gs
const Sql = sql.Sql const Sql = sql.Sql
interface PostgresConfig { interface PostgresConfig {

View File

@ -62,10 +62,10 @@ export async function exportRows(
).rows.map(row => row.doc!) ).rows.map(row => row.doc!)
result = await outputProcessing(table, response) result = await outputProcessing(table, response)
} else if (query) { } else {
let searchResponse = await sdk.rows.search({ let searchResponse = await sdk.rows.search({
tableId, tableId,
query, query: query || {},
sort, sort,
sortOrder, sortOrder,
}) })

View File

@ -0,0 +1,19 @@
import { Row, Table } from "@budibase/types"
import * as external from "./external"
import * as internal from "./internal"
import { isExternal } from "./utils"
export async function create(
table: Omit<Table, "_id" | "_rev">,
rows?: Row[],
userId?: string
): Promise<Table> {
let createdTable: Table
if (isExternal({ table })) {
createdTable = await external.create(table)
} else {
createdTable = await internal.create(table, rows, userId)
}
return createdTable
}

View File

@ -8,8 +8,11 @@ import {
ViewV2, ViewV2,
AutoFieldSubType, AutoFieldSubType,
} from "@budibase/types" } from "@budibase/types"
import { context } from "@budibase/backend-core" import { context, HTTPError } from "@budibase/backend-core"
import { buildExternalTableId } from "../../../../integrations/utils" import {
breakExternalTableId,
buildExternalTableId,
} from "../../../../integrations/utils"
import { import {
foreignKeyStructure, foreignKeyStructure,
hasTypeChanged, hasTypeChanged,
@ -86,6 +89,35 @@ function validate(table: Table, oldTable?: Table) {
} }
} }
function getDatasourceId(table: Table) {
if (!table) {
throw new Error("No table supplied")
}
if (table.sourceId) {
return table.sourceId
}
if (!table._id) {
throw new Error("No table ID supplied")
}
return breakExternalTableId(table._id).datasourceId
}
export async function create(table: Omit<Table, "_id" | "_rev">) {
const datasourceId = getDatasourceId(table)
const tableToCreate = { ...table, created: true }
try {
const result = await save(datasourceId!, tableToCreate)
return result.table
} catch (err: any) {
if (err instanceof Error) {
throw new HTTPError(err.message, 400)
} else {
throw new HTTPError(err?.message || err, err.status || 500)
}
}
}
export async function save( export async function save(
datasourceId: string, datasourceId: string,
update: Table, update: Table,

View File

@ -1,5 +1,6 @@
import { populateExternalTableSchemas } from "./validation" import { populateExternalTableSchemas } from "./validation"
import * as getters from "./getters" import * as getters from "./getters"
import * as create from "./create"
import * as updates from "./update" import * as updates from "./update"
import * as utils from "./utils" import * as utils from "./utils"
import { migrate } from "./migration" import { migrate } from "./migration"
@ -7,6 +8,7 @@ import * as sqs from "./internal/sqs"
export default { export default {
populateExternalTableSchemas, populateExternalTableSchemas,
...create,
...updates, ...updates,
...getters, ...getters,
...utils, ...utils,

View File

@ -5,7 +5,7 @@ import {
ViewStatisticsSchema, ViewStatisticsSchema,
ViewV2, ViewV2,
Row, Row,
ContextUser, TableSourceType,
} from "@budibase/types" } from "@budibase/types"
import { import {
hasTypeChanged, hasTypeChanged,
@ -16,18 +16,56 @@ import { EventType, updateLinks } from "../../../../db/linkedRows"
import { cloneDeep } from "lodash/fp" import { cloneDeep } from "lodash/fp"
import isEqual from "lodash/isEqual" import isEqual from "lodash/isEqual"
import { runStaticFormulaChecks } from "../../../../api/controllers/table/bulkFormula" import { runStaticFormulaChecks } from "../../../../api/controllers/table/bulkFormula"
import { context } from "@budibase/backend-core" import { context, HTTPError } from "@budibase/backend-core"
import { findDuplicateInternalColumns } from "@budibase/shared-core" import { findDuplicateInternalColumns } from "@budibase/shared-core"
import { getTable } from "../getters" import { getTable } from "../getters"
import { checkAutoColumns } from "./utils" import { checkAutoColumns } from "./utils"
import * as viewsSdk from "../../views" import * as viewsSdk from "../../views"
import { getRowParams } from "../../../../db/utils" import { generateTableID, getRowParams } from "../../../../db/utils"
import { quotas } from "@budibase/pro" import { quotas } from "@budibase/pro"
export async function create(
table: Omit<Table, "_id" | "_rev">,
rows?: Row[],
userId?: string
) {
const tableId = generateTableID()
let tableToSave: Table = {
_id: tableId,
...table,
// Ensure these fields are populated, even if not sent in the request
type: table.type || "table",
sourceType: TableSourceType.INTERNAL,
}
const isImport = !!rows
if (!tableToSave.views) {
tableToSave.views = {}
}
try {
const { table } = await save(tableToSave, {
userId,
rowsToImport: rows,
isImport,
})
return table
} catch (err: any) {
if (err instanceof Error) {
throw new HTTPError(err.message, 400)
} else {
throw new HTTPError(err.message || err, err.status || 500)
}
}
}
export async function save( export async function save(
table: Table, table: Table,
opts?: { opts?: {
user?: ContextUser userId?: string
tableId?: string tableId?: string
rowsToImport?: Row[] rowsToImport?: Row[]
renaming?: RenameColumn renaming?: RenameColumn
@ -63,7 +101,7 @@ export async function save(
// saving a table is a complex operation, involving many different steps, this // saving a table is a complex operation, involving many different steps, this
// has been broken out into a utility to make it more obvious/easier to manipulate // has been broken out into a utility to make it more obvious/easier to manipulate
const tableSaveFunctions = new TableSaveFunctions({ const tableSaveFunctions = new TableSaveFunctions({
user: opts?.user, userId: opts?.userId,
oldTable, oldTable,
importRows: opts?.rowsToImport, importRows: opts?.rowsToImport,
}) })

View File

@ -105,7 +105,7 @@ export class RowAPI extends TestAPI {
exportRows = async ( exportRows = async (
tableId: string, tableId: string,
body: ExportRowsRequest, body?: ExportRowsRequest,
format: RowExportFormat = RowExportFormat.JSON, format: RowExportFormat = RowExportFormat.JSON,
expectations?: Expectations expectations?: Expectations
) => { ) => {

View File

@ -639,19 +639,19 @@ export function fixupFilterArrays(filters: SearchFilters) {
return filters return filters
} }
export function search<T>( export function search<T extends Record<string, any>>(
docs: Record<string, T>[], docs: T[],
query: RowSearchParams query: Omit<RowSearchParams, "tableId">
): SearchResponse<Record<string, T>> { ): SearchResponse<T> {
let result = runQuery(docs, query.query) let result = runQuery(docs, query.query)
if (query.sort) { if (query.sort) {
result = sort(result, query.sort, query.sortOrder || SortOrder.ASCENDING) result = sort(result, query.sort, query.sortOrder || SortOrder.ASCENDING)
} }
let totalRows = result.length const totalRows = result.length
if (query.limit) { if (query.limit) {
result = limit(result, query.limit.toString()) result = limit(result, query.limit.toString())
} }
const response: SearchResponse<Record<string, any>> = { rows: result } const response: SearchResponse<T> = { rows: result }
if (query.countRows) { if (query.countRows) {
response.totalRows = totalRows response.totalRows = totalRows
} }

View File

@ -5,6 +5,7 @@ import {
SearchFilters, SearchFilters,
BasicOperator, BasicOperator,
ArrayOperator, ArrayOperator,
isLogicalSearchOperator,
} from "@budibase/types" } from "@budibase/types"
import * as Constants from "./constants" import * as Constants from "./constants"
import { removeKeyNumbering } from "./filters" import { removeKeyNumbering } from "./filters"
@ -97,10 +98,20 @@ export function isSupportedUserSearch(query: SearchFilters) {
{ op: BasicOperator.EQUAL, key: "_id" }, { op: BasicOperator.EQUAL, key: "_id" },
{ op: ArrayOperator.ONE_OF, key: "_id" }, { op: ArrayOperator.ONE_OF, key: "_id" },
] ]
for (let [key, operation] of Object.entries(query)) { for (const [key, operation] of Object.entries(query)) {
if (typeof operation !== "object") { if (typeof operation !== "object") {
return false return false
} }
if (isLogicalSearchOperator(key)) {
for (const condition of query[key]!.conditions) {
if (!isSupportedUserSearch(condition)) {
return false
}
}
return true
}
const fields = Object.keys(operation || {}) const fields = Object.keys(operation || {})
// this filter doesn't contain options - ignore // this filter doesn't contain options - ignore
if (fields.length === 0) { if (fields.length === 0) {

View File

@ -133,7 +133,7 @@ export interface Database {
exists(docId: string): Promise<boolean> exists(docId: string): Promise<boolean>
getMultiple<T extends Document>( getMultiple<T extends Document>(
ids: string[], ids: string[],
opts?: { allowMissing?: boolean } opts?: { allowMissing?: boolean; excludeDocs?: boolean }
): Promise<T[]> ): Promise<T[]>
remove(idOrDoc: Document): Promise<Nano.DocumentDestroyResponse> remove(idOrDoc: Document): Promise<Nano.DocumentDestroyResponse>
remove(idOrDoc: string, rev?: string): Promise<Nano.DocumentDestroyResponse> remove(idOrDoc: string, rev?: string): Promise<Nano.DocumentDestroyResponse>

View File

@ -741,6 +741,25 @@ describe("/api/global/users", () => {
it("should throw an error if public query performed", async () => { it("should throw an error if public query performed", async () => {
await config.api.users.searchUsers({}, { status: 403, noHeaders: true }) await config.api.users.searchUsers({}, { status: 403, noHeaders: true })
}) })
it("should be able to search using logical conditions", async () => {
const user = await config.createUser()
const response = await config.api.users.searchUsers({
query: {
$and: {
conditions: [
{
$and: {
conditions: [{ string: { email: user.email } }],
},
},
],
},
},
})
expect(response.body.data.length).toBe(1)
expect(response.body.data[0].email).toBe(user.email)
})
}) })
describe("DELETE /api/global/users/:userId", () => { describe("DELETE /api/global/users/:userId", () => {