Merge branch 'master' into view-calculation-readonly
commit 71fdae3383

@@ -1,6 +1,6 @@
 {
   "$schema": "node_modules/lerna/schemas/lerna-schema.json",
-  "version": "2.32.12",
+  "version": "2.32.14",
   "npmClient": "yarn",
   "packages": [
     "packages/*",

@@ -213,17 +213,21 @@ export class DatabaseImpl implements Database {
 
   async getMultiple<T extends Document>(
     ids: string[],
-    opts?: { allowMissing?: boolean }
+    opts?: { allowMissing?: boolean; excludeDocs?: boolean }
   ): Promise<T[]> {
     // get unique
     ids = [...new Set(ids)]
+    const includeDocs = !opts?.excludeDocs
     const response = await this.allDocs<T>({
       keys: ids,
-      include_docs: true,
+      include_docs: includeDocs,
     })
     const rowUnavailable = (row: RowResponse<T>) => {
       // row is deleted - key lookup can return this
-      if (row.doc == null || ("deleted" in row.value && row.value.deleted)) {
+      if (
+        (includeDocs && row.doc == null) ||
+        (row.value && "deleted" in row.value && row.value.deleted)
+      ) {
         return true
       }
       return row.error === "not_found"

@@ -237,7 +241,7 @@ export class DatabaseImpl implements Database {
       const missingIds = missing.map(row => row.key).join(", ")
       throw new Error(`Unable to get documents: ${missingIds}`)
     }
-    return rows.map(row => row.doc!)
+    return rows.map(row => (includeDocs ? row.doc! : row.value))
   }
 
   async remove(idOrDoc: string | Document, rev?: string) {

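Note: the new `excludeDocs` option lets callers check that documents exist without paying for their bodies: `allDocs` is issued with `include_docs: false`, and each surviving row contributes its `value` metadata instead of a full doc. A minimal usage sketch, assuming `Database` and `Document` are the types amended near the bottom of this diff (the helper name is illustrative):

```ts
import { Database, Document } from "@budibase/types"

// Sketch: verify that every id resolves to a live document without
// transferring any document bodies. Assumes `ids` has no duplicates,
// since getMultiple de-duplicates its input internally.
async function allIdsExist(db: Database, ids: string[]): Promise<boolean> {
  const found = await db.getMultiple<Document>(ids, {
    allowMissing: true, // filter out missing/deleted rows instead of throwing
    excludeDocs: true, // issue allDocs with include_docs: false
  })
  return found.length === ids.length
}
```
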
@@ -24,6 +24,7 @@ import * as context from "../context"
 import { getGlobalDB } from "../context"
 import { isCreator } from "./utils"
 import { UserDB } from "./db"
+import { dataFilters } from "@budibase/shared-core"
 
 type GetOpts = { cleanup?: boolean }

@@ -262,10 +263,17 @@ export async function paginatedUsers({
     userList = await bulkGetGlobalUsersById(query?.oneOf?._id, {
       cleanup: true,
     })
+  } else if (query) {
+    // TODO: this should use SQS search, but the logic is built in the 'server' package. Using the in-memory filtering to get this working meanwhile
+    const response = await db.allDocs<User>(
+      getGlobalUserParams(null, { ...opts, limit: undefined })
+    )
+    userList = response.rows.map(row => row.doc!)
+    userList = dataFilters.search(userList, { query, limit: opts.limit }).rows
   } else {
     // no search, query allDocs
-    const response = await db.allDocs(getGlobalUserParams(null, opts))
-    userList = response.rows.map((row: any) => row.doc)
+    const response = await db.allDocs<User>(getGlobalUserParams(null, opts))
+    userList = response.rows.map(row => row.doc!)
   }
   return pagination(userList, pageSize, {
     paginate: true,

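Note: the new `else if (query)` branch is a deliberate stopgap: it loads every global user doc (`limit: undefined`) and then filters and limits in memory with `dataFilters.search`, so the cost scales with total user count rather than page size. The TODO records why — the SQS search logic currently lives in the `server` package and cannot be reached from here.
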
@@ -40,7 +40,7 @@ export const buildTableEndpoints = API => ({
     sortType,
     paginate,
   }) => {
-    if (!tableId || !query) {
+    if (!tableId) {
       return {
         rows: [],
       }

@@ -31,7 +31,7 @@ function getDatasourceId(table: Table) {
   return breakExternalTableId(table._id).datasourceId
 }
 
-export async function save(
+export async function updateTable(
   ctx: UserCtx<SaveTableRequest, SaveTableResponse>,
   renaming?: RenameColumn
 ) {

@@ -102,18 +102,22 @@ export async function find(ctx: UserCtx<void, TableResponse>) {
 
 export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
   const appId = ctx.appId
-  const table = ctx.request.body
-  const isImport = table.rows
+  const { rows, ...table } = ctx.request.body
+  const isImport = rows
   const renaming = ctx.request.body._rename
+
+  const isCreate = !table._id
 
   checkDefaultFields(table)
 
-  const api = pickApi({ table })
-  let savedTable = await api.save(ctx, renaming)
-  if (!table._id) {
+  let savedTable: Table
+  if (isCreate) {
+    savedTable = await sdk.tables.create(table, rows, ctx.user._id)
+    savedTable = await sdk.tables.enrichViewSchemas(savedTable)
     await events.table.created(savedTable)
   } else {
+    const api = pickApi({ table })
+    savedTable = await api.updateTable(ctx, renaming)
     await events.table.updated(savedTable)
   }
   if (renaming) {

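Note: table creation and update are now split at the controller: a body without `_id` goes through the new `sdk.tables.create` (which routes to the internal or external implementation and handles any row import), while updates keep using the per-source `pickApi(...).updateTable`. Destructuring `rows` off the body also stops an import payload from being persisted on the table document. A hedged sketch of direct SDK usage — the minimal table literal is an assumption for illustration and may need more fields (e.g. `sourceId`) to satisfy the full `Table` type:

```ts
// Sketch: create an internal table and import two rows in one call.
const saved = await sdk.tables.create(
  {
    name: "people",
    type: "table",
    sourceType: TableSourceType.INTERNAL,
    schema: {},
  },
  [{ name: "Alice" }, { name: "Bob" }], // optional rows to import
  ctx.user._id // optional user id for auto-column bookkeeping
)
```
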
@@ -12,7 +12,7 @@ import {
 } from "@budibase/types"
 import sdk from "../../../sdk"
 
-export async function save(
+export async function updateTable(
   ctx: UserCtx<SaveTableRequest, SaveTableResponse>,
   renaming?: RenameColumn
 ) {

@@ -25,19 +25,16 @@ export async function save(
     sourceType: rest.sourceType || TableSourceType.INTERNAL,
   }
 
-  const isImport = !!rows
-
   if (!tableToSave.views) {
     tableToSave.views = {}
   }
 
   try {
     const { table } = await sdk.tables.internal.save(tableToSave, {
-      user: ctx.user,
+      userId: ctx.user._id,
       rowsToImport: rows,
       tableId: ctx.request.body._id,
       renaming,
-      isImport,
     })
 
     return table

@@ -72,7 +69,7 @@ export async function bulkImport(
   await handleDataImport(table, {
     importRows: rows,
     identifierFields,
-    user: ctx.user,
+    userId: ctx.user._id,
   })
   return table
 }

@@ -41,7 +41,7 @@ describe("utils", () => {
 
     const data = [{ name: "Alice" }, { name: "Bob" }, { name: "Claire" }]
 
-    const result = await importToRows(data, table, config.user)
+    const result = await importToRows(data, table, config.user?._id)
     expect(result).toEqual([
       expect.objectContaining({
         autoId: 1,

@@ -18,7 +18,6 @@ import { quotas } from "@budibase/pro"
 import { events, context, features } from "@budibase/backend-core"
 import {
   AutoFieldSubType,
-  ContextUser,
   Datasource,
   Row,
   SourceName,

@@ -122,7 +121,7 @@ export function makeSureTableUpToDate(table: Table, tableToSave: Table) {
 export async function importToRows(
   data: Row[],
   table: Table,
-  user?: ContextUser,
+  userId?: string,
   opts?: { keepCouchId: boolean }
 ) {
   const originalTable = table

@@ -136,7 +135,7 @@ export async function importToRows(
 
     // We use a reference to table here and update it after input processing,
     // so that we can auto increment auto IDs in imported data properly
-    const processed = await inputProcessing(user?._id, table, row, {
+    const processed = await inputProcessing(userId, table, row, {
       noAutoRelationships: true,
     })
     row = processed

@@ -167,11 +166,10 @@ export async function importToRows(
 
 export async function handleDataImport(
   table: Table,
-  opts?: { identifierFields?: string[]; user?: ContextUser; importRows?: Row[] }
+  opts?: { identifierFields?: string[]; userId?: string; importRows?: Row[] }
 ) {
   const schema = table.schema
   const identifierFields = opts?.identifierFields || []
-  const user = opts?.user
   const importRows = opts?.importRows
 
   if (!importRows || !isRows(importRows) || !isSchema(schema)) {

@@ -181,7 +179,7 @@ export async function handleDataImport(
   const db = context.getAppDB()
   const data = parse(importRows, table)
 
-  const finalData = await importToRows(data, table, user, {
+  const finalData = await importToRows(data, table, opts?.userId, {
     keepCouchId: identifierFields.includes("_id"),
   })
 

@@ -282,22 +280,22 @@ export function checkStaticTables(table: Table) {
 
 class TableSaveFunctions {
   db: Database
-  user?: ContextUser
+  userId?: string
   oldTable?: Table
   importRows?: Row[]
   rows: Row[]
 
   constructor({
-    user,
+    userId,
     oldTable,
     importRows,
   }: {
-    user?: ContextUser
+    userId?: string
     oldTable?: Table
     importRows?: Row[]
   }) {
     this.db = context.getAppDB()
-    this.user = user
+    this.userId = userId
     this.oldTable = oldTable
     this.importRows = importRows
     // any rows that need updated

@@ -329,7 +327,7 @@ class TableSaveFunctions {
     table = await handleSearchIndexes(table)
     table = await handleDataImport(table, {
       importRows: this.importRows,
-      user: this.user,
+      userId: this.userId,
     })
     if (await features.flags.isEnabled("SQS")) {
       await sdk.tables.sqs.addTable(table)

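Note: the hunks above are one refactor: everywhere the import pipeline used to thread a full `ContextUser` (`importToRows`, `handleDataImport`, `TableSaveFunctions`), it now carries only the user's `_id` string, which is all `inputProcessing` ever consumed. The call sites shrink accordingly (`importRows` here stands in for a previously prepared array):

```ts
// Before: the whole context user travelled through the pipeline.
// await handleDataImport(table, { importRows, user: ctx.user })

// After: only the id travels.
await handleDataImport(table, { importRows, userId: ctx.user._id })
```
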
@@ -28,6 +28,7 @@ describe.each(
   const config = setup.getConfig()
   const isOracle = dbName === DatabaseName.ORACLE
   const isMsSQL = dbName === DatabaseName.SQL_SERVER
+  const isPostgres = dbName === DatabaseName.POSTGRES
 
   let rawDatasource: Datasource
   let datasource: Datasource

@@ -47,6 +48,9 @@ describe.each(
       transformer: "return data",
       readable: true,
     }
+    if (query.fields?.sql && typeof query.fields.sql !== "string") {
+      throw new Error("Unable to create with knex structure in 'sql' field")
+    }
     return await config.api.query.save(
       { ...defaultQuery, ...query },
       expectations

@@ -207,6 +211,31 @@ describe.each(
       expect(prodQuery.parameters).toBeUndefined()
       expect(prodQuery.schema).toBeDefined()
     })
+
+    isPostgres &&
+      it("should be able to handle a JSON aggregate with newlines", async () => {
+        const jsonStatement = `COALESCE(json_build_object('name', name),'{"name":{}}'::json)`
+        const query = await createQuery({
+          fields: {
+            sql: client("test_table")
+              .select([
+                "*",
+                client.raw(
+                  `${jsonStatement} as json,\n${jsonStatement} as json2`
+                ),
+              ])
+              .toString(),
+          },
+        })
+        const res = await config.api.query.execute(
+          query._id!,
+          {},
+          {
+            status: 200,
+          }
+        )
+        expect(res).toBeDefined()
+      })
   })
 })
 

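Note: the `isPostgres &&` guard skips this test on every other database, and the query it builds is deliberately awkward: two copies of the same JSON-producing expression, separated by a newline, each ending in `'::json`. It pins the `JSON_REGEX` fix later in this diff, which previously merged both literals into one match.
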
@@ -1846,7 +1846,7 @@ describe.each([
   })
 
   describe("exportRows", () => {
-    beforeAll(async () => {
+    beforeEach(async () => {
       table = await config.api.table.save(defaultTable())
     })
 

@@ -1883,6 +1883,16 @@ describe.each([
       })
     })
 
+    it("should allow exporting without filtering", async () => {
+      const existing = await config.api.row.save(table._id!, {})
+      const res = await config.api.row.exportRows(table._id!)
+      const results = JSON.parse(res)
+      expect(results.length).toEqual(1)
+      const row = results[0]
+
+      expect(row._id).toEqual(existing._id)
+    })
+
     it("should allow exporting only certain columns", async () => {
       const existing = await config.api.row.save(table._id!, {})
       const res = await config.api.row.exportRows(table._id!, {

@@ -187,7 +187,6 @@ describe.each([
     if (isInMemory) {
       return dataFilters.search(_.cloneDeep(rows), {
         ...this.query,
-        tableId: tableOrViewId,
       })
     } else {
       return config.api.row.search(tableOrViewId, this.query)

@@ -221,9 +221,15 @@ class LinkController {
           link.id !== row._id && link.fieldName === linkedSchema.name
         )
 
+        // check all the related rows exist
+        const foundRecords = await this._db.getMultiple(
+          links.map(l => l.id),
+          { allowMissing: true, excludeDocs: true }
+        )
+
         // The 1 side of 1:N is already related to something else
         // You must remove the existing relationship
-        if (links.length > 0) {
+        if (foundRecords.length > 0) {
           throw new Error(
             `1:N Relationship Error: Record already linked to another.`
           )

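Note: this is the first consumer of the new `excludeDocs` option. The 1:N guard only needs to know whether the linked rows still exist, so it fetches row metadata instead of full documents; checking `foundRecords.length` instead of `links.length` also stops stale link entries, whose target rows were deleted, from blocking a new relationship.
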
@@ -41,7 +41,7 @@ if (types) {
   types.setTypeParser(1184, (val: any) => val) // timestampz
 }
 
-const JSON_REGEX = /'{.*}'::json/s
+const JSON_REGEX = /'{\s*.*?\s*}'::json/gs
 const Sql = sql.Sql
 
 interface PostgresConfig {

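Note: the old pattern was greedy and non-global, so a statement containing two `'{...}'::json` literals produced a single match spanning from the first `'{` to the last `}'::json`, swallowing everything between them. The new lazy, global pattern matches each literal tightly. A small sketch of the difference:

```ts
// Two JSON literals in one statement, separated by a newline
// (mirrors the Postgres test added earlier in this diff).
const stmt = `COALESCE(json_build_object('name', name),'{"name":{}}'::json) as json,
COALESCE(json_build_object('name', name),'{"name":{}}'::json) as json2`

// Old: one over-wide match spanning both literals.
console.log(stmt.match(/'{.*}'::json/s)?.length) // 1

// New: two tight matches, one per literal.
console.log(stmt.match(/'{\s*.*?\s*}'::json/gs)?.length) // 2
```
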
@@ -62,10 +62,10 @@ export async function exportRows(
     ).rows.map(row => row.doc!)
 
     result = await outputProcessing(table, response)
-  } else if (query) {
+  } else {
     let searchResponse = await sdk.rows.search({
       tableId,
-      query,
+      query: query || {},
       sort,
       sortOrder,
     })

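Note: with `query` now optional, the export path no longer needs a dedicated "no filter" branch: an absent query becomes `{}` and the search returns every row. This is what the new "exporting without filtering" test above and the `body?: ExportRowsRequest` relaxation in the test API later in this diff rely on.
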
@@ -0,0 +1,19 @@
+import { Row, Table } from "@budibase/types"
+
+import * as external from "./external"
+import * as internal from "./internal"
+import { isExternal } from "./utils"
+
+export async function create(
+  table: Omit<Table, "_id" | "_rev">,
+  rows?: Row[],
+  userId?: string
+): Promise<Table> {
+  let createdTable: Table
+  if (isExternal({ table })) {
+    createdTable = await external.create(table)
+  } else {
+    createdTable = await internal.create(table, rows, userId)
+  }
+  return createdTable
+}

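Note: this new file is a thin dispatcher: `isExternal({ table })` decides between the datasource-backed implementation and the internal one, and only the internal path receives `rows` and `userId`, since row import is an internal-table feature. Keeping the branch here lets the controller above stay source-agnostic.
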
@@ -8,8 +8,11 @@ import {
   ViewV2,
   AutoFieldSubType,
 } from "@budibase/types"
-import { context } from "@budibase/backend-core"
-import { buildExternalTableId } from "../../../../integrations/utils"
+import { context, HTTPError } from "@budibase/backend-core"
+import {
+  breakExternalTableId,
+  buildExternalTableId,
+} from "../../../../integrations/utils"
 import {
   foreignKeyStructure,
   hasTypeChanged,

@@ -86,6 +89,35 @@ function validate(table: Table, oldTable?: Table) {
   }
 }
 
+function getDatasourceId(table: Table) {
+  if (!table) {
+    throw new Error("No table supplied")
+  }
+  if (table.sourceId) {
+    return table.sourceId
+  }
+  if (!table._id) {
+    throw new Error("No table ID supplied")
+  }
+  return breakExternalTableId(table._id).datasourceId
+}
+
+export async function create(table: Omit<Table, "_id" | "_rev">) {
+  const datasourceId = getDatasourceId(table)
+
+  const tableToCreate = { ...table, created: true }
+  try {
+    const result = await save(datasourceId!, tableToCreate)
+    return result.table
+  } catch (err: any) {
+    if (err instanceof Error) {
+      throw new HTTPError(err.message, 400)
+    } else {
+      throw new HTTPError(err?.message || err, err.status || 500)
+    }
+  }
+}
+
 export async function save(
   datasourceId: string,
   update: Table,

@@ -1,5 +1,6 @@
 import { populateExternalTableSchemas } from "./validation"
 import * as getters from "./getters"
+import * as create from "./create"
 import * as updates from "./update"
 import * as utils from "./utils"
 import { migrate } from "./migration"

@@ -7,6 +8,7 @@ import * as sqs from "./internal/sqs"
 
 export default {
   populateExternalTableSchemas,
+  ...create,
   ...updates,
   ...getters,
   ...utils,

@@ -5,7 +5,7 @@ import {
   ViewStatisticsSchema,
   ViewV2,
   Row,
-  ContextUser,
+  TableSourceType,
 } from "@budibase/types"
 import {
   hasTypeChanged,

@@ -16,18 +16,56 @@ import { EventType, updateLinks } from "../../../../db/linkedRows"
 import { cloneDeep } from "lodash/fp"
 import isEqual from "lodash/isEqual"
 import { runStaticFormulaChecks } from "../../../../api/controllers/table/bulkFormula"
-import { context } from "@budibase/backend-core"
+import { context, HTTPError } from "@budibase/backend-core"
 import { findDuplicateInternalColumns } from "@budibase/shared-core"
 import { getTable } from "../getters"
 import { checkAutoColumns } from "./utils"
 import * as viewsSdk from "../../views"
-import { getRowParams } from "../../../../db/utils"
+import { generateTableID, getRowParams } from "../../../../db/utils"
 import { quotas } from "@budibase/pro"
 
+export async function create(
+  table: Omit<Table, "_id" | "_rev">,
+  rows?: Row[],
+  userId?: string
+) {
+  const tableId = generateTableID()
+
+  let tableToSave: Table = {
+    _id: tableId,
+    ...table,
+    // Ensure these fields are populated, even if not sent in the request
+    type: table.type || "table",
+    sourceType: TableSourceType.INTERNAL,
+  }
+
+  const isImport = !!rows
+
+  if (!tableToSave.views) {
+    tableToSave.views = {}
+  }
+
+  try {
+    const { table } = await save(tableToSave, {
+      userId,
+      rowsToImport: rows,
+      isImport,
+    })
+
+    return table
+  } catch (err: any) {
+    if (err instanceof Error) {
+      throw new HTTPError(err.message, 400)
+    } else {
+      throw new HTTPError(err.message || err, err.status || 500)
+    }
+  }
+}
+
 export async function save(
   table: Table,
   opts?: {
-    user?: ContextUser
+    userId?: string
     tableId?: string
     rowsToImport?: Row[]
     renaming?: RenameColumn

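Note: `internal.create` pre-generates the document `_id` with `generateTableID()`, defaults `type` and forces `sourceType: TableSourceType.INTERNAL`, then delegates to the existing `save`. The `isImport` flag the controller used to compute now lives beside the `rows` that imply it, and any failure is normalized to an `HTTPError` (400 for plain `Error`s, otherwise the upstream status or 500), matching the external `create` above.
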
@@ -63,7 +101,7 @@ export async function save(
   // saving a table is a complex operation, involving many different steps, this
   // has been broken out into a utility to make it more obvious/easier to manipulate
   const tableSaveFunctions = new TableSaveFunctions({
-    user: opts?.user,
+    userId: opts?.userId,
     oldTable,
     importRows: opts?.rowsToImport,
   })

@@ -105,7 +105,7 @@ export class RowAPI extends TestAPI {
 
   exportRows = async (
     tableId: string,
-    body: ExportRowsRequest,
+    body?: ExportRowsRequest,
     format: RowExportFormat = RowExportFormat.JSON,
     expectations?: Expectations
   ) => {

@@ -639,19 +639,19 @@ export function fixupFilterArrays(filters: SearchFilters) {
   return filters
 }
 
-export function search<T>(
-  docs: Record<string, T>[],
-  query: RowSearchParams
-): SearchResponse<Record<string, T>> {
+export function search<T extends Record<string, any>>(
+  docs: T[],
+  query: Omit<RowSearchParams, "tableId">
+): SearchResponse<T> {
   let result = runQuery(docs, query.query)
   if (query.sort) {
     result = sort(result, query.sort, query.sortOrder || SortOrder.ASCENDING)
   }
-  let totalRows = result.length
+  const totalRows = result.length
   if (query.limit) {
     result = limit(result, query.limit.toString())
   }
-  const response: SearchResponse<Record<string, any>> = { rows: result }
+  const response: SearchResponse<T> = { rows: result }
   if (query.countRows) {
     response.totalRows = totalRows
   }

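Note: the generic change means callers now get back the element type they passed in, and dropping `tableId` from the accepted params is what allows purely in-memory callers (the user SDK earlier in this diff, the in-memory test harness) to search without inventing a table. A sketch of the improved inference, with an illustrative `AppUser` shape:

```ts
import { dataFilters } from "@budibase/shared-core"

interface AppUser {
  email: string
  roleId: string
}

declare const users: AppUser[]

// rows is inferred as AppUser[], and no tableId is required.
const { rows } = dataFilters.search(users, {
  query: { string: { email: "alice@" } },
  limit: 10,
})
rows[0].email // type-checks without a cast
```
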
@@ -5,6 +5,7 @@ import {
   SearchFilters,
   BasicOperator,
   ArrayOperator,
+  isLogicalSearchOperator,
 } from "@budibase/types"
 import * as Constants from "./constants"
 import { removeKeyNumbering } from "./filters"

@@ -97,10 +98,20 @@ export function isSupportedUserSearch(query: SearchFilters) {
     { op: BasicOperator.EQUAL, key: "_id" },
     { op: ArrayOperator.ONE_OF, key: "_id" },
   ]
-  for (let [key, operation] of Object.entries(query)) {
+  for (const [key, operation] of Object.entries(query)) {
     if (typeof operation !== "object") {
       return false
     }
+
+    if (isLogicalSearchOperator(key)) {
+      for (const condition of query[key]!.conditions) {
+        if (!isSupportedUserSearch(condition)) {
+          return false
+        }
+      }
+      return true
+    }
 
     const fields = Object.keys(operation || {})
     // this filter doesn't contain options - ignore
     if (fields.length === 0) {

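Note: `isSupportedUserSearch` now recurses into logical operators, so a `$and`/`$or` block is accepted only if every leaf condition is itself supported; any unsupported branch rejects the whole query. Illustrative shapes, assuming (as the visible allow-list and the test below suggest) that only `_id`/`email` style leaves pass:

```ts
// Accepted: every leaf is a supported operation.
const ok = {
  $and: { conditions: [{ string: { email: "foo@bar.com" } }] },
}

// Rejected: the leaf filters on a field outside the allow-list.
const notOk = {
  $and: { conditions: [{ string: { firstName: "foo" } }] },
}
```
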
@@ -133,7 +133,7 @@ export interface Database {
   exists(docId: string): Promise<boolean>
   getMultiple<T extends Document>(
     ids: string[],
-    opts?: { allowMissing?: boolean }
+    opts?: { allowMissing?: boolean; excludeDocs?: boolean }
   ): Promise<T[]>
   remove(idOrDoc: Document): Promise<Nano.DocumentDestroyResponse>
   remove(idOrDoc: string, rev?: string): Promise<Nano.DocumentDestroyResponse>

@@ -741,6 +741,25 @@ describe("/api/global/users", () => {
     it("should throw an error if public query performed", async () => {
       await config.api.users.searchUsers({}, { status: 403, noHeaders: true })
     })
+
+    it("should be able to search using logical conditions", async () => {
+      const user = await config.createUser()
+      const response = await config.api.users.searchUsers({
+        query: {
+          $and: {
+            conditions: [
+              {
+                $and: {
+                  conditions: [{ string: { email: user.email } }],
+                },
+              },
+            ],
+          },
+        },
+      })
+      expect(response.body.data.length).toBe(1)
+      expect(response.body.data[0].email).toBe(user.email)
+    })
   })
 
   describe("DELETE /api/global/users/:userId", () => {