Merge remote-tracking branch 'origin/feature/app-list-actions' into feature/app-favourites

Dean 2024-03-14 09:52:01 +00:00
commit 9755d40203
27 changed files with 432 additions and 111 deletions

View File

@@ -1,5 +1,5 @@
 {
-  "version": "2.21.6",
+  "version": "2.21.9",
   "npmClient": "yarn",
   "packages": [
     "packages/*",

View File

@@ -1,5 +1,4 @@
 import { IdentityContext, VM } from "@budibase/types"
-import { ExecutionTimeTracker } from "../timers"

 // keep this out of Budibase types, don't want to expose context info
 export type ContextMap = {
@@ -10,6 +9,6 @@ export type ContextMap = {
   isScim?: boolean
   automationId?: string
   isMigrating?: boolean
-  jsExecutionTracker?: ExecutionTimeTracker
   vm?: VM
+  cleanup?: (() => void | Promise<void>)[]
 }
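The new cleanup array on ContextMap lets code running inside a request context register teardown work that the cleanup middleware added later in this commit will run once the request has finished. A minimal sketch of the intended registration pattern, assuming a hypothetical closable resource:

import { context } from "@budibase/backend-core"

// Hypothetical helper: register a resource so it is released when the
// current request context is torn down by the cleanup middleware.
function registerForCleanup(resource: { close: () => void }) {
  const bbCtx = context.getCurrentContext()
  if (bbCtx) {
    bbCtx.cleanup = bbCtx.cleanup || []
    // Entries may be sync or async: (() => void | Promise<void>)[]
    bbCtx.cleanup.push(() => resource.close())
  }
}

This mirrors the jsRunner change later in the commit, which pushes () => vm.close() onto the same array.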

View File

@@ -20,41 +20,3 @@ export function cleanup() {
   }
   intervals = []
 }
-
-export class ExecutionTimeoutError extends Error {
-  public readonly name = "ExecutionTimeoutError"
-}
-
-export class ExecutionTimeTracker {
-  static withLimit(limitMs: number) {
-    return new ExecutionTimeTracker(limitMs)
-  }
-
-  constructor(readonly limitMs: number) {}
-
-  private totalTimeMs = 0
-
-  track<T>(f: () => T): T {
-    this.checkLimit()
-    const start = process.hrtime.bigint()
-    try {
-      return f()
-    } finally {
-      const end = process.hrtime.bigint()
-      this.totalTimeMs += Number(end - start) / 1e6
-      this.checkLimit()
-    }
-  }
-
-  get elapsedMS() {
-    return this.totalTimeMs
-  }
-
-  checkLimit() {
-    if (this.totalTimeMs > this.limitMs) {
-      throw new ExecutionTimeoutError(
-        `Execution time limit of ${this.limitMs}ms exceeded: ${this.totalTimeMs}ms`
-      )
-    }
-  }
-}

View File

@@ -49,7 +49,8 @@
   $: group = $groups.find(x => x._id === groupId)
   $: isScimGroup = group?.scimInfo?.isSync
-  $: readonly = !sdk.users.isAdmin($auth.user) || isScimGroup
+  $: isAdmin = sdk.users.isAdmin($auth.user)
+  $: readonly = !isAdmin || isScimGroup

   $: groupApps = $appsStore.apps
     .filter(app =>
       groups.actions
@@ -123,14 +124,18 @@
           <span slot="control">
             <Icon hoverable name="More" />
           </span>
-          <MenuItem icon="Refresh" on:click={() => editModal.show()}>
+          <MenuItem
+            icon="Refresh"
+            on:click={() => editModal.show()}
+            disabled={!isAdmin}
+          >
             Edit
           </MenuItem>
           <div title={isScimGroup && "Group synced from your AD"}>
             <MenuItem
               icon="Delete"
               on:click={() => deleteModal.show()}
-              disabled={isScimGroup}
+              disabled={readonly}
             >
               Delete
             </MenuItem>
@@ -139,7 +144,7 @@
   </div>

   <Layout noPadding gap="S">
-    <GroupUsers {groupId} {readonly} />
+    <GroupUsers {groupId} {readonly} {isScimGroup} />
   </Layout>

   <Layout noPadding gap="S">

View File

@@ -13,6 +13,7 @@
   export let groupId
   export let readonly
+  export let isScimGroup

   let emailSearch
   let fetchGroupUsers
@@ -61,10 +62,10 @@
 </script>

 <div class="header">
-  {#if !readonly}
-    <EditUserPicker {groupId} onUsersUpdated={fetchGroupUsers.getInitialData} />
-  {:else}
+  {#if isScimGroup}
     <ActiveDirectoryInfo text="Users synced from your AD" />
+  {:else if !readonly}
+    <EditUserPicker {groupId} onUsersUpdated={fetchGroupUsers.getInitialData} />
   {/if}

   <div class="controls-right">

View File

@@ -39,9 +39,10 @@
     name: {
       width: "1fr",
     },
-    ...(readonly
+    ...(!isAdmin
       ? {}
-      : {
+      : // Add
+        {
           _id: {
             displayName: "",
             width: "auto",
@@ -90,7 +91,9 @@
   $: internalGroups = $groups?.filter(g => !g?.scimInfo?.isSync)
   $: isSSO = !!user?.provider
-  $: readonly = !sdk.users.isAdmin($auth.user) || user?.scimInfo?.isSync
+  $: isAdmin = sdk.users.isAdmin($auth.user)
+  $: isScim = user?.scimInfo?.isSync
+  $: readonly = !isAdmin || isScim
   $: privileged = sdk.users.isAdminOrGlobalBuilder(user)
   $: nameLabel = getNameLabel(user)
   $: filteredGroups = getFilteredGroups(internalGroups, searchTerm)
@@ -322,23 +325,23 @@
   <Layout gap="S" noPadding>
     <div class="tableTitle">
       <Heading size="S">Groups</Heading>
-      {#if internalGroups?.length}
+      {#if internalGroups?.length && isAdmin}
         <div bind:this={popoverAnchor}>
           <Button on:click={popover.show()} secondary>Add to group</Button>
         </div>
+        <Popover align="right" bind:this={popover} anchor={popoverAnchor}>
+          <UserGroupPicker
+            labelKey="name"
+            bind:searchTerm
+            list={filteredGroups}
+            selected={user.userGroups}
+            on:select={e => addGroup(e.detail)}
+            on:deselect={e => removeGroup(e.detail)}
+            iconComponent={GroupIcon}
+            extractIconProps={item => ({ group: item, size: "S" })}
+          />
+        </Popover>
       {/if}
-      <Popover align="right" bind:this={popover} anchor={popoverAnchor}>
-        <UserGroupPicker
-          labelKey="name"
-          bind:searchTerm
-          list={filteredGroups}
-          selected={user.userGroups}
-          on:select={e => addGroup(e.detail)}
-          on:deselect={e => removeGroup(e.detail)}
-          iconComponent={GroupIcon}
-          extractIconProps={item => ({ group: item, size: "S" })}
-        />
-      </Popover>
     </div>
     <Table
       schema={groupSchema}

View File

@@ -35,7 +35,9 @@ export function createGroupsStore() {
     get: getGroup,
     save: async group => {
-      const { _scimInfo, ...dataToSave } = group
+      const { ...dataToSave } = group
+      delete dataToSave.scimInfo
+      delete dataToSave.userGroups
       const response = await API.saveGroup(dataToSave)
       group._id = response._id
       group._rev = response._rev

@@ -1 +1 @@
-Subproject commit 4e66a0f7042652763c238b10367310b168905f87
+Subproject commit c4c98ae70f2e936009250893898ecf11f4ddf2c3

View File

@@ -1,6 +1,7 @@
 import Router from "@koa/router"
 import { auth, middleware, env as envCore } from "@budibase/backend-core"
 import currentApp from "../middleware/currentapp"
+import cleanup from "../middleware/cleanup"
 import zlib from "zlib"
 import { mainRoutes, staticRoutes, publicRoutes } from "./routes"
 import { middleware as pro } from "@budibase/pro"
@@ -62,6 +63,8 @@ if (apiEnabled()) {
     .use(auth.auditLog)
     // @ts-ignore
     .use(migrations)
+    // @ts-ignore
+    .use(cleanup)
     // authenticated routes

   for (let route of mainRoutes) {

View File

@ -38,11 +38,18 @@ import * as uuid from "uuid"
const timestamp = new Date("2023-01-26T11:48:57.597Z").toISOString() const timestamp = new Date("2023-01-26T11:48:57.597Z").toISOString()
tk.freeze(timestamp) tk.freeze(timestamp)
jest.unmock("mysql2")
jest.unmock("mysql2/promise")
jest.unmock("mssql")
const { basicRow } = setup.structures const { basicRow } = setup.structures
describe.each([ describe.each([
["internal", undefined], ["internal", undefined],
["postgres", databaseTestProviders.postgres], ["postgres", databaseTestProviders.postgres],
["mysql", databaseTestProviders.mysql],
["mssql", databaseTestProviders.mssql],
["mariadb", databaseTestProviders.mariadb],
])("/rows (%s)", (__, dsProvider) => { ])("/rows (%s)", (__, dsProvider) => {
const isInternal = !dsProvider const isInternal = !dsProvider
@ -70,7 +77,7 @@ describe.each([
const generateTableConfig: () => SaveTableRequest = () => { const generateTableConfig: () => SaveTableRequest = () => {
return { return {
name: uuid.v4(), name: uuid.v4().substring(0, 16),
type: "table", type: "table",
primary: ["id"], primary: ["id"],
primaryDisplay: "name", primaryDisplay: "name",
@ -467,7 +474,6 @@ describe.each([
const createRowResponse = await config.api.row.save( const createRowResponse = await config.api.row.save(
createViewResponse.id, createViewResponse.id,
{ {
OrderID: "1111",
Country: "Aussy", Country: "Aussy",
Story: "aaaaa", Story: "aaaaa",
} }
@ -477,7 +483,7 @@ describe.each([
expect(row.Story).toBeUndefined() expect(row.Story).toBeUndefined()
expect(row).toEqual({ expect(row).toEqual({
...defaultRowFields, ...defaultRowFields,
OrderID: 1111, OrderID: createRowResponse.OrderID,
Country: "Aussy", Country: "Aussy",
_id: createRowResponse._id, _id: createRowResponse._id,
_rev: createRowResponse._rev, _rev: createRowResponse._rev,
@ -641,7 +647,7 @@ describe.each([
const createdRow = await config.createRow() const createdRow = await config.createRow()
const res = await config.api.row.bulkDelete(table._id!, { const res = await config.api.row.bulkDelete(table._id!, {
rows: [createdRow, { _id: "2" }], rows: [createdRow, { _id: "9999999" }],
}) })
expect(res[0]._id).toEqual(createdRow._id) expect(res[0]._id).toEqual(createdRow._id)

View File

@ -4,11 +4,17 @@ import { QueryOptions } from "../../definitions/datasource"
import { isIsoDateString, SqlClient, isValidFilter } from "../utils" import { isIsoDateString, SqlClient, isValidFilter } from "../utils"
import SqlTableQueryBuilder from "./sqlTable" import SqlTableQueryBuilder from "./sqlTable"
import { import {
BBReferenceFieldMetadata,
FieldSchema,
FieldSubtype,
FieldType,
JsonFieldMetadata,
Operation, Operation,
QueryJson, QueryJson,
RelationshipsJson, RelationshipsJson,
SearchFilters, SearchFilters,
SortDirection, SortDirection,
Table,
} from "@budibase/types" } from "@budibase/types"
import environment from "../../environment" import environment from "../../environment"
@ -691,6 +697,37 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
return results.length ? results : [{ [operation.toLowerCase()]: true }] return results.length ? results : [{ [operation.toLowerCase()]: true }]
} }
convertJsonStringColumns(
table: Table,
results: Record<string, any>[]
): Record<string, any>[] {
for (const [name, field] of Object.entries(table.schema)) {
if (!this._isJsonColumn(field)) {
continue
}
const fullName = `${table.name}.${name}`
for (let row of results) {
if (typeof row[fullName] === "string") {
row[fullName] = JSON.parse(row[fullName])
}
if (typeof row[name] === "string") {
row[name] = JSON.parse(row[name])
}
}
}
return results
}
_isJsonColumn(
field: FieldSchema
): field is JsonFieldMetadata | BBReferenceFieldMetadata {
return (
field.type === FieldType.JSON ||
(field.type === FieldType.BB_REFERENCE &&
field.subtype === FieldSubtype.USERS)
)
}
log(query: string, values?: any[]) { log(query: string, values?: any[]) {
if (!environment.SQL_LOGGING_ENABLE) { if (!environment.SQL_LOGGING_ENABLE) {
return return
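The new helpers only rewrite columns whose schema marks them as JSON (or a users BB reference) and whose values arrive from the driver as strings, keyed either by the bare column name or by "table.column". A rough illustration on made-up data; sql and ordersTable stand in for a real SqlQueryBuilder instance and table definition:

// Illustrative only: names and values are hypothetical.
declare const sql: { convertJsonStringColumns(t: any, r: any[]): any[] }
declare const ordersTable: any

const rows = [
  { "orders.attributes": '{"priority":"high"}', name: "A" },
  { attributes: '["x","y"]', name: "B" },
]
const parsed = sql.convertJsonStringColumns(ordersTable, rows)
// parsed[0]["orders.attributes"] -> { priority: "high" }
// parsed[1].attributes           -> ["x", "y"]
// Non-string values and columns that aren't JSON/user references are untouched.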

View File

@@ -14,6 +14,8 @@ import {
   Schema,
   TableSourceType,
   DatasourcePlusQueryResponse,
+  FieldType,
+  FieldSubtype,
 } from "@budibase/types"
 import {
   getSqlQuery,
@@ -502,8 +504,14 @@ class SqlServerIntegration extends Sql implements DatasourcePlus {
     }
     const operation = this._operation(json)
     const queryFn = (query: any, op: string) => this.internalQuery(query, op)
-    const processFn = (result: any) =>
-      result.recordset ? result.recordset : [{ [operation]: true }]
+    const processFn = (result: any) => {
+      if (json?.meta?.table && result.recordset) {
+        return this.convertJsonStringColumns(json.meta.table, result.recordset)
+      } else if (result.recordset) {
+        return result.recordset
+      }
+      return [{ [operation]: true }]
+    }
     return this.queryWithReturning(json, queryFn, processFn)
   }

View File

@@ -13,6 +13,8 @@ import {
   Schema,
   TableSourceType,
   DatasourcePlusQueryResponse,
+  FieldType,
+  FieldSubtype,
 } from "@budibase/types"
 import {
   getSqlQuery,
@@ -386,7 +388,13 @@ class MySQLIntegration extends Sql implements DatasourcePlus {
     try {
       const queryFn = (query: any) =>
         this.internalQuery(query, { connect: false, disableCoercion: true })
-      return await this.queryWithReturning(json, queryFn)
+      const processFn = (result: any) => {
+        if (json?.meta?.table && Array.isArray(result)) {
+          return this.convertJsonStringColumns(json.meta.table, result)
+        }
+        return result
+      }
+      return await this.queryWithReturning(json, queryFn, processFn)
     } finally {
       await this.disconnect()
     }

View File

@ -4,6 +4,8 @@ import { Datasource } from "@budibase/types"
import * as postgres from "./postgres" import * as postgres from "./postgres"
import * as mongodb from "./mongodb" import * as mongodb from "./mongodb"
import * as mysql from "./mysql" import * as mysql from "./mysql"
import * as mssql from "./mssql"
import * as mariadb from "./mariadb"
import { StartedTestContainer } from "testcontainers" import { StartedTestContainer } from "testcontainers"
jest.setTimeout(30000) jest.setTimeout(30000)
@ -14,4 +16,10 @@ export interface DatabaseProvider {
datasource(): Promise<Datasource> datasource(): Promise<Datasource>
} }
export const databaseTestProviders = { postgres, mongodb, mysql } export const databaseTestProviders = {
postgres,
mongodb,
mysql,
mssql,
mariadb,
}
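Each provider module exposes the same start/datasource/stop surface, so suites can parameterise over them exactly as the row tests above do. A minimal sketch of that usage; the suite name and assertions are placeholders, and the import path is illustrative:

import { databaseTestProviders } from "."

describe.each([
  ["mssql", databaseTestProviders.mssql],
  ["mariadb", databaseTestProviders.mariadb],
])("example suite (%s)", (__, dsProvider) => {
  it("provides a datasource config", async () => {
    // Starts the container on first use and returns a Datasource config
    // pointing at the mapped host/port.
    const datasource = await dsProvider.datasource()
    expect(datasource.plus).toBe(true)
  })

  afterAll(async () => {
    // Stops and clears the provider's shared container.
    await dsProvider.stop()
  })
})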

View File

@@ -0,0 +1,58 @@
import { Datasource, SourceName } from "@budibase/types"
import { GenericContainer, Wait, StartedTestContainer } from "testcontainers"
import { AbstractWaitStrategy } from "testcontainers/build/wait-strategies/wait-strategy"
let container: StartedTestContainer | undefined
class MariaDBWaitStrategy extends AbstractWaitStrategy {
async waitUntilReady(container: any, boundPorts: any, startTime?: Date) {
// Because MariaDB first starts itself up, runs an init script, then restarts,
// it's possible for the mysqladmin ping to succeed early and then tests to
// run against a MariaDB that's mid-restart and fail. To get around this, we
// wait for logs and then do a ping check.
const logs = Wait.forLogMessage("mariadbd: ready for connections", 2)
await logs.waitUntilReady(container, boundPorts, startTime)
const command = Wait.forSuccessfulCommand(
`mysqladmin ping -h localhost -P 3306 -u root -ppassword`
)
await command.waitUntilReady(container)
}
}
export async function start(): Promise<StartedTestContainer> {
return await new GenericContainer("mariadb:lts")
.withExposedPorts(3306)
.withEnvironment({ MARIADB_ROOT_PASSWORD: "password" })
.withWaitStrategy(new MariaDBWaitStrategy())
.start()
}
export async function datasource(): Promise<Datasource> {
if (!container) {
container = await start()
}
const host = container.getHost()
const port = container.getMappedPort(3306)
return {
type: "datasource_plus",
source: SourceName.MYSQL,
plus: true,
config: {
host,
port,
user: "root",
password: "password",
database: "mysql",
},
}
}
export async function stop() {
if (container) {
await container.stop()
container = undefined
}
}

View File

@@ -0,0 +1,53 @@
import { Datasource, SourceName } from "@budibase/types"
import { GenericContainer, Wait, StartedTestContainer } from "testcontainers"
let container: StartedTestContainer | undefined
export async function start(): Promise<StartedTestContainer> {
return await new GenericContainer(
"mcr.microsoft.com/mssql/server:2022-latest"
)
.withExposedPorts(1433)
.withEnvironment({
ACCEPT_EULA: "Y",
MSSQL_SA_PASSWORD: "Password_123",
// This is important, as Microsoft allow us to use the "Developer" edition
// of SQL Server for development and testing purposes. We can't use other
// versions without a valid license, and we cannot use the Developer
// version in production.
MSSQL_PID: "Developer",
})
.withWaitStrategy(
Wait.forSuccessfulCommand(
"/opt/mssql-tools/bin/sqlcmd -S localhost -U sa -P Password_123 -q 'SELECT 1'"
)
)
.start()
}
export async function datasource(): Promise<Datasource> {
if (!container) {
container = await start()
}
const host = container.getHost()
const port = container.getMappedPort(1433)
return {
type: "datasource_plus",
source: SourceName.SQL_SERVER,
plus: true,
config: {
server: host,
port,
user: "sa",
password: "Password_123",
},
}
}
export async function stop() {
if (container) {
await container.stop()
container = undefined
}
}

View File

@@ -8,6 +8,7 @@ import {
 import { context, logging } from "@budibase/backend-core"
 import tracer from "dd-trace"
 import { IsolatedVM } from "./vm"
+import type { VM } from "@budibase/types"

 export function init() {
   setJSRunner((js: string, ctx: Record<string, any>) => {
@@ -15,18 +16,23 @@
     try {
       const bbCtx = context.getCurrentContext()

-      const vm = bbCtx?.vm
-        ? bbCtx.vm
-        : new IsolatedVM({
+      const vm =
+        bbCtx?.vm ||
+        new IsolatedVM({
           memoryLimit: env.JS_RUNNER_MEMORY_LIMIT,
           invocationTimeout: env.JS_PER_INVOCATION_TIMEOUT_MS,
           isolateAccumulatedTimeout: env.JS_PER_REQUEST_TIMEOUT_MS,
         }).withHelpers()

-      if (bbCtx) {
-        // If we have a context, we want to persist it to reuse the isolate
+      if (bbCtx && !bbCtx.vm) {
         bbCtx.vm = vm
+        bbCtx.cleanup = bbCtx.cleanup || []
+        bbCtx.cleanup.push(() => vm.close())
       }
+
+      // Because we can't pass functions into an Isolate, we remove them from
+      // the passed context and rely on the withHelpers() method to add them
+      // back in.
       const { helpers, ...rest } = ctx
       return vm.withContext(rest, () => vm.execute(js))
     } catch (error: any) {

View File

@@ -195,6 +195,11 @@ export class IsolatedVM implements VM {
     return result[this.runResultKey]
   }

+  close(): void {
+    this.vm.release()
+    this.isolate.dispose()
+  }
+
   private registerCallbacks(functions: Record<string, any>) {
     const libId = crypto.randomUUID().replace(/-/g, "")

View File

@@ -0,0 +1,33 @@
import { Ctx } from "@budibase/types"
import { context } from "@budibase/backend-core"
import { tracer } from "dd-trace"
export default async (ctx: Ctx, next: any) => {
const resp = await next()
const current = context.getCurrentContext()
if (!current || !current.cleanup) {
return resp
}
let errors = []
for (let fn of current.cleanup) {
try {
await tracer.trace("cleanup", async span => {
await fn()
})
} catch (e) {
// We catch errors here to ensure we at least attempt to run all cleanup
// functions. We'll throw the first error we encounter after all cleanup
// functions have been run.
errors.push(e)
}
}
delete current.cleanup
if (errors.length > 0) {
throw errors[0]
}
return resp
}

View File

@@ -1,10 +1,4 @@
-import {
-  Row,
-  SearchFilters,
-  SearchParams,
-  SortOrder,
-  SortType,
-} from "@budibase/types"
+import { Row, SearchFilters, SearchParams, SortOrder } from "@budibase/types"
 import { isExternalTableID } from "../../../integrations/utils"
 import * as internal from "./search/internal"
 import * as external from "./search/external"

View File

@@ -43,7 +43,7 @@ export class AttachmentCleanup {
       if ((columnRemoved && !renaming) || opts.deleting) {
         rows.forEach(row => {
           files = files.concat(
-            row[key].map((attachment: any) => attachment.key)
+            (row[key] || []).map((attachment: any) => attachment.key)
           )
         })
       }

View File

@ -115,4 +115,31 @@ describe("attachment cleanup", () => {
await AttachmentCleanup.rowUpdate(table(), { row: row(), oldRow: row() }) await AttachmentCleanup.rowUpdate(table(), { row: row(), oldRow: row() })
expect(mockedDeleteFiles).not.toBeCalled() expect(mockedDeleteFiles).not.toBeCalled()
}) })
it("should be able to cleanup a column and not throw when attachments are undefined", async () => {
const originalTable = table()
delete originalTable.schema["attach"]
await AttachmentCleanup.tableUpdate(
originalTable,
[row("file 1"), { attach: undefined }, row("file 2")],
{
oldTable: table(),
}
)
expect(mockedDeleteFiles).toBeCalledTimes(1)
expect(mockedDeleteFiles).toBeCalledWith(BUCKET, ["file 1", "file 2"])
})
it("should be able to cleanup a column and not throw when ALL attachments are undefined", async () => {
const originalTable = table()
delete originalTable.schema["attach"]
await AttachmentCleanup.tableUpdate(
originalTable,
[{}, { attach: undefined }],
{
oldTable: table(),
}
)
expect(mockedDeleteFiles).not.toBeCalled()
})
}) })

View File

@@ -1,4 +1,5 @@
 export interface VM {
   execute(code: string): any
   withContext<T>(context: Record<string, any>, executeWithContext: () => T): T
+  close(): void
 }

View File

@ -104,17 +104,79 @@ describe("/api/global/groups", () => {
expect(events.group.permissionsEdited).not.toBeCalled() expect(events.group.permissionsEdited).not.toBeCalled()
}) })
describe("destroy", () => { describe("scim", () => {
it("should be able to delete a basic group", async () => { async function createScimGroup() {
const group = structures.groups.UserGroup() mocks.licenses.useScimIntegration()
let oldGroup = await config.api.groups.saveGroup(group) await config.setSCIMConfig(true)
await config.api.groups.deleteGroup(
oldGroup.body._id,
oldGroup.body._rev
)
expect(events.group.deleted).toBeCalledTimes(1) const scimGroup = await config.api.scimGroupsAPI.post({
body: structures.scim.createGroupRequest({
displayName: generator.word(),
}),
})
const { body: group } = await config.api.groups.find(scimGroup.id)
expect(group).toBeDefined()
return group
}
it("update will not allow sending SCIM fields", async () => {
const group = await createScimGroup()
const updatedGroup: UserGroup = {
...group,
name: generator.word(),
}
await config.api.groups.saveGroup(updatedGroup, {
expect: {
message: 'Invalid body - "scimInfo" is not allowed',
status: 400,
},
})
expect(events.group.updated).not.toBeCalled()
}) })
it("update will not amend the SCIM fields", async () => {
const group: UserGroup = await createScimGroup()
const updatedGroup: UserGroup = {
...group,
name: generator.word(),
scimInfo: undefined,
}
await config.api.groups.saveGroup(updatedGroup, {
expect: 200,
})
expect(events.group.updated).toBeCalledTimes(1)
expect(
(
await config.api.groups.find(group._id!, {
expect: 200,
})
).body
).toEqual(
expect.objectContaining({
...group,
name: updatedGroup.name,
scimInfo: group.scimInfo,
_rev: expect.any(String),
})
)
})
})
})
describe("destroy", () => {
it("should be able to delete a basic group", async () => {
const group = structures.groups.UserGroup()
let oldGroup = await config.api.groups.saveGroup(group)
await config.api.groups.deleteGroup(oldGroup.body._id, oldGroup.body._rev)
expect(events.group.deleted).toBeCalledTimes(1)
}) })
}) })
@@ -147,7 +209,7 @@ describe("/api/global/groups", () => {
       await Promise.all(
         Array.from({ length: 30 }).map(async (_, i) => {
-          const email = `user${i}@example.com`
+          const email = `user${i}+${generator.guid()}@example.com`
           const user = await config.api.users.saveUser({
             ...structures.users.user(),
             email,
@@ -257,12 +319,16 @@ describe("/api/global/groups", () => {
       })
     })

-    it("update should return 200", async () => {
+    it("update should return forbidden", async () => {
       await config.withUser(builder, async () => {
-        await config.api.groups.updateGroupUsers(group._id!, {
-          add: [builder._id!],
-          remove: [],
-        })
+        await config.api.groups.updateGroupUsers(
+          group._id!,
+          {
+            add: [builder._id!],
+            remove: [],
+          },
+          { expect: 403 }
+        )
       })
     })
   })

View File

@ -2,6 +2,7 @@ import tk from "timekeeper"
import _ from "lodash" import _ from "lodash"
import { generator, mocks, structures } from "@budibase/backend-core/tests" import { generator, mocks, structures } from "@budibase/backend-core/tests"
import { import {
CloudAccount,
ScimCreateUserRequest, ScimCreateUserRequest,
ScimGroupResponse, ScimGroupResponse,
ScimUpdateRequest, ScimUpdateRequest,
@ -604,6 +605,25 @@ describe("scim", () => {
expect(events.user.deleted).toBeCalledTimes(1) expect(events.user.deleted).toBeCalledTimes(1)
}) })
it("an account holder cannot be removed even when synched", async () => {
const account: CloudAccount = {
...structures.accounts.account(),
budibaseUserId: user.id,
email: user.emails![0].value,
}
mocks.accounts.getAccount.mockResolvedValue(account)
await deleteScimUser(user.id, {
expect: {
message: "Account holder cannot be deleted",
status: 400,
error: { code: "http" },
},
})
await config.api.scimUsersAPI.find(user.id, { expect: 200 })
})
}) })
}) })

View File

@@ -7,7 +7,10 @@
     super(config)
   }

-  saveGroup = (group: UserGroup, { expect } = { expect: 200 }) => {
+  saveGroup = (
+    group: UserGroup,
+    { expect }: { expect: number | object } = { expect: 200 }
+  ) => {
     return this.request
       .post(`/api/global/groups`)
       .send(group)
@@ -44,14 +47,15 @@
   updateGroupUsers = (
     id: string,
-    body: { add: string[]; remove: string[] }
+    body: { add: string[]; remove: string[] },
+    { expect } = { expect: 200 }
   ) => {
     return this.request
       .post(`/api/global/groups/${id}/users`)
       .send(body)
       .set(this.config.defaultHeaders())
       .expect("Content-Type", /json/)
-      .expect(200)
+      .expect(expect)
   }

   fetch = ({ expect } = { expect: 200 }) => {
@@ -61,4 +65,12 @@
       .expect("Content-Type", /json/)
       .expect(expect)
   }
+
+  find = (id: string, { expect } = { expect: 200 }) => {
+    return this.request
+      .get(`/api/global/groups/${id}`)
+      .set(this.config.defaultHeaders())
+      .expect("Content-Type", /json/)
+      .expect(expect)
+  }
 }

View File

@@ -1,13 +1,17 @@
 import TestConfiguration from "../../TestConfiguration"
 import { TestAPI } from "../base"

-const defaultConfig = {
+const defaultConfig: RequestSettings = {
   expect: 200,
   setHeaders: true,
   skipContentTypeCheck: false,
 }

-export type RequestSettings = typeof defaultConfig
+export type RequestSettings = {
+  expect: number | object
+  setHeaders: boolean
+  skipContentTypeCheck: boolean
+}

 export abstract class ScimTestAPI extends TestAPI {
   constructor(config: TestConfiguration) {