Merge branch 'master' into feature/app-list-actions
This commit is contained in:
commit 1db358d5c2
@@ -1,5 +1,5 @@
  {
- "version": "2.21.6",
+ "version": "2.21.9",
  "npmClient": "yarn",
  "packages": [
  "packages/*",

@@ -1,5 +1,4 @@
  import { IdentityContext, VM } from "@budibase/types"
- import { ExecutionTimeTracker } from "../timers"

  // keep this out of Budibase types, don't want to expose context info
  export type ContextMap = {
@@ -10,6 +9,6 @@ export type ContextMap = {
  isScim?: boolean
  automationId?: string
  isMigrating?: boolean
- jsExecutionTracker?: ExecutionTimeTracker
  vm?: VM
+ cleanup?: (() => void | Promise<void>)[]
  }

@@ -20,41 +20,3 @@ export function cleanup() {
  }
  intervals = []
  }
-
- export class ExecutionTimeoutError extends Error {
- public readonly name = "ExecutionTimeoutError"
- }
-
- export class ExecutionTimeTracker {
- static withLimit(limitMs: number) {
- return new ExecutionTimeTracker(limitMs)
- }
-
- constructor(readonly limitMs: number) {}
-
- private totalTimeMs = 0
-
- track<T>(f: () => T): T {
- this.checkLimit()
- const start = process.hrtime.bigint()
- try {
- return f()
- } finally {
- const end = process.hrtime.bigint()
- this.totalTimeMs += Number(end - start) / 1e6
- this.checkLimit()
- }
- }
-
- get elapsedMS() {
- return this.totalTimeMs
- }
-
- checkLimit() {
- if (this.totalTimeMs > this.limitMs) {
- throw new ExecutionTimeoutError(
- `Execution time limit of ${this.limitMs}ms exceeded: ${this.totalTimeMs}ms`
- )
- }
- }
- }

@@ -49,7 +49,8 @@

  $: group = $groups.find(x => x._id === groupId)
  $: isScimGroup = group?.scimInfo?.isSync
- $: readonly = !sdk.users.isAdmin($auth.user) || isScimGroup
+ $: isAdmin = sdk.users.isAdmin($auth.user)
+ $: readonly = !isAdmin || isScimGroup
  $: groupApps = $apps
  .filter(app =>
  groups.actions

@@ -123,14 +124,18 @@
  <span slot="control">
  <Icon hoverable name="More" />
  </span>
- <MenuItem icon="Refresh" on:click={() => editModal.show()}>
+ <MenuItem
+ icon="Refresh"
+ on:click={() => editModal.show()}
+ disabled={!isAdmin}
+ >
  Edit
  </MenuItem>
  <div title={isScimGroup && "Group synced from your AD"}>
  <MenuItem
  icon="Delete"
  on:click={() => deleteModal.show()}
- disabled={isScimGroup}
+ disabled={readonly}
  >
  Delete
  </MenuItem>

@@ -139,7 +144,7 @@
  </div>

  <Layout noPadding gap="S">
- <GroupUsers {groupId} {readonly} />
+ <GroupUsers {groupId} {readonly} {isScimGroup} />
  </Layout>

  <Layout noPadding gap="S">

@@ -13,6 +13,7 @@

  export let groupId
  export let readonly
+ export let isScimGroup

  let emailSearch
  let fetchGroupUsers

@@ -61,10 +62,10 @@
  </script>

  <div class="header">
- {#if !readonly}
- <EditUserPicker {groupId} onUsersUpdated={fetchGroupUsers.getInitialData} />
- {:else}
+ {#if isScimGroup}
  <ActiveDirectoryInfo text="Users synced from your AD" />
+ {:else if !readonly}
+ <EditUserPicker {groupId} onUsersUpdated={fetchGroupUsers.getInitialData} />
  {/if}

  <div class="controls-right">

@@ -39,9 +39,10 @@
  name: {
  width: "1fr",
  },
- ...(readonly
+ ...(!isAdmin
  ? {}
- : {
+ : // Add
+ {
  _id: {
  displayName: "",
  width: "auto",

@@ -90,7 +91,9 @@
  $: internalGroups = $groups?.filter(g => !g?.scimInfo?.isSync)

  $: isSSO = !!user?.provider
- $: readonly = !sdk.users.isAdmin($auth.user) || user?.scimInfo?.isSync
+ $: isAdmin = sdk.users.isAdmin($auth.user)
+ $: isScim = user?.scimInfo?.isSync
+ $: readonly = !isAdmin || isScim
  $: privileged = sdk.users.isAdminOrGlobalBuilder(user)
  $: nameLabel = getNameLabel(user)
  $: filteredGroups = getFilteredGroups(internalGroups, searchTerm)

@@ -322,23 +325,23 @@
  <Layout gap="S" noPadding>
  <div class="tableTitle">
  <Heading size="S">Groups</Heading>
- {#if internalGroups?.length}
+ {#if internalGroups?.length && isAdmin}
  <div bind:this={popoverAnchor}>
  <Button on:click={popover.show()} secondary>Add to group</Button>
  </div>
- <Popover align="right" bind:this={popover} anchor={popoverAnchor}>
- <UserGroupPicker
- labelKey="name"
- bind:searchTerm
- list={filteredGroups}
- selected={user.userGroups}
- on:select={e => addGroup(e.detail)}
- on:deselect={e => removeGroup(e.detail)}
- iconComponent={GroupIcon}
- extractIconProps={item => ({ group: item, size: "S" })}
- />
- </Popover>
  {/if}
+ <Popover align="right" bind:this={popover} anchor={popoverAnchor}>
+ <UserGroupPicker
+ labelKey="name"
+ bind:searchTerm
+ list={filteredGroups}
+ selected={user.userGroups}
+ on:select={e => addGroup(e.detail)}
+ on:deselect={e => removeGroup(e.detail)}
+ iconComponent={GroupIcon}
+ extractIconProps={item => ({ group: item, size: "S" })}
+ />
+ </Popover>
  </div>
  <Table
  schema={groupSchema}

@@ -35,7 +35,9 @@ export function createGroupsStore() {
  get: getGroup,

  save: async group => {
- const { _scimInfo, ...dataToSave } = group
+ const { ...dataToSave } = group
+ delete dataToSave.scimInfo
+ delete dataToSave.userGroups
  const response = await API.saveGroup(dataToSave)
  group._id = response._id
  group._rev = response._rev

@@ -1 +1 @@
- Subproject commit 4e66a0f7042652763c238b10367310b168905f87
+ Subproject commit c4c98ae70f2e936009250893898ecf11f4ddf2c3

@@ -1,6 +1,7 @@
  import Router from "@koa/router"
  import { auth, middleware, env as envCore } from "@budibase/backend-core"
  import currentApp from "../middleware/currentapp"
+ import cleanup from "../middleware/cleanup"
  import zlib from "zlib"
  import { mainRoutes, staticRoutes, publicRoutes } from "./routes"
  import { middleware as pro } from "@budibase/pro"

@@ -62,6 +63,8 @@ if (apiEnabled()) {
  .use(auth.auditLog)
  // @ts-ignore
  .use(migrations)
+ // @ts-ignore
+ .use(cleanup)

  // authenticated routes
  for (let route of mainRoutes) {

@@ -38,11 +38,18 @@ import * as uuid from "uuid"
  const timestamp = new Date("2023-01-26T11:48:57.597Z").toISOString()
  tk.freeze(timestamp)

+ jest.unmock("mysql2")
+ jest.unmock("mysql2/promise")
+ jest.unmock("mssql")
+
  const { basicRow } = setup.structures

  describe.each([
  ["internal", undefined],
  ["postgres", databaseTestProviders.postgres],
  ["mysql", databaseTestProviders.mysql],
+ ["mssql", databaseTestProviders.mssql],
+ ["mariadb", databaseTestProviders.mariadb],
  ])("/rows (%s)", (__, dsProvider) => {
  const isInternal = !dsProvider

@@ -70,7 +77,7 @@ describe.each([

  const generateTableConfig: () => SaveTableRequest = () => {
  return {
- name: uuid.v4(),
+ name: uuid.v4().substring(0, 16),
  type: "table",
  primary: ["id"],
  primaryDisplay: "name",

@@ -467,7 +474,6 @@ describe.each([
  const createRowResponse = await config.api.row.save(
  createViewResponse.id,
  {
- OrderID: "1111",
  Country: "Aussy",
  Story: "aaaaa",
  }

@@ -477,7 +483,7 @@
  expect(row.Story).toBeUndefined()
  expect(row).toEqual({
  ...defaultRowFields,
- OrderID: 1111,
+ OrderID: createRowResponse.OrderID,
  Country: "Aussy",
  _id: createRowResponse._id,
  _rev: createRowResponse._rev,

@@ -641,7 +647,7 @@
  const createdRow = await config.createRow()

  const res = await config.api.row.bulkDelete(table._id!, {
- rows: [createdRow, { _id: "2" }],
+ rows: [createdRow, { _id: "9999999" }],
  })

  expect(res[0]._id).toEqual(createdRow._id)

@@ -4,11 +4,17 @@ import { QueryOptions } from "../../definitions/datasource"
  import { isIsoDateString, SqlClient, isValidFilter } from "../utils"
  import SqlTableQueryBuilder from "./sqlTable"
  import {
+ BBReferenceFieldMetadata,
+ FieldSchema,
+ FieldSubtype,
+ FieldType,
+ JsonFieldMetadata,
  Operation,
  QueryJson,
  RelationshipsJson,
  SearchFilters,
  SortDirection,
+ Table,
  } from "@budibase/types"
  import environment from "../../environment"

@@ -691,6 +697,37 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
  return results.length ? results : [{ [operation.toLowerCase()]: true }]
  }

+ convertJsonStringColumns(
+ table: Table,
+ results: Record<string, any>[]
+ ): Record<string, any>[] {
+ for (const [name, field] of Object.entries(table.schema)) {
+ if (!this._isJsonColumn(field)) {
+ continue
+ }
+ const fullName = `${table.name}.${name}`
+ for (let row of results) {
+ if (typeof row[fullName] === "string") {
+ row[fullName] = JSON.parse(row[fullName])
+ }
+ if (typeof row[name] === "string") {
+ row[name] = JSON.parse(row[name])
+ }
+ }
+ }
+ return results
+ }
+
+ _isJsonColumn(
+ field: FieldSchema
+ ): field is JsonFieldMetadata | BBReferenceFieldMetadata {
+ return (
+ field.type === FieldType.JSON ||
+ (field.type === FieldType.BB_REFERENCE &&
+ field.subtype === FieldSubtype.USERS)
+ )
+ }
+
  log(query: string, values?: any[]) {
  if (!environment.SQL_LOGGING_ENABLE) {
  return

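Note on the hunk above: convertJsonStringColumns walks the table schema and, for any JSON column (or user-reference column stored as JSON), parses values the SQL driver returned as raw strings, checking both the bare column name and the table-prefixed name. The standalone TypeScript sketch below illustrates the same idea; the simplified field types are stand-ins for the real @budibase/types definitions and are not part of this diff.

type SketchField = { type: "json" | "bb_reference" | "string"; subtype?: "users" }
type SketchTable = { name: string; schema: Record<string, SketchField> }

// JSON columns, and user-reference columns persisted as JSON arrays, both need parsing.
function isJsonLikeColumn(field: SketchField): boolean {
  return (
    field.type === "json" ||
    (field.type === "bb_reference" && field.subtype === "users")
  )
}

function convertJsonStringColumnsSketch(
  table: SketchTable,
  results: Record<string, any>[]
): Record<string, any>[] {
  for (const [name, field] of Object.entries(table.schema)) {
    if (!isJsonLikeColumn(field)) {
      continue
    }
    // Drivers may key the column either as "column" or as "table.column".
    const fullName = `${table.name}.${name}`
    for (const row of results) {
      for (const key of [name, fullName]) {
        if (typeof row[key] === "string") {
          row[key] = JSON.parse(row[key])
        }
      }
    }
  }
  return results
}

// Example: a stringified JSON column comes back parsed in place.
const sketchRows = convertJsonStringColumnsSketch(
  { name: "orders", schema: { meta: { type: "json" } } },
  [{ "orders.meta": '{"priority":"high"}' }]
)
console.log(sketchRows[0]["orders.meta"].priority) // "high"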
@@ -14,6 +14,8 @@ import {
  Schema,
  TableSourceType,
  DatasourcePlusQueryResponse,
+ FieldType,
+ FieldSubtype,
  } from "@budibase/types"
  import {
  getSqlQuery,

@@ -502,8 +504,14 @@ class SqlServerIntegration extends Sql implements DatasourcePlus {
  }
  const operation = this._operation(json)
  const queryFn = (query: any, op: string) => this.internalQuery(query, op)
- const processFn = (result: any) =>
- result.recordset ? result.recordset : [{ [operation]: true }]
+ const processFn = (result: any) => {
+ if (json?.meta?.table && result.recordset) {
+ return this.convertJsonStringColumns(json.meta.table, result.recordset)
+ } else if (result.recordset) {
+ return result.recordset
+ }
+ return [{ [operation]: true }]
+ }
  return this.queryWithReturning(json, queryFn, processFn)
  }

@@ -13,6 +13,8 @@ import {
  Schema,
  TableSourceType,
  DatasourcePlusQueryResponse,
+ FieldType,
+ FieldSubtype,
  } from "@budibase/types"
  import {
  getSqlQuery,

@@ -386,7 +388,13 @@ class MySQLIntegration extends Sql implements DatasourcePlus {
  try {
  const queryFn = (query: any) =>
  this.internalQuery(query, { connect: false, disableCoercion: true })
- return await this.queryWithReturning(json, queryFn)
+ const processFn = (result: any) => {
+ if (json?.meta?.table && Array.isArray(result)) {
+ return this.convertJsonStringColumns(json.meta.table, result)
+ }
+ return result
+ }
+ return await this.queryWithReturning(json, queryFn, processFn)
  } finally {
  await this.disconnect()
  }

@@ -4,6 +4,8 @@ import { Datasource } from "@budibase/types"
  import * as postgres from "./postgres"
  import * as mongodb from "./mongodb"
  import * as mysql from "./mysql"
+ import * as mssql from "./mssql"
+ import * as mariadb from "./mariadb"
  import { StartedTestContainer } from "testcontainers"

  jest.setTimeout(30000)

@@ -14,4 +16,10 @@ export interface DatabaseProvider {
  datasource(): Promise<Datasource>
  }

- export const databaseTestProviders = { postgres, mongodb, mysql }
+ export const databaseTestProviders = {
+ postgres,
+ mongodb,
+ mysql,
+ mssql,
+ mariadb,
+ }

@@ -0,0 +1,58 @@
+ import { Datasource, SourceName } from "@budibase/types"
+ import { GenericContainer, Wait, StartedTestContainer } from "testcontainers"
+ import { AbstractWaitStrategy } from "testcontainers/build/wait-strategies/wait-strategy"
+
+ let container: StartedTestContainer | undefined
+
+ class MariaDBWaitStrategy extends AbstractWaitStrategy {
+ async waitUntilReady(container: any, boundPorts: any, startTime?: Date) {
+ // Because MariaDB first starts itself up, runs an init script, then restarts,
+ // it's possible for the mysqladmin ping to succeed early and then tests to
+ // run against a MariaDB that's mid-restart and fail. To get around this, we
+ // wait for logs and then do a ping check.
+
+ const logs = Wait.forLogMessage("mariadbd: ready for connections", 2)
+ await logs.waitUntilReady(container, boundPorts, startTime)
+
+ const command = Wait.forSuccessfulCommand(
+ `mysqladmin ping -h localhost -P 3306 -u root -ppassword`
+ )
+ await command.waitUntilReady(container)
+ }
+ }
+
+ export async function start(): Promise<StartedTestContainer> {
+ return await new GenericContainer("mariadb:lts")
+ .withExposedPorts(3306)
+ .withEnvironment({ MARIADB_ROOT_PASSWORD: "password" })
+ .withWaitStrategy(new MariaDBWaitStrategy())
+ .start()
+ }
+
+ export async function datasource(): Promise<Datasource> {
+ if (!container) {
+ container = await start()
+ }
+ const host = container.getHost()
+ const port = container.getMappedPort(3306)
+
+ return {
+ type: "datasource_plus",
+ source: SourceName.MYSQL,
+ plus: true,
+ config: {
+ host,
+ port,
+ user: "root",
+ password: "password",
+ database: "mysql",
+ },
+ }
+ }
+
+ export async function stop() {
+ if (container) {
+ await container.stop()
+ container = undefined
+ }
+ }

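As a rough usage sketch (not part of this diff): a Jest suite that wants one of these containers typically resolves the datasource up front and stops the container afterwards. The datasource() and stop() functions are the ones added above; the relative import path and the surrounding test are illustrative assumptions.

import { Datasource } from "@budibase/types"
import { databaseTestProviders } from "." // assumed: the index that re-exports the providers

describe("rows against mariadb", () => {
  let datasource: Datasource

  beforeAll(async () => {
    // First call spins the container up; later calls reuse it.
    datasource = await databaseTestProviders.mariadb.datasource()
  })

  afterAll(async () => {
    // Tear the container down so CI workers don't leak containers.
    await databaseTestProviders.mariadb.stop()
  })

  it("exposes connection details from the started container", () => {
    expect(datasource.config?.user).toEqual("root")
  })
})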
@@ -0,0 +1,53 @@
+ import { Datasource, SourceName } from "@budibase/types"
+ import { GenericContainer, Wait, StartedTestContainer } from "testcontainers"
+
+ let container: StartedTestContainer | undefined
+
+ export async function start(): Promise<StartedTestContainer> {
+ return await new GenericContainer(
+ "mcr.microsoft.com/mssql/server:2022-latest"
+ )
+ .withExposedPorts(1433)
+ .withEnvironment({
+ ACCEPT_EULA: "Y",
+ MSSQL_SA_PASSWORD: "Password_123",
+ // This is important, as Microsoft allow us to use the "Developer" edition
+ // of SQL Server for development and testing purposes. We can't use other
+ // versions without a valid license, and we cannot use the Developer
+ // version in production.
+ MSSQL_PID: "Developer",
+ })
+ .withWaitStrategy(
+ Wait.forSuccessfulCommand(
+ "/opt/mssql-tools/bin/sqlcmd -S localhost -U sa -P Password_123 -q 'SELECT 1'"
+ )
+ )
+ .start()
+ }
+
+ export async function datasource(): Promise<Datasource> {
+ if (!container) {
+ container = await start()
+ }
+ const host = container.getHost()
+ const port = container.getMappedPort(1433)
+
+ return {
+ type: "datasource_plus",
+ source: SourceName.SQL_SERVER,
+ plus: true,
+ config: {
+ server: host,
+ port,
+ user: "sa",
+ password: "Password_123",
+ },
+ }
+ }
+
+ export async function stop() {
+ if (container) {
+ await container.stop()
+ container = undefined
+ }
+ }

@@ -8,6 +8,7 @@ import {
  import { context, logging } from "@budibase/backend-core"
  import tracer from "dd-trace"
  import { IsolatedVM } from "./vm"
+ import type { VM } from "@budibase/types"

  export function init() {
  setJSRunner((js: string, ctx: Record<string, any>) => {

@@ -15,18 +16,23 @@ export function init() {
  try {
  const bbCtx = context.getCurrentContext()

- const vm = bbCtx?.vm
- ? bbCtx.vm
- : new IsolatedVM({
- memoryLimit: env.JS_RUNNER_MEMORY_LIMIT,
- invocationTimeout: env.JS_PER_INVOCATION_TIMEOUT_MS,
- isolateAccumulatedTimeout: env.JS_PER_REQUEST_TIMEOUT_MS,
- }).withHelpers()
+ const vm =
+ bbCtx?.vm ||
+ new IsolatedVM({
+ memoryLimit: env.JS_RUNNER_MEMORY_LIMIT,
+ invocationTimeout: env.JS_PER_INVOCATION_TIMEOUT_MS,
+ isolateAccumulatedTimeout: env.JS_PER_REQUEST_TIMEOUT_MS,
+ }).withHelpers()

- if (bbCtx) {
+ // If we have a context, we want to persist it to reuse the isolate
+ if (bbCtx && !bbCtx.vm) {
  bbCtx.vm = vm
+ bbCtx.cleanup = bbCtx.cleanup || []
+ bbCtx.cleanup.push(() => vm.close())
  }

+ // Because we can't pass functions into an Isolate, we remove them from
+ // the passed context and rely on the withHelpers() method to add them
+ // back in.
  const { helpers, ...rest } = ctx
  return vm.withContext(rest, () => vm.execute(js))
  } catch (error: any) {

@@ -195,6 +195,11 @@ export class IsolatedVM implements VM {
  return result[this.runResultKey]
  }

+ close(): void {
+ this.vm.release()
+ this.isolate.dispose()
+ }
+
  private registerCallbacks(functions: Record<string, any>) {
  const libId = crypto.randomUUID().replace(/-/g, "")

@@ -0,0 +1,33 @@
+ import { Ctx } from "@budibase/types"
+ import { context } from "@budibase/backend-core"
+ import { tracer } from "dd-trace"
+
+ export default async (ctx: Ctx, next: any) => {
+ const resp = await next()
+
+ const current = context.getCurrentContext()
+ if (!current || !current.cleanup) {
+ return resp
+ }
+
+ let errors = []
+ for (let fn of current.cleanup) {
+ try {
+ await tracer.trace("cleanup", async span => {
+ await fn()
+ })
+ } catch (e) {
+ // We catch errors here to ensure we at least attempt to run all cleanup
+ // functions. We'll throw the first error we encounter after all cleanup
+ // functions have been run.
+ errors.push(e)
+ }
+ }
+ delete current.cleanup
+
+ if (errors.length > 0) {
+ throw errors[0]
+ }
+
+ return resp
+ }

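To connect this middleware with the `.use(cleanup)` registration shown in the router hunk earlier: it awaits the downstream handlers, then drains whatever cleanup callbacks were pushed onto the request context (for example the `() => vm.close()` registered by the JS runner above), throwing the first captured error only after every callback has run. The minimal Koa wiring below is an illustrative assumption, not code from this diff.

import Koa from "koa"
import cleanup from "./middleware/cleanup" // the middleware defined above

const app = new Koa()

// cleanup must be registered before the route handlers so that its `await next()`
// wraps them; handlers (or the JS runner) push callbacks onto the current context,
// and cleanup runs them once the response has been produced.
app.use(cleanup as any) // cast stands in for the // @ts-ignore used in the real router
app.use(async ctx => {
  ctx.body = { ok: true }
})

app.listen(4001)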
@@ -1,10 +1,4 @@
- import {
- Row,
- SearchFilters,
- SearchParams,
- SortOrder,
- SortType,
- } from "@budibase/types"
+ import { Row, SearchFilters, SearchParams, SortOrder } from "@budibase/types"
  import { isExternalTableID } from "../../../integrations/utils"
  import * as internal from "./search/internal"
  import * as external from "./search/external"

@@ -43,7 +43,7 @@ export class AttachmentCleanup {
  if ((columnRemoved && !renaming) || opts.deleting) {
  rows.forEach(row => {
  files = files.concat(
- row[key].map((attachment: any) => attachment.key)
+ (row[key] || []).map((attachment: any) => attachment.key)
  )
  })
  }

@@ -115,4 +115,31 @@ describe("attachment cleanup", () => {
  await AttachmentCleanup.rowUpdate(table(), { row: row(), oldRow: row() })
  expect(mockedDeleteFiles).not.toBeCalled()
  })
+
+ it("should be able to cleanup a column and not throw when attachments are undefined", async () => {
+ const originalTable = table()
+ delete originalTable.schema["attach"]
+ await AttachmentCleanup.tableUpdate(
+ originalTable,
+ [row("file 1"), { attach: undefined }, row("file 2")],
+ {
+ oldTable: table(),
+ }
+ )
+ expect(mockedDeleteFiles).toBeCalledTimes(1)
+ expect(mockedDeleteFiles).toBeCalledWith(BUCKET, ["file 1", "file 2"])
+ })
+
+ it("should be able to cleanup a column and not throw when ALL attachments are undefined", async () => {
+ const originalTable = table()
+ delete originalTable.schema["attach"]
+ await AttachmentCleanup.tableUpdate(
+ originalTable,
+ [{}, { attach: undefined }],
+ {
+ oldTable: table(),
+ }
+ )
+ expect(mockedDeleteFiles).not.toBeCalled()
+ })
  })

@@ -1,4 +1,5 @@
  export interface VM {
  execute(code: string): any
  withContext<T>(context: Record<string, any>, executeWithContext: () => T): T
+ close(): void
  }

@@ -104,17 +104,79 @@ describe("/api/global/groups", () => {
  expect(events.group.permissionsEdited).not.toBeCalled()
  })

- describe("destroy", () => {
- it("should be able to delete a basic group", async () => {
- const group = structures.groups.UserGroup()
- let oldGroup = await config.api.groups.saveGroup(group)
- await config.api.groups.deleteGroup(
- oldGroup.body._id,
- oldGroup.body._rev
- )
+ describe("scim", () => {
+ async function createScimGroup() {
+ mocks.licenses.useScimIntegration()
+ await config.setSCIMConfig(true)

- expect(events.group.deleted).toBeCalledTimes(1)
+ const scimGroup = await config.api.scimGroupsAPI.post({
+ body: structures.scim.createGroupRequest({
+ displayName: generator.word(),
+ }),
+ })
+
+ const { body: group } = await config.api.groups.find(scimGroup.id)
+
+ expect(group).toBeDefined()
+ return group
+ }
+
+ it("update will not allow sending SCIM fields", async () => {
+ const group = await createScimGroup()
+
+ const updatedGroup: UserGroup = {
+ ...group,
+ name: generator.word(),
+ }
+ await config.api.groups.saveGroup(updatedGroup, {
+ expect: {
+ message: 'Invalid body - "scimInfo" is not allowed',
+ status: 400,
+ },
+ })
+
+ expect(events.group.updated).not.toBeCalled()
+ })
+
+ it("update will not amend the SCIM fields", async () => {
+ const group: UserGroup = await createScimGroup()
+
+ const updatedGroup: UserGroup = {
+ ...group,
+ name: generator.word(),
+ scimInfo: undefined,
+ }
+
+ await config.api.groups.saveGroup(updatedGroup, {
+ expect: 200,
+ })
+
+ expect(events.group.updated).toBeCalledTimes(1)
+ expect(
+ (
+ await config.api.groups.find(group._id!, {
+ expect: 200,
+ })
+ ).body
+ ).toEqual(
+ expect.objectContaining({
+ ...group,
+ name: updatedGroup.name,
+ scimInfo: group.scimInfo,
+ _rev: expect.any(String),
+ })
+ )
+ })
+ })
+ })
+
+ describe("destroy", () => {
+ it("should be able to delete a basic group", async () => {
+ const group = structures.groups.UserGroup()
+ let oldGroup = await config.api.groups.saveGroup(group)
+ await config.api.groups.deleteGroup(oldGroup.body._id, oldGroup.body._rev)
+
+ expect(events.group.deleted).toBeCalledTimes(1)
  })
  })

@@ -147,7 +209,7 @@ describe("/api/global/groups", () => {

  await Promise.all(
  Array.from({ length: 30 }).map(async (_, i) => {
- const email = `user${i}@example.com`
+ const email = `user${i}+${generator.guid()}@example.com`
  const user = await config.api.users.saveUser({
  ...structures.users.user(),
  email,

@@ -257,12 +319,16 @@ describe("/api/global/groups", () => {
  })
  })

- it("update should return 200", async () => {
+ it("update should return forbidden", async () => {
  await config.withUser(builder, async () => {
- await config.api.groups.updateGroupUsers(group._id!, {
- add: [builder._id!],
- remove: [],
- })
+ await config.api.groups.updateGroupUsers(
+ group._id!,
+ {
+ add: [builder._id!],
+ remove: [],
+ },
+ { expect: 403 }
+ )
  })
  })
  })

@@ -2,6 +2,7 @@ import tk from "timekeeper"
  import _ from "lodash"
  import { generator, mocks, structures } from "@budibase/backend-core/tests"
  import {
+ CloudAccount,
  ScimCreateUserRequest,
  ScimGroupResponse,
  ScimUpdateRequest,

@@ -604,6 +605,25 @@ describe("scim", () => {

  expect(events.user.deleted).toBeCalledTimes(1)
  })
+
+ it("an account holder cannot be removed even when synched", async () => {
+ const account: CloudAccount = {
+ ...structures.accounts.account(),
+ budibaseUserId: user.id,
+ email: user.emails![0].value,
+ }
+ mocks.accounts.getAccount.mockResolvedValue(account)
+
+ await deleteScimUser(user.id, {
+ expect: {
+ message: "Account holder cannot be deleted",
+ status: 400,
+ error: { code: "http" },
+ },
+ })
+
+ await config.api.scimUsersAPI.find(user.id, { expect: 200 })
+ })
  })
  })

@@ -7,7 +7,10 @@ export class GroupsAPI extends TestAPI {
  super(config)
  }

- saveGroup = (group: UserGroup, { expect } = { expect: 200 }) => {
+ saveGroup = (
+ group: UserGroup,
+ { expect }: { expect: number | object } = { expect: 200 }
+ ) => {
  return this.request
  .post(`/api/global/groups`)
  .send(group)

@@ -44,14 +47,15 @@ export class GroupsAPI extends TestAPI {

  updateGroupUsers = (
  id: string,
- body: { add: string[]; remove: string[] }
+ body: { add: string[]; remove: string[] },
+ { expect } = { expect: 200 }
  ) => {
  return this.request
  .post(`/api/global/groups/${id}/users`)
  .send(body)
  .set(this.config.defaultHeaders())
  .expect("Content-Type", /json/)
- .expect(200)
+ .expect(expect)
  }

  fetch = ({ expect } = { expect: 200 }) => {

@@ -61,4 +65,12 @@ export class GroupsAPI extends TestAPI {
  .expect("Content-Type", /json/)
  .expect(expect)
  }
+
+ find = (id: string, { expect } = { expect: 200 }) => {
+ return this.request
+ .get(`/api/global/groups/${id}`)
+ .set(this.config.defaultHeaders())
+ .expect("Content-Type", /json/)
+ .expect(expect)
+ }
  }

@@ -1,13 +1,17 @@
  import TestConfiguration from "../../TestConfiguration"
  import { TestAPI } from "../base"

- const defaultConfig = {
+ const defaultConfig: RequestSettings = {
  expect: 200,
  setHeaders: true,
  skipContentTypeCheck: false,
  }

- export type RequestSettings = typeof defaultConfig
+ export type RequestSettings = {
+ expect: number | object
+ setHeaders: boolean
+ skipContentTypeCheck: boolean
+ }

  export abstract class ScimTestAPI extends TestAPI {
  constructor(config: TestConfiguration) {