Merge branch 'develop' into chore/pro_as_submodule

This commit is contained in:
adrinr 2023-04-18 12:21:38 +01:00
commit 37158594f7
75 changed files with 1135 additions and 463 deletions

View File

@ -62,7 +62,6 @@ jobs:
- name: Build/release Docker images
run: |
docker login -u $DOCKER_USER -p $DOCKER_PASSWORD
yarn build
yarn build:docker:develop
env:
DOCKER_USER: ${{ secrets.DOCKER_USERNAME }}

View File

@ -1,5 +1,5 @@
{
"version": "2.4.44-alpha.19",
"version": "2.5.6-alpha.3",
"npmClient": "yarn",
"packages": [
"packages/backend-core",

View File

@ -1,6 +1,6 @@
{
"name": "@budibase/backend-core",
"version": "2.4.44-alpha.19",
"version": "2.5.6-alpha.3",
"description": "Budibase backend core libraries used in server and worker",
"main": "dist/src/index.js",
"types": "dist/src/index.d.ts",
@ -22,7 +22,7 @@
"dependencies": {
"@budibase/nano": "10.1.2",
"@budibase/pouchdb-replication-stream": "1.2.10",
"@budibase/types": "2.4.44-alpha.19",
"@budibase/types": "2.5.6-alpha.3",
"@shopify/jest-koa-mocks": "5.0.1",
"@techpass/passport-openidconnect": "0.3.2",
"aws-cloudfront-sign": "2.2.0",
@ -52,8 +52,7 @@
"sanitize-s3-objectkey": "0.0.1",
"semver": "7.3.7",
"tar-fs": "2.1.1",
"uuid": "8.3.2",
"zlib": "1.0.5"
"uuid": "8.3.2"
},
"devDependencies": {
"@jest/test-sequencer": "29.5.0",
@ -62,7 +61,7 @@
"@trendyol/jest-testcontainers": "^2.1.1",
"@types/chance": "1.1.3",
"@types/ioredis": "4.28.0",
"@types/jest": "28.1.1",
"@types/jest": "29.5.0",
"@types/koa": "2.13.4",
"@types/lodash": "4.14.180",
"@types/node": "14.18.20",

View File

@ -42,7 +42,11 @@ async function removeDeprecated(db: Database, viewName: ViewName) {
}
}
export async function createView(db: any, viewJs: string, viewName: string) {
export async function createView(
db: any,
viewJs: string,
viewName: string
): Promise<void> {
let designDoc
try {
designDoc = (await db.get(DESIGN_DB)) as DesignDocument
@ -57,7 +61,15 @@ export async function createView(db: any, viewJs: string, viewName: string) {
...designDoc.views,
[viewName]: view,
}
await db.put(designDoc)
try {
await db.put(designDoc)
} catch (err: any) {
if (err.status === 409) {
return await createView(db, viewJs, viewName)
} else {
throw err
}
}
}
export const createNewUserEmailView = async () => {
@ -135,6 +147,10 @@ export const queryView = async <T>(
await removeDeprecated(db, viewName)
await createFunc()
return queryView(viewName, params, db, createFunc, opts)
} else if (err.status === 409) {
// can happen when multiple queries occur at once, view couldn't be created
// other design docs being updated, re-run
return queryView(viewName, params, db, createFunc, opts)
} else {
throw err
}

View File

@ -0,0 +1,29 @@
import { asyncEventQueue, init as initQueue } from "../events/asyncEvents"
import {
ProcessorMap,
default as DocumentUpdateProcessor,
} from "../events/processors/async/DocumentUpdateProcessor"
let processingPromise: Promise<void>
let documentProcessor: DocumentUpdateProcessor
export function init(processors: ProcessorMap) {
if (!asyncEventQueue) {
initQueue()
}
if (!documentProcessor) {
documentProcessor = new DocumentUpdateProcessor(processors)
}
// if not processing in this instance, kick it off
if (!processingPromise) {
processingPromise = asyncEventQueue.process(async job => {
const { event, identity, properties, timestamp } = job.data
await documentProcessor.processEvent(
event,
identity,
properties,
timestamp
)
})
}
}

View File

@ -0,0 +1,2 @@
export * from "./queue"
export * from "./publisher"

View File

@ -0,0 +1,12 @@
import { AsyncEvents } from "@budibase/types"
import { EventPayload, asyncEventQueue, init } from "./queue"
export async function publishAsyncEvent(payload: EventPayload) {
if (!asyncEventQueue) {
init()
}
const { event, identity } = payload
if (AsyncEvents.indexOf(event) !== -1 && identity.tenantId) {
await asyncEventQueue.add(payload)
}
}

View File

@ -0,0 +1,22 @@
import BullQueue from "bull"
import { createQueue, JobQueue } from "../../queue"
import { Event, Identity } from "@budibase/types"
export interface EventPayload {
event: Event
identity: Identity
properties: any
timestamp?: string | number
}
export let asyncEventQueue: BullQueue.Queue
export function init() {
asyncEventQueue = createQueue<EventPayload>(JobQueue.SYSTEM_EVENT_QUEUE)
}
export async function shutdown() {
if (asyncEventQueue) {
await asyncEventQueue.close()
}
}

View File

@ -0,0 +1,56 @@
import {
Event,
UserCreatedEvent,
UserUpdatedEvent,
UserDeletedEvent,
UserPermissionAssignedEvent,
UserPermissionRemovedEvent,
GroupCreatedEvent,
GroupUpdatedEvent,
GroupDeletedEvent,
GroupUsersAddedEvent,
GroupUsersDeletedEvent,
GroupPermissionsEditedEvent,
} from "@budibase/types"
const getEventProperties: Record<
string,
(properties: any) => string | undefined
> = {
[Event.USER_CREATED]: (properties: UserCreatedEvent) => properties.userId,
[Event.USER_UPDATED]: (properties: UserUpdatedEvent) => properties.userId,
[Event.USER_DELETED]: (properties: UserDeletedEvent) => properties.userId,
[Event.USER_PERMISSION_ADMIN_ASSIGNED]: (
properties: UserPermissionAssignedEvent
) => properties.userId,
[Event.USER_PERMISSION_ADMIN_REMOVED]: (
properties: UserPermissionRemovedEvent
) => properties.userId,
[Event.USER_PERMISSION_BUILDER_ASSIGNED]: (
properties: UserPermissionAssignedEvent
) => properties.userId,
[Event.USER_PERMISSION_BUILDER_REMOVED]: (
properties: UserPermissionRemovedEvent
) => properties.userId,
[Event.USER_GROUP_CREATED]: (properties: GroupCreatedEvent) =>
properties.groupId,
[Event.USER_GROUP_UPDATED]: (properties: GroupUpdatedEvent) =>
properties.groupId,
[Event.USER_GROUP_DELETED]: (properties: GroupDeletedEvent) =>
properties.groupId,
[Event.USER_GROUP_USERS_ADDED]: (properties: GroupUsersAddedEvent) =>
properties.groupId,
[Event.USER_GROUP_USERS_REMOVED]: (properties: GroupUsersDeletedEvent) =>
properties.groupId,
[Event.USER_GROUP_PERMISSIONS_EDITED]: (
properties: GroupPermissionsEditedEvent
) => properties.groupId,
}
export function getDocumentId(event: Event, properties: any) {
const extractor = getEventProperties[event]
if (!extractor) {
throw new Error("Event does not have a method of document ID extraction")
}
return extractor(properties)
}

View File

@ -1,7 +1,8 @@
import { Event, AuditedEventFriendlyName } from "@budibase/types"
import { Event } from "@budibase/types"
import { processors } from "./processors"
import identification from "./identification"
import * as backfill from "./backfill"
import { publishAsyncEvent } from "./asyncEvents"
export const publishEvent = async (
event: Event,
@ -14,6 +15,14 @@ export const publishEvent = async (
const backfilling = await backfill.isBackfillingEvent(event)
// no backfill - send the event and exit
if (!backfilling) {
// send off async events if required
await publishAsyncEvent({
event,
identity,
properties,
timestamp,
})
// now handle the main sync event processing pipeline
await processors.processEvent(event, identity, properties, timestamp)
return
}

View File

@ -65,6 +65,7 @@ const getCurrentIdentity = async (): Promise<Identity> => {
hosting,
installationId,
tenantId,
realTenantId: context.getTenantId(),
environment,
}
} else if (identityType === IdentityType.USER) {

View File

@ -6,6 +6,8 @@ export * as backfillCache from "./backfill"
import { processors } from "./processors"
export function initAsyncEvents() {}
export const shutdown = () => {
processors.shutdown()
console.log("Events shutdown")

View File

@ -25,7 +25,9 @@ export default class Processor implements EventProcessor {
timestamp?: string | number
): Promise<void> {
for (const eventProcessor of this.processors) {
await eventProcessor.identify(identity, timestamp)
if (eventProcessor.identify) {
await eventProcessor.identify(identity, timestamp)
}
}
}
@ -34,13 +36,17 @@ export default class Processor implements EventProcessor {
timestamp?: string | number
): Promise<void> {
for (const eventProcessor of this.processors) {
await eventProcessor.identifyGroup(identity, timestamp)
if (eventProcessor.identifyGroup) {
await eventProcessor.identifyGroup(identity, timestamp)
}
}
}
shutdown() {
for (const eventProcessor of this.processors) {
eventProcessor.shutdown()
if (eventProcessor.shutdown) {
eventProcessor.shutdown()
}
}
}
}

View File

@ -0,0 +1,43 @@
import { EventProcessor } from "../types"
import { Event, Identity, DocUpdateEvent } from "@budibase/types"
import { doInTenant } from "../../../context"
import { getDocumentId } from "../../documentId"
import { shutdown } from "../../asyncEvents"
export type Processor = (update: DocUpdateEvent) => Promise<void>
export type ProcessorMap = { events: Event[]; processor: Processor }[]
export default class DocumentUpdateProcessor implements EventProcessor {
processors: ProcessorMap = []
constructor(processors: ProcessorMap) {
this.processors = processors
}
async processEvent(
event: Event,
identity: Identity,
properties: any,
timestamp?: string | number
) {
const tenantId = identity.realTenantId
const docId = getDocumentId(event, properties)
if (!tenantId || !docId) {
return
}
for (let { events, processor } of this.processors) {
if (events.includes(event)) {
await doInTenant(tenantId, async () => {
await processor({
id: docId,
tenantId,
})
})
}
}
}
shutdown() {
return shutdown()
}
}

View File

@ -1,18 +1 @@
import { Event, Identity, Group } from "@budibase/types"
export enum EventProcessorType {
POSTHOG = "posthog",
LOGGING = "logging",
}
export interface EventProcessor {
processEvent(
event: Event,
identity: Identity,
properties: any,
timestamp?: string | number
): Promise<void>
identify(identity: Identity, timestamp?: string | number): Promise<void>
identifyGroup(group: Group, timestamp?: string | number): Promise<void>
shutdown(): void
}
export { EventProcessor } from "@budibase/types"

View File

@ -27,6 +27,7 @@ export * as errors from "./errors"
export * as timers from "./timers"
export { default as env } from "./environment"
export * as blacklist from "./blacklist"
export * as docUpdates from "./docUpdates"
export { SearchParams } from "./db"
// Add context to tenancy for backwards compatibility
// only do this for external usages to prevent internal

View File

@ -3,7 +3,7 @@ import AWS from "aws-sdk"
import stream from "stream"
import fetch from "node-fetch"
import tar from "tar-fs"
const zlib = require("zlib")
import zlib from "zlib"
import { promisify } from "util"
import { join } from "path"
import fs from "fs"
@ -415,7 +415,7 @@ export const downloadTarballDirect = async (
throw new Error(`unexpected response ${response.statusText}`)
}
await streamPipeline(response.body, zlib.Unzip(), tar.extract(path))
await streamPipeline(response.body, zlib.createUnzip(), tar.extract(path))
}
export const downloadTarball = async (
@ -431,7 +431,7 @@ export const downloadTarball = async (
}
const tmpPath = join(budibaseTempDir(), path)
await streamPipeline(response.body, zlib.Unzip(), tar.extract(tmpPath))
await streamPipeline(response.body, zlib.createUnzip(), tar.extract(tmpPath))
if (!env.isTest() && env.SELF_HOSTED) {
await uploadDirectory(bucketName, tmpPath, path)
}

View File

@ -2,4 +2,5 @@ export enum JobQueue {
AUTOMATION = "automationQueue",
APP_BACKUP = "appBackupQueue",
AUDIT_LOG = "auditLogQueue",
SYSTEM_EVENT_QUEUE = "systemEventQueue",
}

View File

@ -24,7 +24,7 @@ export enum PermissionType {
QUERY = "query",
}
class Permission {
export class Permission {
type: PermissionType
level: PermissionLevel
@ -34,7 +34,7 @@ class Permission {
}
}
function levelToNumber(perm: PermissionLevel) {
export function levelToNumber(perm: PermissionLevel) {
switch (perm) {
// not everything has execute privileges
case PermissionLevel.EXECUTE:
@ -55,7 +55,7 @@ function levelToNumber(perm: PermissionLevel) {
* @param {string} userPermLevel The permission level of the user.
* @return {string[]} All the permission levels this user is allowed to carry out.
*/
function getAllowedLevels(userPermLevel: PermissionLevel) {
export function getAllowedLevels(userPermLevel: PermissionLevel): string[] {
switch (userPermLevel) {
case PermissionLevel.EXECUTE:
return [PermissionLevel.EXECUTE]
@ -64,9 +64,9 @@ function getAllowedLevels(userPermLevel: PermissionLevel) {
case PermissionLevel.WRITE:
case PermissionLevel.ADMIN:
return [
PermissionLevel.EXECUTE,
PermissionLevel.READ,
PermissionLevel.WRITE,
PermissionLevel.EXECUTE,
]
default:
return []
@ -81,7 +81,7 @@ export enum BuiltinPermissionID {
POWER = "power",
}
const BUILTIN_PERMISSIONS = {
export const BUILTIN_PERMISSIONS = {
PUBLIC: {
_id: BuiltinPermissionID.PUBLIC,
name: "Public",

View File

@ -0,0 +1,145 @@
import { cloneDeep } from "lodash"
import * as permissions from "../permissions"
import { BUILTIN_ROLE_IDS } from "../roles"
describe("levelToNumber", () => {
it("should return 0 for EXECUTE", () => {
expect(permissions.levelToNumber(permissions.PermissionLevel.EXECUTE)).toBe(
0
)
})
it("should return 1 for READ", () => {
expect(permissions.levelToNumber(permissions.PermissionLevel.READ)).toBe(1)
})
it("should return 2 for WRITE", () => {
expect(permissions.levelToNumber(permissions.PermissionLevel.WRITE)).toBe(2)
})
it("should return 3 for ADMIN", () => {
expect(permissions.levelToNumber(permissions.PermissionLevel.ADMIN)).toBe(3)
})
it("should return -1 for an unknown permission level", () => {
expect(
permissions.levelToNumber("unknown" as permissions.PermissionLevel)
).toBe(-1)
})
})
describe("getAllowedLevels", () => {
it('should return ["execute"] for EXECUTE', () => {
expect(
permissions.getAllowedLevels(permissions.PermissionLevel.EXECUTE)
).toEqual([permissions.PermissionLevel.EXECUTE])
})
it('should return ["execute", "read"] for READ', () => {
expect(
permissions.getAllowedLevels(permissions.PermissionLevel.READ)
).toEqual([
permissions.PermissionLevel.EXECUTE,
permissions.PermissionLevel.READ,
])
})
it('should return ["execute", "read", "write"] for WRITE', () => {
expect(
permissions.getAllowedLevels(permissions.PermissionLevel.WRITE)
).toEqual([
permissions.PermissionLevel.EXECUTE,
permissions.PermissionLevel.READ,
permissions.PermissionLevel.WRITE,
])
})
it('should return ["execute", "read", "write"] for ADMIN', () => {
expect(
permissions.getAllowedLevels(permissions.PermissionLevel.ADMIN)
).toEqual([
permissions.PermissionLevel.EXECUTE,
permissions.PermissionLevel.READ,
permissions.PermissionLevel.WRITE,
])
})
it("should return [] for an unknown permission level", () => {
expect(
permissions.getAllowedLevels("unknown" as permissions.PermissionLevel)
).toEqual([])
})
})
describe("doesHaveBasePermission", () => {
it("should return true if base permission has the required level", () => {
const permType = permissions.PermissionType.USER
const permLevel = permissions.PermissionLevel.READ
const rolesHierarchy = [
{
roleId: BUILTIN_ROLE_IDS.ADMIN,
permissionId: permissions.BuiltinPermissionID.ADMIN,
},
]
expect(
permissions.doesHaveBasePermission(permType, permLevel, rolesHierarchy)
).toBe(true)
})
it("should return false if base permission does not have the required level", () => {
const permType = permissions.PermissionType.APP
const permLevel = permissions.PermissionLevel.READ
const rolesHierarchy = [
{
roleId: BUILTIN_ROLE_IDS.PUBLIC,
permissionId: permissions.BuiltinPermissionID.PUBLIC,
},
]
expect(
permissions.doesHaveBasePermission(permType, permLevel, rolesHierarchy)
).toBe(false)
})
})
describe("isPermissionLevelHigherThanRead", () => {
it("should return true if level is higher than read", () => {
expect(
permissions.isPermissionLevelHigherThanRead(
permissions.PermissionLevel.WRITE
)
).toBe(true)
})
it("should return false if level is read or lower", () => {
expect(
permissions.isPermissionLevelHigherThanRead(
permissions.PermissionLevel.READ
)
).toBe(false)
})
})
describe("getBuiltinPermissions", () => {
it("returns a clone of the builtin permissions", () => {
const builtins = permissions.getBuiltinPermissions()
expect(builtins).toEqual(cloneDeep(permissions.BUILTIN_PERMISSIONS))
expect(builtins).not.toBe(permissions.BUILTIN_PERMISSIONS)
})
})
describe("getBuiltinPermissionByID", () => {
it("returns correct permission object for valid ID", () => {
const expectedPermission = {
_id: permissions.BuiltinPermissionID.PUBLIC,
name: "Public",
permissions: [
new permissions.Permission(
permissions.PermissionType.WEBHOOK,
permissions.PermissionLevel.EXECUTE
),
],
}
expect(permissions.getBuiltinPermissionByID("public")).toEqual(
expectedPermission
)
})
})

View File

@ -1,7 +1,7 @@
{
"name": "@budibase/bbui",
"description": "A UI solution used in the different Budibase projects.",
"version": "2.4.44-alpha.19",
"version": "2.5.6-alpha.3",
"license": "MPL-2.0",
"svelte": "src/index.js",
"module": "dist/bbui.es.js",
@ -38,8 +38,8 @@
],
"dependencies": {
"@adobe/spectrum-css-workflow-icons": "1.2.1",
"@budibase/shared-core": "2.4.44-alpha.19",
"@budibase/string-templates": "2.4.44-alpha.19",
"@budibase/shared-core": "2.5.6-alpha.3",
"@budibase/string-templates": "2.5.6-alpha.3",
"@spectrum-css/accordion": "3.0.24",
"@spectrum-css/actionbutton": "1.0.1",
"@spectrum-css/actiongroup": "1.0.1",

View File

@ -10,7 +10,14 @@ export default function positionDropdown(element, opts) {
// Updates the position of the dropdown
const updatePosition = opts => {
const { anchor, align, maxWidth, useAnchorWidth, offset = 5 } = opts
const {
anchor,
align,
maxHeight,
maxWidth,
useAnchorWidth,
offset = 5,
} = opts
if (!anchor) {
return
}
@ -31,10 +38,11 @@ export default function positionDropdown(element, opts) {
styles.top = anchorBounds.top
} else if (window.innerHeight - anchorBounds.bottom < 100) {
styles.top = anchorBounds.top - elementBounds.height - offset
styles.maxHeight = 240
styles.maxHeight = maxHeight || 240
} else {
styles.top = anchorBounds.bottom + offset
styles.maxHeight = window.innerHeight - anchorBounds.bottom - 20
styles.maxHeight =
maxHeight || window.innerHeight - anchorBounds.bottom - 20
}
// Determine horizontal styles

View File

@ -14,6 +14,7 @@
export let align = "right"
export let portalTarget
export let maxWidth
export let maxHeight
export let open = false
export let useAnchorWidth = false
export let dismissible = true
@ -64,6 +65,7 @@
use:positionDropdown={{
anchor,
align,
maxHeight,
maxWidth,
useAnchorWidth,
offset,

View File

@ -1,6 +1,6 @@
{
"name": "@budibase/builder",
"version": "2.4.44-alpha.19",
"version": "2.5.6-alpha.3",
"license": "GPL-3.0",
"private": true,
"scripts": {
@ -58,11 +58,11 @@
}
},
"dependencies": {
"@budibase/bbui": "2.4.44-alpha.19",
"@budibase/client": "2.4.44-alpha.19",
"@budibase/frontend-core": "2.4.44-alpha.19",
"@budibase/shared-core": "2.4.44-alpha.19",
"@budibase/string-templates": "2.4.44-alpha.19",
"@budibase/bbui": "2.5.6-alpha.3",
"@budibase/client": "2.5.6-alpha.3",
"@budibase/frontend-core": "2.5.6-alpha.3",
"@budibase/shared-core": "2.5.6-alpha.3",
"@budibase/string-templates": "2.5.6-alpha.3",
"@fortawesome/fontawesome-svg-core": "^6.2.1",
"@fortawesome/free-brands-svg-icons": "^6.2.1",
"@fortawesome/free-solid-svg-icons": "^6.2.1",

View File

@ -120,7 +120,7 @@ export const toBindingsArray = (valueMap, prefix, category) => {
return []
}
return Object.keys(valueMap).reduce((acc, binding) => {
if (!binding || !valueMap[binding]) {
if (!binding) {
return acc
}

View File

@ -42,7 +42,13 @@
</script>
{#if type === "options" && meta.constraints.inclusion.length !== 0}
<Select {label} bind:value options={meta.constraints.inclusion} sort />
<Select
{label}
bind:value
options={meta.constraints.inclusion}
sort
{error}
/>
{:else if type === "datetime"}
<DatePicker
{error}

View File

@ -27,21 +27,19 @@
notifications.success("Row saved successfully")
dispatch("updaterows")
} catch (error) {
if (error.handled) {
const response = error.json
if (response?.errors) {
errors = response.errors
} else if (response?.validationErrors) {
const mappedErrors = {}
for (let field in response.validationErrors) {
mappedErrors[
field
] = `${field} ${response.validationErrors[field][0]}`
}
errors = mappedErrors
const response = error.json
if (error.handled && response?.errors) {
errors = response.errors
} else if (error.handled && response?.validationErrors) {
const mappedErrors = {}
for (let field in response.validationErrors) {
mappedErrors[
field
] = `${field} ${response.validationErrors[field][0]}`
}
errors = mappedErrors
} else {
notifications.error("Failed to save row")
notifications.error(`Failed to save row - ${error.message}`)
}
// Prevent modal closing if there were errors
return false

View File

@ -13,12 +13,7 @@
<div bind:this={popoverAnchor} class="help">
<button class="openMenu" on:click={show}>Help</button>
<Popover
class="helpMenuPopoverOverride"
bind:show
bind:hide
anchor={popoverAnchor}
>
<Popover maxHeight={1000} bind:show bind:hide anchor={popoverAnchor}>
<nav class="helpMenu">
<div class="header">
<Heading size="XS">Help resources</Heading>

View File

@ -75,8 +75,7 @@
let loaded = false
let editModal, deleteModal
const scimEnabled = $features.isScimEnabled
$: scimEnabled = $features.isScimEnabled
$: readonly = !$auth.isAdmin || scimEnabled
$: page = $pageInfo.page
$: fetchUsers(page, searchTerm)

View File

@ -86,8 +86,7 @@
let user
let loaded = false
const scimEnabled = $features.isScimEnabled
$: scimEnabled = $features.isScimEnabled
$: isSSO = !!user?.provider
$: readonly = !$auth.isAdmin || scimEnabled
$: privileged = user?.admin?.global || user?.builder?.global

View File

@ -1,6 +1,6 @@
{
"name": "@budibase/cli",
"version": "2.4.44-alpha.19",
"version": "2.5.6-alpha.3",
"description": "Budibase CLI, for developers, self hosting and migrations.",
"main": "dist/index.js",
"bin": {
@ -29,9 +29,9 @@
"outputPath": "build"
},
"dependencies": {
"@budibase/backend-core": "2.4.44-alpha.19",
"@budibase/string-templates": "2.4.44-alpha.19",
"@budibase/types": "2.4.44-alpha.19",
"@budibase/backend-core": "2.5.6-alpha.3",
"@budibase/string-templates": "2.5.6-alpha.3",
"@budibase/types": "2.5.6-alpha.3",
"axios": "0.21.2",
"chalk": "4.1.0",
"cli-progress": "3.11.2",

View File

@ -1,6 +1,6 @@
{
"name": "@budibase/client",
"version": "2.4.44-alpha.19",
"version": "2.5.6-alpha.3",
"license": "MPL-2.0",
"module": "dist/budibase-client.js",
"main": "dist/budibase-client.js",
@ -19,11 +19,11 @@
"dev:builder": "rollup -cw"
},
"dependencies": {
"@budibase/bbui": "2.4.44-alpha.19",
"@budibase/frontend-core": "2.4.44-alpha.19",
"@budibase/shared-core": "2.4.44-alpha.19",
"@budibase/string-templates": "2.4.44-alpha.19",
"@budibase/types": "2.4.44-alpha.19",
"@budibase/bbui": "2.5.6-alpha.3",
"@budibase/frontend-core": "2.5.6-alpha.3",
"@budibase/shared-core": "2.5.6-alpha.3",
"@budibase/string-templates": "2.5.6-alpha.3",
"@budibase/types": "2.5.6-alpha.3",
"@spectrum-css/button": "^3.0.3",
"@spectrum-css/card": "^3.0.3",
"@spectrum-css/divider": "^1.0.3",

View File

@ -1,13 +1,13 @@
{
"name": "@budibase/frontend-core",
"version": "2.4.44-alpha.19",
"version": "2.5.6-alpha.3",
"description": "Budibase frontend core libraries used in builder and client",
"author": "Budibase",
"license": "MPL-2.0",
"svelte": "src/index.js",
"dependencies": {
"@budibase/bbui": "2.4.44-alpha.19",
"@budibase/shared-core": "2.4.44-alpha.19",
"@budibase/bbui": "2.5.6-alpha.3",
"@budibase/shared-core": "2.5.6-alpha.3",
"lodash": "^4.17.21",
"svelte": "^3.46.2"
}

View File

@ -1,6 +1,6 @@
{
"name": "@budibase/sdk",
"version": "2.4.44-alpha.19",
"version": "2.5.6-alpha.3",
"description": "Budibase Public API SDK",
"author": "Budibase",
"license": "MPL-2.0",

View File

@ -1,7 +1,7 @@
{
"name": "@budibase/server",
"email": "hi@budibase.com",
"version": "2.4.44-alpha.19",
"version": "2.5.6-alpha.3",
"description": "Budibase Web Server",
"main": "src/index.ts",
"repository": {
@ -45,12 +45,12 @@
"license": "GPL-3.0",
"dependencies": {
"@apidevtools/swagger-parser": "10.0.3",
"@budibase/backend-core": "2.4.44-alpha.19",
"@budibase/client": "2.4.44-alpha.19",
"@budibase/pro": "2.4.44-alpha.19",
"@budibase/shared-core": "2.4.44-alpha.19",
"@budibase/string-templates": "2.4.44-alpha.19",
"@budibase/types": "2.4.44-alpha.19",
"@budibase/backend-core": "2.5.6-alpha.3",
"@budibase/client": "2.5.6-alpha.3",
"@budibase/pro": "2.5.6-alpha.3",
"@budibase/shared-core": "2.5.6-alpha.3",
"@budibase/string-templates": "2.5.6-alpha.3",
"@budibase/types": "2.5.6-alpha.3",
"@bull-board/api": "3.7.0",
"@bull-board/koa": "3.9.4",
"@elastic/elasticsearch": "7.10.0",
@ -117,9 +117,8 @@
"validate.js": "0.13.1",
"vm2": "3.9.16",
"worker-farm": "1.7.0",
"xml2js": "0.4.23",
"yargs": "13.2.4",
"zlib": "1.0.5"
"xml2js": "0.5.0",
"yargs": "13.2.4"
},
"devDependencies": {
"@babel/core": "7.17.4",

View File

@ -11,6 +11,7 @@ if [ "$1" = '/opt/mssql/bin/sqlservr' ]; then
echo "RUNNING BUDIBASE SETUP"
cat setup.sql
#run the setup script to create the DB and the schema in the DB
/opt/mssql-tools/bin/sqlcmd -S localhost -U sa -P Passw0rd -i setup.sql

View File

@ -34,7 +34,7 @@ GO
CREATE TABLE people
(
name varchar(30) NOT NULL,
age varchar(20),
age int default 20 NOT NULL,
CONSTRAINT pk_people PRIMARY KEY NONCLUSTERED (name, age)
);
@ -50,22 +50,22 @@ VALUES
('Processing', 1);
INSERT INTO people (name, age)
VALUES ('Bob', '30'),
('Bert', '10'),
('Jack', '12'),
('Mike', '31'),
('Dave', '44'),
('Jim', '43'),
('Kerry', '32'),
('Julie', '12'),
('Kim', '55'),
('Andy', '33'),
('John', '22'),
('Ruth', '66'),
('Robert', '88'),
('Bobert', '99'),
('Jan', '22'),
('Megan', '11');
VALUES ('Bob', 30),
('Bert', 10),
('Jack', 12),
('Mike', 31),
('Dave', 44),
('Jim', 43),
('Kerry', 32),
('Julie', 12),
('Kim', 55),
('Andy', 33),
('John', 22),
('Ruth', 66),
('Robert', 88),
('Bobert', 99),
('Jan', 22),
('Megan', 11);
IF OBJECT_ID ('Chains.sizes', 'U') IS NOT NULL

View File

@ -3,7 +3,7 @@ USE main;
CREATE TABLE Persons (
PersonID int NOT NULL AUTO_INCREMENT,
CreatedAt datetime,
Age float,
Age float DEFAULT 20 NOT NULL,
LastName varchar(255),
FirstName varchar(255),
Address varchar(255),

View File

@ -8,6 +8,7 @@ CREATE TABLE Persons (
FirstName varchar(255),
Address varchar(255),
City varchar(255) DEFAULT 'Belfast',
Age INTEGER DEFAULT 20 NOT NULL,
Type person_job
);
CREATE TABLE Tasks (

View File

@ -30,7 +30,6 @@ import { finaliseRow, updateRelatedFormula } from "./staticFormula"
import { csv, json, jsonWithSchema, Format } from "../view/exporters"
import { apiFileReturn } from "../../../utilities/fileSystem"
import {
Ctx,
UserCtx,
Database,
LinkDocumentValue,
@ -72,7 +71,7 @@ async function getView(db: Database, viewName: string) {
return viewInfo
}
async function getRawTableData(ctx: Ctx, db: Database, tableId: string) {
async function getRawTableData(ctx: UserCtx, db: Database, tableId: string) {
let rows
if (tableId === InternalTables.USER_METADATA) {
await userController.fetchMetadata(ctx)
@ -188,7 +187,7 @@ export async function save(ctx: UserCtx) {
})
}
export async function fetchView(ctx: Ctx) {
export async function fetchView(ctx: UserCtx) {
const viewName = decodeURIComponent(ctx.params.viewName)
// if this is a table view being looked for just transfer to that
@ -255,7 +254,7 @@ export async function fetchView(ctx: Ctx) {
return rows
}
export async function fetch(ctx: Ctx) {
export async function fetch(ctx: UserCtx) {
const db = context.getAppDB()
const tableId = ctx.params.tableId
@ -264,7 +263,7 @@ export async function fetch(ctx: Ctx) {
return outputProcessing(table, rows)
}
export async function find(ctx: Ctx) {
export async function find(ctx: UserCtx) {
const db = dbCore.getDB(ctx.appId)
const table = await db.get(ctx.params.tableId)
let row = await utils.findRow(ctx, ctx.params.tableId, ctx.params.rowId)
@ -272,7 +271,7 @@ export async function find(ctx: Ctx) {
return row
}
export async function destroy(ctx: Ctx) {
export async function destroy(ctx: UserCtx) {
const db = context.getAppDB()
const { _id } = ctx.request.body
let row = await db.get(_id)
@ -308,7 +307,7 @@ export async function destroy(ctx: Ctx) {
return { response, row }
}
export async function bulkDestroy(ctx: Ctx) {
export async function bulkDestroy(ctx: UserCtx) {
const db = context.getAppDB()
const tableId = ctx.params.tableId
const table = await db.get(tableId)
@ -347,7 +346,7 @@ export async function bulkDestroy(ctx: Ctx) {
return { response: { ok: true }, rows: processedRows }
}
export async function search(ctx: Ctx) {
export async function search(ctx: UserCtx) {
// Fetch the whole table when running in cypress, as search doesn't work
if (!env.COUCH_DB_URL && env.isCypress()) {
return { rows: await fetch(ctx) }
@ -387,7 +386,7 @@ export async function search(ctx: Ctx) {
return response
}
export async function exportRows(ctx: Ctx) {
export async function exportRows(ctx: UserCtx) {
const db = context.getAppDB()
const table = await db.get(ctx.params.tableId)
const rowIds = ctx.request.body.rows
@ -439,7 +438,7 @@ export async function exportRows(ctx: Ctx) {
}
}
export async function fetchEnrichedRow(ctx: Ctx) {
export async function fetchEnrichedRow(ctx: UserCtx) {
const db = context.getAppDB()
const tableId = ctx.params.tableId
const rowId = ctx.params.rowId

View File

@ -5,7 +5,7 @@ import { context } from "@budibase/backend-core"
import { makeExternalQuery } from "../../../integrations/base/query"
import { Row, Table } from "@budibase/types"
import { Format } from "../view/exporters"
import { Ctx } from "@budibase/types"
import { UserCtx } from "@budibase/types"
import sdk from "../../../sdk"
const validateJs = require("validate.js")
const { cloneDeep } = require("lodash/fp")
@ -26,7 +26,7 @@ export async function getDatasourceAndQuery(json: any) {
return makeExternalQuery(datasource, json)
}
export async function findRow(ctx: Ctx, tableId: string, rowId: string) {
export async function findRow(ctx: UserCtx, tableId: string, rowId: string) {
const db = context.getAppDB()
let row
// TODO remove special user case in future

View File

@ -1,98 +1,12 @@
import { generateUserMetadataID, generateUserFlagID } from "../../db/utils"
import { InternalTables } from "../../db/utils"
import { getGlobalUsers, getRawGlobalUser } from "../../utilities/global"
import { getGlobalUsers } from "../../utilities/global"
import { getFullUser } from "../../utilities/users"
import {
context,
roles as rolesCore,
db as dbCore,
} from "@budibase/backend-core"
import { BBContext, Ctx, SyncUserRequest, User } from "@budibase/types"
import { context } from "@budibase/backend-core"
import { UserCtx } from "@budibase/types"
import sdk from "../../sdk"
/**
 * Syncs a single global user's state into the user metadata docs of every
 * app they are (or were) a member of, covering both prod and dev app DBs.
 * Handles three cases:
 *  - user deleted globally (404 on fetch) -> remove metadata everywhere
 *  - user no longer has a role in an app -> remove that app's metadata
 *  - user present with a role -> upsert the metadata doc
 */
export async function syncUser(ctx: Ctx<SyncUserRequest>) {
  let deleting = false,
    user: User | any
  const userId = ctx.params.id
  // previousUser lets the caller report apps the user *used* to belong to,
  // so stale metadata in those apps can also be cleaned up
  const previousUser = ctx.request.body?.previousUser
  try {
    user = (await getRawGlobalUser(userId)) as User
  } catch (err: any) {
    if (err && err.status === 404) {
      // user no longer exists globally - treat as a full deletion
      user = {}
      deleting = true
    } else {
      throw err
    }
  }
  let previousApps = previousUser
    ? Object.keys(previousUser.roles).map(appId => appId)
    : []
  const roles = deleting ? {} : user.roles
  // remove props which aren't useful to metadata
  delete user.password
  delete user.forceResetPassword
  delete user.roles
  // run through all production appIDs in the users roles
  let prodAppIds
  // if they are a builder then get all production app IDs
  if ((user.builder && user.builder.global) || deleting) {
    prodAppIds = await dbCore.getProdAppIDs()
  } else {
    // otherwise only the apps where they hold a non-public role
    prodAppIds = Object.entries(roles)
      .filter(entry => entry[1] !== rolesCore.BUILTIN_ROLE_IDS.PUBLIC)
      .map(([appId]) => appId)
  }
  // de-duplicate current and previous app IDs so each app is visited once
  for (let prodAppId of new Set([...prodAppIds, ...previousApps])) {
    const roleId = roles[prodAppId]
    // no role in this app means the metadata doc should be removed
    const deleteFromApp = !roleId
    const devAppId = dbCore.getDevelopmentAppID(prodAppId)
    for (let appId of [prodAppId, devAppId]) {
      if (!(await dbCore.dbExists(appId))) {
        continue
      }
      await context.doInAppContext(appId, async () => {
        const db = context.getAppDB()
        const metadataId = generateUserMetadataID(userId)
        let metadata
        try {
          metadata = await db.get(metadataId)
        } catch (err) {
          // metadata doc doesn't exist yet
          if (deleteFromApp) {
            // nothing to delete - done with this app
            return
          }
          metadata = {
            tableId: InternalTables.USER_METADATA,
          }
        }
        if (deleteFromApp) {
          await db.remove(metadata)
          return
        }
        // assign the roleId for the metadata doc
        if (roleId) {
          metadata.roleId = roleId
        }
        let combined = sdk.users.combineMetadataAndUser(user, metadata)
        // if its null then there was no updates required
        if (combined) {
          await db.put(combined)
        }
      })
    }
  }
  ctx.body = {
    message: "User synced.",
  }
}
export async function fetchMetadata(ctx: BBContext) {
export async function fetchMetadata(ctx: UserCtx) {
const global = await getGlobalUsers()
const metadata = await sdk.users.rawUserMetadata()
const users = []
@ -111,7 +25,7 @@ export async function fetchMetadata(ctx: BBContext) {
ctx.body = users
}
export async function updateSelfMetadata(ctx: BBContext) {
export async function updateSelfMetadata(ctx: UserCtx) {
// overwrite the ID with current users
ctx.request.body._id = ctx.user?._id
// make sure no stale rev
@ -121,7 +35,7 @@ export async function updateSelfMetadata(ctx: BBContext) {
await updateMetadata(ctx)
}
export async function updateMetadata(ctx: BBContext) {
export async function updateMetadata(ctx: UserCtx) {
const db = context.getAppDB()
const user = ctx.request.body
// this isn't applicable to the user
@ -133,7 +47,7 @@ export async function updateMetadata(ctx: BBContext) {
ctx.body = await db.put(metadata)
}
export async function destroyMetadata(ctx: BBContext) {
export async function destroyMetadata(ctx: UserCtx) {
const db = context.getAppDB()
try {
const dbUser = await db.get(ctx.params.id)
@ -146,11 +60,11 @@ export async function destroyMetadata(ctx: BBContext) {
}
}
export async function findMetadata(ctx: BBContext) {
export async function findMetadata(ctx: UserCtx) {
ctx.body = await getFullUser(ctx, ctx.params.id)
}
export async function setFlag(ctx: BBContext) {
export async function setFlag(ctx: UserCtx) {
const userId = ctx.user?._id
const { flag, value } = ctx.request.body
if (!flag) {
@ -169,7 +83,7 @@ export async function setFlag(ctx: BBContext) {
ctx.body = { message: "Flag set successfully" }
}
export async function getFlags(ctx: BBContext) {
export async function getFlags(ctx: UserCtx) {
const userId = ctx.user?._id
const docId = generateUserFlagID(userId!)
const db = context.getAppDB()

View File

@ -205,41 +205,4 @@ describe("/users", () => {
expect(res.body.message).toEqual("Flag set successfully")
})
})
describe("syncUser", () => {
it("should sync the user", async () => {
let user = await config.createUser()
await config.createApp("New App")
let res = await request
.post(`/api/users/metadata/sync/${user._id}`)
.set(config.defaultHeaders())
.expect(200)
.expect("Content-Type", /json/)
expect(res.body.message).toEqual("User synced.")
})
it("should sync the user when a previous user is specified", async () => {
const app1 = await config.createApp("App 1")
const app2 = await config.createApp("App 2")
let user = await config.createUser({
builder: false,
admin: true,
roles: { [app1.appId]: "ADMIN" },
})
let res = await request
.post(`/api/users/metadata/sync/${user._id}`)
.set(config.defaultHeaders())
.send({
previousUser: {
...user,
roles: { ...user.roles, [app2.appId]: "BASIC" },
},
})
.expect(200)
.expect("Content-Type", /json/)
expect(res.body.message).toEqual("User synced.")
})
})
})

View File

@ -32,11 +32,6 @@ router
authorized(PermissionType.USER, PermissionLevel.WRITE),
controller.destroyMetadata
)
.post(
"/api/users/metadata/sync/:id",
authorized(PermissionType.USER, PermissionLevel.WRITE),
controller.syncUser
)
.post(
"/api/users/flags",
authorized(PermissionType.USER, PermissionLevel.WRITE),

View File

@ -0,0 +1 @@
export * from "./processors"

View File

@ -0,0 +1,14 @@
import userGroupProcessor from "./syncUsers"
import { docUpdates } from "@budibase/backend-core"
export type UpdateCallback = (docId: string) => void
let started = false
/**
 * Idempotently registers the doc-update processors - safe to call more
 * than once, only the first call has any effect.
 * @param updateCb optional callback invoked after each processed doc ID
 */
export function init(updateCb?: UpdateCallback) {
  if (!started) {
    docUpdates.init([userGroupProcessor(updateCb)])
    started = true
  }
}

View File

@ -0,0 +1,35 @@
import { constants, logging } from "@budibase/backend-core"
import { sdk as proSdk } from "@budibase/pro"
import { DocUpdateEvent, UserGroupSyncEvents } from "@budibase/types"
import { syncUsersToAllApps } from "../../sdk/app/applications/sync"
import { UpdateCallback } from "./processors"
/**
 * Builds the user/group doc-update processor: whenever a user or group
 * document changes, the affected user IDs are re-synced into every app.
 * @param updateCb optional hook fired (with the doc ID) after a successful sync
 * @returns the processor definition consumed by docUpdates
 */
export default function process(updateCb?: UpdateCallback) {
  const processor = async (update: DocUpdateEvent) => {
    const docId = update.id
    try {
      let userIds: string[]
      if (docId.startsWith(constants.DocumentType.GROUP)) {
        // group doc - expand to every member of the group
        const group = await proSdk.groups.get(docId)
        userIds = (group.users || []).map(member => member._id)
      } else {
        // otherwise the doc itself is the user
        userIds = [docId]
      }
      if (userIds.length > 0) {
        await syncUsersToAllApps(userIds)
      }
      updateCb?.(docId)
    } catch (err: any) {
      // 404 means the doc vanished - no changes to perform
      if (err?.status !== 404) {
        logging.logAlert("Failed to perform user/group app sync", err)
      }
    }
  }
  return { events: UserGroupSyncEvents, processor }
}

View File

@ -2,4 +2,5 @@ import BudibaseEmitter from "./BudibaseEmitter"
const emitter = new BudibaseEmitter()
export { init } from "./docUpdates"
export default emitter

View File

@ -243,11 +243,14 @@ class SqlServerIntegration extends Sql implements DatasourcePlus {
if (typeof name !== "string") {
continue
}
const hasDefault = def.COLUMN_DEFAULT
const isAuto = !!autoColumns.find(col => col === name)
const required = !!requiredColumns.find(col => col === name)
schema[name] = {
autocolumn: !!autoColumns.find(col => col === name),
autocolumn: isAuto,
name: name,
constraints: {
presence: requiredColumns.find(col => col === name),
presence: required && !isAuto && !hasDefault,
},
...convertSqlType(def.DATA_TYPE),
externalType: def.DATA_TYPE,

View File

@ -229,13 +229,15 @@ class MySQLIntegration extends Sql implements DatasourcePlus {
if (column.Key === "PRI" && primaryKeys.indexOf(column.Key) === -1) {
primaryKeys.push(columnName)
}
const constraints = {
presence: column.Null !== "YES",
}
const hasDefault = column.Default != null
const isAuto: boolean =
typeof column.Extra === "string" &&
(column.Extra === "auto_increment" ||
column.Extra.toLowerCase().includes("generated"))
const required = column.Null !== "YES"
const constraints = {
presence: required && !isAuto && !hasDefault,
}
schema[columnName] = {
name: columnName,
autocolumn: isAuto,

View File

@ -262,15 +262,17 @@ class PostgresIntegration extends Sql implements DatasourcePlus {
column.identity_start ||
column.identity_increment
)
const constraints = {
presence: column.is_nullable === "NO",
}
const hasDefault =
const hasDefault = column.column_default != null
const hasNextVal =
typeof column.column_default === "string" &&
column.column_default.startsWith("nextval")
const isGenerated =
column.is_generated && column.is_generated !== "NEVER"
const isAuto: boolean = hasDefault || identity || isGenerated
const isAuto: boolean = hasNextVal || identity || isGenerated
const required = column.is_nullable === "NO"
const constraints = {
presence: required && !hasDefault && !isGenerated,
}
tables[tableName].schema[columnName] = {
autocolumn: isAuto,
name: columnName,

View File

@ -1,6 +1,117 @@
import env from "../../../environment"
import { db as dbCore, context } from "@budibase/backend-core"
import {
db as dbCore,
context,
docUpdates,
constants,
logging,
roles,
} from "@budibase/backend-core"
import { User, ContextUser, UserGroup } from "@budibase/types"
import { sdk as proSdk } from "@budibase/pro"
import sdk from "../../"
import { getGlobalUsers, processUser } from "../../../utilities/global"
import { generateUserMetadataID, InternalTables } from "../../../db/utils"
type DeletedUser = { _id: string; deleted: boolean }
/**
 * Writes/removes user metadata docs in a single app DB so they reflect the
 * given set of global users. Deleted users (or users with no role in the
 * app) have their metadata removed; everyone else is upserted.
 * @param appId the app DB to update (prod or dev ID)
 * @param users global users, or deletion markers for removed users
 * @param groups all user groups, passed through so role resolution doesn't
 *        re-fetch them per user
 */
async function syncUsersToApp(
  appId: string,
  users: (User | DeletedUser)[],
  groups: UserGroup[]
) {
  // nothing to do if the app database doesn't exist
  if (!(await dbCore.dbExists(appId))) {
    return
  }
  await context.doInAppContext(appId, async () => {
    const db = context.getAppDB()
    for (let user of users) {
      let ctxUser = user as ContextUser
      let deletedUser = false
      const metadataId = generateUserMetadataID(user._id!)
      if ((user as DeletedUser).deleted) {
        deletedUser = true
      }
      // make sure role is correct
      if (!deletedUser) {
        // resolves the user's effective role in this app, including roles
        // granted through the supplied groups
        ctxUser = await processUser(ctxUser, { appId, groups })
      }
      let roleId = ctxUser.roleId
      // PUBLIC means no real membership of the app
      if (roleId === roles.BUILTIN_ROLE_IDS.PUBLIC) {
        roleId = undefined
      }
      let metadata
      try {
        metadata = await db.get(metadataId)
      } catch (err: any) {
        if (err.status !== 404) {
          throw err
        }
        // no metadata and user is to be deleted, can skip
        // no role - user isn't in app anyway
        if (!roleId) {
          continue
        } else if (!deletedUser) {
          // doesn't exist yet, creating it
          metadata = {
            tableId: InternalTables.USER_METADATA,
          }
        }
        // NOTE(review): if deletedUser && roleId here, metadata stays
        // undefined and db.remove(metadata) below would throw - appears
        // unreachable since deletion markers carry no roleId, but confirm
      }
      // the user doesn't exist, or doesn't have a role anymore
      // get rid of their metadata
      if (deletedUser || !roleId) {
        await db.remove(metadata)
        continue
      }
      // assign the roleId for the metadata doc
      if (roleId) {
        metadata.roleId = roleId
      }
      let combined = sdk.users.combineMetadataAndUser(ctxUser, metadata)
      // if no combined returned, there are no updates to make
      if (combined) {
        await db.put(combined)
      }
    }
  })
}
export async function syncUsersToAllApps(userIds: string[]) {
// list of users, if one has been deleted it will be undefined in array
const users = (await getGlobalUsers(userIds, {
noProcessing: true,
})) as User[]
const groups = await proSdk.groups.fetch()
const finalUsers: (User | DeletedUser)[] = []
for (let userId of userIds) {
const user = users.find(user => user._id === userId)
if (!user) {
finalUsers.push({ _id: userId, deleted: true })
} else {
finalUsers.push(user)
}
}
const devAppIds = await dbCore.getDevAppIDs()
let promises = []
for (let devAppId of devAppIds) {
const prodAppId = dbCore.getProdAppID(devAppId)
for (let appId of [prodAppId, devAppId]) {
promises.push(syncUsersToApp(appId, finalUsers, groups))
}
}
const resp = await Promise.allSettled(promises)
const failed = resp.filter(promise => promise.status === "rejected")
if (failed.length > 0) {
const reasons = failed.map(fail => (fail as PromiseRejectedResult).reason)
logging.logAlert("Failed to sync users to apps", reasons)
}
}
export async function syncApp(
appId: string,
@ -23,32 +134,28 @@ export async function syncApp(
// specific case, want to make sure setup is skipped
const prodDb = context.getProdAppDB({ skip_setup: true })
const exists = await prodDb.exists()
if (!exists) {
// the database doesn't exist. Don't replicate
return {
message: "App sync not required, app not deployed.",
}
}
const replication = new dbCore.Replication({
source: prodAppId,
target: appId,
})
let error
try {
const replOpts = replication.appReplicateOpts()
if (opts?.automationOnly) {
replOpts.filter = (doc: any) =>
doc._id.startsWith(dbCore.DocumentType.AUTOMATION)
if (exists) {
const replication = new dbCore.Replication({
source: prodAppId,
target: appId,
})
try {
const replOpts = replication.appReplicateOpts()
if (opts?.automationOnly) {
replOpts.filter = (doc: any) =>
doc._id.startsWith(dbCore.DocumentType.AUTOMATION)
}
await replication.replicate(replOpts)
} catch (err) {
error = err
} finally {
await replication.close()
}
await replication.replicate(replOpts)
} catch (err) {
error = err
} finally {
await replication.close()
}
// sync the users
// sync the users - kept for safe keeping
await sdk.users.syncGlobalUsers()
if (error) {

View File

@ -0,0 +1,137 @@
import TestConfiguration from "../../../../tests/utilities/TestConfiguration"
import { events, context, roles, constants } from "@budibase/backend-core"
import { init } from "../../../../events"
import { rawUserMetadata } from "../../../users/utils"
import EventEmitter from "events"
import { UserGroup, UserMetadata, UserRoles, User } from "@budibase/types"
const config = new TestConfiguration()
let app, group: UserGroup, groupUser: User
const ROLE_ID = roles.BUILTIN_ROLE_IDS.BASIC
const emitter = new EventEmitter()
// Fans processor notifications out to the test's local emitter: group doc
// updates emit "update-group", anything else emits "update-user".
function updateCb(docId: string) {
  const event = docId.startsWith(constants.DocumentType.GROUP)
    ? "update-group"
    : "update-user"
  emitter.emit(event)
}
init(updateCb)
/**
 * Waits for the sync processor to signal completion via the test emitter.
 * Resolves on the first matching event, rejects with an Error after 5s.
 *
 * Fixes over the previous version: uses `once` instead of `on` so a
 * listener doesn't permanently accumulate on the emitter per call, removes
 * the listener on timeout so a late event can't fire a stale handler, and
 * rejects with a descriptive Error instead of a bare reject().
 * @param opts group - wait for a group sync instead of a user sync
 */
function waitForUpdate(opts: { group?: boolean }) {
  return new Promise<void>((resolve, reject) => {
    const event = opts?.group ? "update-group" : "update-user"
    const onUpdate = () => {
      clearTimeout(timeout)
      resolve()
    }
    const timeout = setTimeout(() => {
      // detach so the handler can't resolve a long-settled promise later
      emitter.removeListener(event, onUpdate)
      reject(new Error(`Timed out waiting for "${event}" event`))
    }, 5000)
    emitter.once(event, onUpdate)
  })
}
beforeAll(async () => {
app = await config.init("syncApp")
})
// Creates a global user with the given app roles, then fires the
// user-created event inside the app context so the sync processor runs.
async function createUser(email: string, roles: UserRoles, builder?: boolean) {
  const user = await config.createUser({
    email,
    roles,
    builder: builder || false,
    admin: false,
  })
  await context.doInContext(config.appId!, async () => {
    await events.user.created(user)
  })
  return user
}
// Strips all roles/permissions from the user and fires the user-updated
// event, which should cause their metadata to be removed from the app.
async function removeUserRole(user: User) {
  const final = await config.globalUser({
    ...user,
    id: user._id,
    roles: {},
    builder: false,
    admin: false,
  })
  await context.doInContext(config.appId!, async () => {
    await events.user.updated(final)
  })
}
// Creates a role-less user plus a group (both stored in module state) and
// adds the user to the group - membership alone should grant app access.
async function createGroupAndUser(email: string) {
  groupUser = await config.createUser({
    email,
    roles: {},
    builder: false,
    admin: false,
  })
  group = await config.createGroup()
  await config.addUserToGroup(group._id!, groupUser._id!)
}
// Removes the module-state user from the module-state group and fires the
// user-updated event so the sync processor re-evaluates their app access.
async function removeUserFromGroup() {
  await config.removeUserFromGroup(group._id!, groupUser._id!)
  return context.doInContext(config.appId!, async () => {
    await events.user.updated(groupUser)
  })
}
// Reads the raw user metadata docs from the app DB, inside app context.
async function getUserMetadata(): Promise<UserMetadata[]> {
  return context.doInContext(config.appId!, async () => {
    return await rawUserMetadata()
  })
}
// Role map granting the BASIC role in the test's production app.
function buildRoles() {
  return { [config.prodAppId!]: ROLE_ID }
}
// End-to-end checks that user/group events propagate into app user
// metadata via the doc-update processor wired up by init(updateCb).
describe("app user/group sync", () => {
  const groupEmail = "test2@test.com",
    normalEmail = "test@test.com"
  // Waits for the processor to signal, then asserts whether metadata for
  // the given email exists (or is absent, when opts.notFound is set).
  async function checkEmail(
    email: string,
    opts?: { group?: boolean; notFound?: boolean }
  ) {
    await waitForUpdate(opts || {})
    const metadata = await getUserMetadata()
    const found = metadata.find(data => data.email === email)
    if (opts?.notFound) {
      expect(found).toBeUndefined()
    } else {
      expect(found).toBeDefined()
    }
  }
  it("should be able to sync a new user, add then remove", async () => {
    const user = await createUser(normalEmail, buildRoles())
    await checkEmail(normalEmail)
    await removeUserRole(user)
    await checkEmail(normalEmail, { notFound: true })
  })
  it("should be able to sync a group", async () => {
    await createGroupAndUser(groupEmail)
    await checkEmail(groupEmail, { group: true })
  })
  it("should be able to remove user from group", async () => {
    // depends on module state from the previous test - recreate the group
    // if this test runs in isolation
    if (!group) {
      await createGroupAndUser(groupEmail)
    }
    await removeUserFromGroup()
    await checkEmail(groupEmail, { notFound: true })
  })
  it("should be able to handle builder users", async () => {
    // builders are synced into every app regardless of explicit roles
    await createUser("test3@test.com", {}, true)
    await checkEmail("test3@test.com")
  })
})

View File

@ -121,38 +121,7 @@ describe("syncGlobalUsers", () => {
await syncGlobalUsers()
const metadata = await rawUserMetadata()
expect(metadata).toHaveLength(1)
})
})
})
it("app users are removed when app is removed from user group", async () => {
await config.doInTenant(async () => {
const group = await proSdk.groups.save(structures.userGroups.userGroup())
const user1 = await config.createUser({ admin: false, builder: false })
const user2 = await config.createUser({ admin: false, builder: false })
await proSdk.groups.updateGroupApps(group.id, {
appsToAdd: [
{ appId: config.prodAppId!, roleId: roles.BUILTIN_ROLE_IDS.BASIC },
],
})
await proSdk.groups.addUsers(group.id, [user1._id, user2._id])
await config.doInContext(config.appId, async () => {
await syncGlobalUsers()
expect(await rawUserMetadata()).toHaveLength(3)
await proSdk.groups.removeUsers(group.id, [user1._id])
await syncGlobalUsers()
const metadata = await rawUserMetadata()
expect(metadata).toHaveLength(2)
expect(metadata).not.toContainEqual(
expect.objectContaining({
_id: db.generateUserMetadataID(user1._id),
})
)
expect(metadata).toHaveLength(0)
})
})
})

View File

@ -1,12 +1,13 @@
import { getGlobalUsers } from "../../utilities/global"
import { context, roles as rolesCore } from "@budibase/backend-core"
import {
getGlobalIDFromUserMetadataID,
generateUserMetadataID,
getUserMetadataParams,
InternalTables,
} from "../../db/utils"
import { isEqual } from "lodash"
import { ContextUser, UserMetadata } from "@budibase/types"
import { ContextUser, UserMetadata, User } from "@budibase/types"
export function combineMetadataAndUser(
user: ContextUser,
@ -37,6 +38,10 @@ export function combineMetadataAndUser(
if (found) {
newDoc._rev = found._rev
}
// clear fields that shouldn't be in metadata
delete newDoc.password
delete newDoc.forceResetPassword
delete newDoc.roles
if (found == null || !isEqual(newDoc, found)) {
return {
...found,
@ -60,10 +65,9 @@ export async function rawUserMetadata() {
export async function syncGlobalUsers() {
// sync user metadata
const db = context.getAppDB()
const [users, metadata] = await Promise.all([
getGlobalUsers(),
rawUserMetadata(),
])
const resp = await Promise.all([getGlobalUsers(), rawUserMetadata()])
const users = resp[0] as User[]
const metadata = resp[1] as UserMetadata[]
const toWrite = []
for (let user of users) {
const combined = combineMetadataAndUser(user, metadata)
@ -71,5 +75,19 @@ export async function syncGlobalUsers() {
toWrite.push(combined)
}
}
let foundEmails: string[] = []
for (let data of metadata) {
if (!data._id) {
continue
}
const alreadyExisting = data.email && foundEmails.indexOf(data.email) !== -1
const globalId = getGlobalIDFromUserMetadataID(data._id)
if (!users.find(user => user._id === globalId) || alreadyExisting) {
toWrite.push({ ...data, _deleted: true })
}
if (data.email) {
foundEmails.push(data.email)
}
}
await db.bulkDocs(toWrite)
}

View File

@ -10,7 +10,7 @@ import fs from "fs"
import { watch } from "./watch"
import * as automations from "./automations"
import * as fileSystem from "./utilities/fileSystem"
import eventEmitter from "./events"
import { default as eventEmitter, init as eventInit } from "./events"
import * as migrations from "./migrations"
import * as bullboard from "./automations/bullboard"
import * as pro from "@budibase/pro"
@ -63,6 +63,7 @@ export async function startup(app?: any, server?: any) {
eventEmitter.emitPort(env.PORT)
fileSystem.init()
await redis.init()
eventInit()
// run migrations on startup if not done via http
// not recommended in a clustered environment

View File

@ -49,6 +49,7 @@ import {
SearchFilters,
UserRoles,
} from "@budibase/types"
import { BUILTIN_ROLE_IDS } from "@budibase/backend-core/src/security/roles"
type DefaultUserValues = {
globalUserId: string
@ -306,6 +307,33 @@ class TestConfiguration {
}
}
  /**
   * Creates a user group granting the given role (BASIC by default) in the
   * current production app, returning it in document form (_id/_rev).
   */
  async createGroup(roleId: string = BUILTIN_ROLE_IDS.BASIC) {
    return context.doInTenant(this.tenantId!, async () => {
      const baseGroup = structures.userGroups.userGroup()
      baseGroup.roles = {
        [this.prodAppId]: roleId,
      }
      const { id, rev } = await pro.sdk.groups.save(baseGroup)
      return {
        _id: id,
        _rev: rev,
        ...baseGroup,
      }
    })
  }
  // Adds the user to the group via the pro SDK, in this tenant's context.
  async addUserToGroup(groupId: string, userId: string) {
    return context.doInTenant(this.tenantId!, async () => {
      await pro.sdk.groups.addUsers(groupId, [userId])
    })
  }
  // Removes the user from the group via the pro SDK, in this tenant's context.
  async removeUserFromGroup(groupId: string, userId: string) {
    return context.doInTenant(this.tenantId!, async () => {
      await pro.sdk.groups.removeUsers(groupId, [userId])
    })
  }
async login({ roleId, userId, builder, prodApp = false }: any = {}) {
const appId = prodApp ? this.prodAppId : this.appId
return context.doInAppContext(appId, async () => {

View File

@ -9,6 +9,7 @@ import {
import env from "../environment"
import { groups } from "@budibase/pro"
import { UserCtx, ContextUser, User, UserGroup } from "@budibase/types"
import { global } from "yargs"
export function updateAppRole(
user: ContextUser,
@ -16,7 +17,7 @@ export function updateAppRole(
) {
appId = appId || context.getAppId()
if (!user || !user.roles) {
if (!user || (!user.roles && !user.userGroups)) {
return user
}
// if in an multi-tenancy environment make sure roles are never updated
@ -27,7 +28,7 @@ export function updateAppRole(
return user
}
// always use the deployed app
if (appId) {
if (appId && user.roles) {
user.roleId = user.roles[dbCore.getProdAppID(appId)]
}
// if a role wasn't found then either set as admin (builder) or public (everyone else)
@ -60,7 +61,7 @@ async function checkGroupRoles(
return user
}
async function processUser(
export async function processUser(
user: ContextUser,
opts: { appId?: string; groups?: UserGroup[] } = {}
) {
@ -94,16 +95,15 @@ export async function getGlobalUser(userId: string) {
return processUser(user, { appId })
}
export async function getGlobalUsers(users?: ContextUser[]) {
export async function getGlobalUsers(
userIds?: string[],
opts?: { noProcessing?: boolean }
) {
const appId = context.getAppId()
const db = tenancy.getGlobalDB()
const allGroups = await groups.fetch()
let globalUsers
if (users) {
const globalIds = users.map(user =>
getGlobalIDFromUserMetadataID(user._id!)
)
globalUsers = (await db.allDocs(getMultiIDParams(globalIds))).rows.map(
if (userIds) {
globalUsers = (await db.allDocs(getMultiIDParams(userIds))).rows.map(
row => row.doc
)
} else {
@ -126,15 +126,20 @@ export async function getGlobalUsers(users?: ContextUser[]) {
return globalUsers
}
// pass in the groups, meaning we don't actually need to retrieve them for
// each user individually
return Promise.all(
globalUsers.map(user => processUser(user, { groups: allGroups }))
)
if (opts?.noProcessing) {
return globalUsers
} else {
// pass in the groups, meaning we don't actually need to retrieve them for
// each user individually
const allGroups = await groups.fetch()
return Promise.all(
globalUsers.map(user => processUser(user, { groups: allGroups }))
)
}
}
export async function getGlobalUsersFromMetadata(users: ContextUser[]) {
const globalUsers = await getGlobalUsers(users)
const globalUsers = await getGlobalUsers(users.map(user => user._id!))
return users.map(user => {
const globalUser = globalUsers.find(
globalUser => globalUser && user._id?.includes(globalUser._id)

View File

@ -1,6 +1,6 @@
{
"name": "@budibase/shared-core",
"version": "2.4.44-alpha.19",
"version": "2.5.6-alpha.3",
"description": "Shared data utils",
"main": "dist/cjs/src/index.js",
"types": "dist/mjs/src/index.d.ts",
@ -20,7 +20,7 @@
"dev:builder": "yarn prebuild && concurrently \"tsc -p tsconfig.build.json --watch\" \"tsc -p tsconfig-cjs.build.json --watch\""
},
"dependencies": {
"@budibase/types": "2.4.44-alpha.19"
"@budibase/types": "2.5.6-alpha.3"
},
"devDependencies": {
"concurrently": "^7.6.0",

View File

@ -1,6 +1,6 @@
{
"name": "@budibase/string-templates",
"version": "2.4.44-alpha.19",
"version": "2.5.6-alpha.3",
"description": "Handlebars wrapper for Budibase templating.",
"main": "src/index.cjs",
"module": "dist/bundle.mjs",
@ -30,7 +30,7 @@
"handlebars": "^4.7.6",
"handlebars-utils": "^1.0.6",
"lodash": "^4.17.20",
"vm2": "^3.9.4"
"vm2": "^3.9.15"
},
"devDependencies": {
"@rollup/plugin-commonjs": "^17.1.0",

View File

@ -1,6 +1,6 @@
{
"name": "@budibase/types",
"version": "2.4.44-alpha.19",
"version": "2.5.6-alpha.3",
"description": "Budibase types",
"main": "dist/cjs/index.js",
"types": "dist/mjs/index.d.ts",

View File

@ -2,4 +2,5 @@ import { Document } from "../document"
export interface UserMetadata extends Document {
roleId: string
email?: string
}

View File

@ -1,4 +1,5 @@
import { Hosting } from "../hosting"
import { Group, Identity } from "./identification"
export enum Event {
// USER
@ -186,6 +187,24 @@ export enum Event {
AUDIT_LOGS_DOWNLOADED = "audit_log:downloaded",
}
// Events that should trigger a user/group -> app metadata re-sync.
export const UserGroupSyncEvents: Event[] = [
  Event.USER_CREATED,
  Event.USER_UPDATED,
  Event.USER_DELETED,
  Event.USER_PERMISSION_ADMIN_ASSIGNED,
  Event.USER_PERMISSION_ADMIN_REMOVED,
  Event.USER_PERMISSION_BUILDER_ASSIGNED,
  Event.USER_PERMISSION_BUILDER_REMOVED,
  Event.USER_GROUP_CREATED,
  Event.USER_GROUP_UPDATED,
  Event.USER_GROUP_DELETED,
  Event.USER_GROUP_USERS_ADDED,
  Event.USER_GROUP_USERS_REMOVED,
  Event.USER_GROUP_PERMISSIONS_EDITED,
]
// All events handled asynchronously - currently only the sync events.
export const AsyncEvents: Event[] = [...UserGroupSyncEvents]
// all events that are not audited have been added to this record as undefined, this means
// that Typescript can protect us against new events being added and auditing of those
// events not being considered. This might be a little ugly, but provides a level of
@ -383,3 +402,21 @@ export interface BaseEvent {
}
export type TableExportFormat = "json" | "csv"
// Payload emitted when a document update occurs - identifies the doc and
// the tenant (and app, where applicable) it belongs to.
export type DocUpdateEvent = {
  id: string
  tenantId: string
  appId?: string
}
/**
 * Contract for anything that consumes emitted platform events.
 * Only processEvent is mandatory; the identity/group and shutdown hooks
 * are optional for processors that don't need them.
 */
export interface EventProcessor {
  processEvent(
    event: Event,
    identity: Identity,
    properties: any,
    timestamp?: string | number
  ): Promise<void>
  identify?(identity: Identity, timestamp?: string | number): Promise<void>
  identifyGroup?(group: Group, timestamp?: string | number): Promise<void>
  // optional hook for flushing/cleanup on service shutdown
  shutdown?(): void
}

View File

@ -46,6 +46,8 @@ export interface Identity {
environment: string
installationId?: string
tenantId?: string
// usable - no unique format
realTenantId?: string
hostInfo?: HostInfo
}

View File

@ -7,7 +7,8 @@
"resolveJsonModule": true,
"incremental": true,
"sourceMap": true,
"declaration": true
"declaration": true,
"skipLibCheck": true
},
"include": ["src/**/*"],
"exclude": ["node_modules", "dist", "**/*.spec.ts", "**/*.spec.js"]

View File

@ -1,7 +1,7 @@
{
"name": "@budibase/worker",
"email": "hi@budibase.com",
"version": "2.4.44-alpha.19",
"version": "2.5.6-alpha.3",
"description": "Budibase background service",
"main": "src/index.ts",
"repository": {
@ -37,10 +37,10 @@
"author": "Budibase",
"license": "GPL-3.0",
"dependencies": {
"@budibase/backend-core": "2.4.44-alpha.19",
"@budibase/pro": "2.4.44-alpha.19",
"@budibase/string-templates": "2.4.44-alpha.19",
"@budibase/types": "2.4.44-alpha.19",
"@budibase/backend-core": "2.5.6-alpha.3",
"@budibase/pro": "2.5.6-alpha.3",
"@budibase/string-templates": "2.5.6-alpha.3",
"@budibase/types": "2.5.6-alpha.3",
"@koa/router": "8.0.8",
"@sentry/node": "6.17.7",
"@techpass/passport-openidconnect": "0.3.2",

View File

@ -1,6 +1,6 @@
import Router from "@koa/router"
const compress = require("koa-compress")
const zlib = require("zlib")
import zlib from "zlib"
import { routes } from "./routes"
import { middleware as pro } from "@budibase/pro"
import { auth, middleware } from "@budibase/backend-core"

View File

@ -126,9 +126,8 @@ describe("/api/global/auth", () => {
it("should prevent user from logging in", async () => {
user = await config.createUser()
const account = structures.accounts.ssoAccount() as CloudAccount
mocks.accounts.getAccount.mockReturnValueOnce(
Promise.resolve(account)
)
account.email = user.email
mocks.accounts.getAccountByTenantId.mockResolvedValueOnce(account)
await testSSOUser()
})
@ -186,9 +185,8 @@ describe("/api/global/auth", () => {
it("should prevent user from generating password reset email", async () => {
user = await config.createUser(structures.users.user())
const account = structures.accounts.ssoAccount() as CloudAccount
mocks.accounts.getAccount.mockReturnValueOnce(
Promise.resolve(account)
)
account.email = user.email
mocks.accounts.getAccountByTenantId.mockResolvedValueOnce(account)
await testSSOUser()
})

View File

@ -585,6 +585,59 @@ describe("scim", () => {
totalResults: groupCount,
})
})
it("can fetch groups using displayName filters", async () => {
const groupToFetch = _.sample(groups)
const response = await getScimGroups({
params: { filter: `displayName eq "${groupToFetch!.displayName}"` },
})
expect(response).toEqual({
Resources: [groupToFetch],
itemsPerPage: 1,
schemas: ["urn:ietf:params:scim:api:messages:2.0:ListResponse"],
startIndex: 1,
totalResults: 1,
})
})
it("can fetch groups excluding members", async () => {
const response = await getScimGroups({
params: { excludedAttributes: "members" },
})
expect(response).toEqual({
Resources: expect.arrayContaining(
groups.map(g => {
const { members, ...groupData } = g
return groupData
})
),
itemsPerPage: 25,
schemas: ["urn:ietf:params:scim:api:messages:2.0:ListResponse"],
startIndex: 1,
totalResults: groupCount,
})
})
it("can fetch groups excluding multiple fields", async () => {
const response = await getScimGroups({
params: { excludedAttributes: "members,displayName" },
})
expect(response).toEqual({
Resources: expect.arrayContaining(
groups.map(g => {
const { members, displayName, ...groupData } = g
return groupData
})
),
itemsPerPage: 25,
schemas: ["urn:ietf:params:scim:api:messages:2.0:ListResponse"],
startIndex: 1,
totalResults: groupCount,
})
})
})
})
@ -662,6 +715,16 @@ describe("scim", () => {
status: 404,
})
})
it("should allow excluding members", async () => {
const response = await findScimGroup(group.id, {
qs: "excludedAttributes=members",
})
const { members, ...expectedResponse } = group
expect(response).toEqual(expectedResponse)
})
})
describe("DELETE /api/global/scim/v2/groups/:id", () => {

View File

@ -1,6 +1,6 @@
import { structures } from "../../../tests"
import { mocks } from "@budibase/backend-core/tests"
import { env } from "@budibase/backend-core"
import { env, context } from "@budibase/backend-core"
import * as users from "../users"
import { CloudAccount } from "@budibase/types"
import { isPreventPasswordActions } from "../users"
@ -16,32 +16,50 @@ describe("users", () => {
describe("isPreventPasswordActions", () => {
it("returns false for non sso user", async () => {
const user = structures.users.user()
const result = await users.isPreventPasswordActions(user)
expect(result).toBe(false)
await context.doInTenant(structures.tenant.id(), async () => {
const user = structures.users.user()
const result = await users.isPreventPasswordActions(user)
expect(result).toBe(false)
})
})
it("returns true for sso account user", async () => {
const user = structures.users.user()
mocks.accounts.getAccount.mockReturnValue(
Promise.resolve(structures.accounts.ssoAccount() as CloudAccount)
)
const result = await users.isPreventPasswordActions(user)
expect(result).toBe(true)
await context.doInTenant(structures.tenant.id(), async () => {
const user = structures.users.user()
const account = structures.accounts.ssoAccount() as CloudAccount
account.email = user.email
mocks.accounts.getAccountByTenantId.mockResolvedValueOnce(account)
const result = await users.isPreventPasswordActions(user)
expect(result).toBe(true)
})
})
it("returns false when account doesn't match user email", async () => {
await context.doInTenant(structures.tenant.id(), async () => {
const user = structures.users.user()
const account = structures.accounts.ssoAccount() as CloudAccount
mocks.accounts.getAccountByTenantId.mockResolvedValueOnce(account)
const result = await users.isPreventPasswordActions(user)
expect(result).toBe(false)
})
})
it("returns true for sso user", async () => {
const user = structures.users.ssoUser()
const result = await users.isPreventPasswordActions(user)
expect(result).toBe(true)
await context.doInTenant(structures.tenant.id(), async () => {
const user = structures.users.ssoUser()
const result = await users.isPreventPasswordActions(user)
expect(result).toBe(true)
})
})
describe("enforced sso", () => {
it("returns true for all users when sso is enforced", async () => {
const user = structures.users.user()
pro.features.isSSOEnforced.mockReturnValue(Promise.resolve(true))
const result = await users.isPreventPasswordActions(user)
expect(result).toBe(true)
await context.doInTenant(structures.tenant.id(), async () => {
const user = structures.users.user()
pro.features.isSSOEnforced.mockResolvedValueOnce(true)
const result = await users.isPreventPasswordActions(user)
expect(result).toBe(true)
})
})
})

View File

@ -1,5 +1,4 @@
import env from "../../environment"
import * as apps from "../../utilities/appService"
import * as eventHelpers from "./events"
import {
accounts,
@ -30,9 +29,9 @@ import {
PlatformUser,
PlatformUserByEmail,
RowResponse,
SearchUsersRequest,
User,
SaveUserOpts,
Account,
} from "@budibase/types"
import { sendEmail } from "../../utilities/email"
import { EmailTemplatePurpose } from "../../constants"
@ -90,7 +89,8 @@ const buildUser = async (
requirePassword: true,
},
tenantId: string,
dbUser?: any
dbUser?: any,
account?: Account
): Promise<User> => {
let { password, _id } = user
@ -101,7 +101,7 @@ const buildUser = async (
let hashedPassword
if (password) {
if (await isPreventPasswordActions(user)) {
if (await isPreventPasswordActions(user, account)) {
throw new HTTPError("Password change is disabled for this user", 400)
}
hashedPassword = opts.hashPassword ? await utils.hash(password) : password
@ -172,7 +172,7 @@ const validateUniqueUser = async (email: string, tenantId: string) => {
}
}
export async function isPreventPasswordActions(user: User) {
export async function isPreventPasswordActions(user: User, account?: Account) {
// when in maintenance mode we allow sso users with the admin role
// to perform any password action - this prevents lockout
if (coreEnv.ENABLE_SSO_MAINTENANCE_MODE && user.admin?.global) {
@ -190,8 +190,10 @@ export async function isPreventPasswordActions(user: User) {
}
// Check account sso
const account = await accountSdk.api.getAccount(user.email)
return !!(account && isSSOAccount(account))
if (!account) {
account = await accountSdk.api.getAccountByTenantId(tenancy.getTenantId())
}
return !!(account && account.email === user.email && isSSOAccount(account))
}
export const save = async (
@ -276,9 +278,6 @@ export const save = async (
await platform.users.addUser(tenantId, builtUser._id!, builtUser.email)
await cache.user.invalidateUser(response.id)
// let server know to sync user
await apps.syncUserInApps(_id, dbUser)
await Promise.all(groupPromises)
// finally returned the saved user from the db
@ -402,6 +401,7 @@ export const bulkCreate = async (
newUsers.push(newUser)
}
const account = await accountSdk.api.getAccountByTenantId(tenantId)
// create the promises array that will be called by bulkDocs
newUsers.forEach((user: any) => {
usersToSave.push(
@ -411,7 +411,9 @@ export const bulkCreate = async (
hashPassword: true,
requirePassword: user.requirePassword,
},
tenantId
tenantId,
undefined, // no dbUser
account
)
)
})
@ -425,7 +427,6 @@ export const bulkCreate = async (
// instead of relying on looping tenant creation
await platform.users.addUser(tenantId, user._id, user.email)
await eventHelpers.handleSaveEvents(user, undefined)
await apps.syncUserInApps(user._id)
}
const saved = usersToBulkSave.map(user => {
@ -564,8 +565,6 @@ export const destroy = async (id: string) => {
await eventHelpers.handleDeleteEvents(dbUser)
await cache.user.invalidateUser(userId)
await sessions.invalidateSessions(userId, { reason: "deletion" })
// let server know to sync user
await apps.syncUserInApps(userId, dbUser)
}
const bulkDeleteProcessing = async (dbUser: User) => {
@ -574,8 +573,6 @@ const bulkDeleteProcessing = async (dbUser: User) => {
await eventHelpers.handleDeleteEvents(dbUser)
await cache.user.invalidateUser(userId)
await sessions.invalidateSessions(userId, { reason: "bulk-deletion" })
// let server know to sync user
await apps.syncUserInApps(userId, dbUser)
}
export const invite = async (

View File

@ -18,6 +18,7 @@ export class ScimGroupsAPI extends ScimTestAPI {
startIndex?: number
pageSize?: number
filter?: string
excludedAttributes?: string
}
}
) => {
@ -32,6 +33,9 @@ export class ScimGroupsAPI extends ScimTestAPI {
if (params?.filter) {
url += `filter=${params.filter}&`
}
if (params?.excludedAttributes) {
url += `excludedAttributes=${params.excludedAttributes}&`
}
const res = await this.call(url, "get", requestSettings)
return res.body as ScimGroupListResponse
}
@ -54,9 +58,12 @@ export class ScimGroupsAPI extends ScimTestAPI {
return res.body as ScimGroupResponse
}
find = async (id: string, requestSettings?: Partial<RequestSettings>) => {
find = async (
id: string,
requestSettings?: Partial<RequestSettings> & { qs?: string }
) => {
const res = await this.call(
`/api/global/scim/v2/groups/${id}`,
`/api/global/scim/v2/groups/${id}?${requestSettings?.qs}`,
"get",
requestSettings
)

View File

@ -1,46 +0,0 @@
import fetch from "node-fetch"
import {
constants,
tenancy,
logging,
env as coreEnv,
} from "@budibase/backend-core"
import { checkSlashesInUrl } from "../utilities"
import env from "../environment"
import { SyncUserRequest, User } from "@budibase/types"
async function makeAppRequest(url: string, method: string, body: any) {
if (env.isTest()) {
return
}
const request: any = { headers: {} }
request.headers[constants.Header.API_KEY] = coreEnv.INTERNAL_API_KEY
if (tenancy.isTenantIdSet()) {
request.headers[constants.Header.TENANT_ID] = tenancy.getTenantId()
}
if (body) {
request.headers["Content-Type"] = "application/json"
request.body = JSON.stringify(body)
}
request.method = method
// add x-budibase-correlation-id header
logging.correlation.setHeader(request.headers)
return fetch(checkSlashesInUrl(env.APPS_URL + url), request)
}
export async function syncUserInApps(userId: string, previousUser?: User) {
const body: SyncUserRequest = {
previousUser,
}
const response = await makeAppRequest(
`/api/users/metadata/sync/${userId}`,
"POST",
body
)
if (response && response.status !== 200) {
throw "Unable to sync user."
}
}

View File

@ -36,7 +36,7 @@ describe("Internal API - Application creation, update, publish and delete", () =
const [syncResponse, sync] = await config.api.apps.sync(app.appId!)
expect(sync).toEqual({
message: "App sync not required, app not deployed.",
message: "App sync completed successfully.",
})
})

View File

@ -1386,6 +1386,46 @@
resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39"
integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==
"@budibase/backend-core@2.4.44-alpha.19":
version "2.4.44-alpha.19"
resolved "https://registry.yarnpkg.com/@budibase/backend-core/-/backend-core-2.4.44-alpha.19.tgz#921d8b6965910dd5087c003cfc384d9a50e6523a"
integrity sha512-o4m1R/INNyl4ruKF+GZv994GdjQcPi3XsTDFm6jo15b965g4NYbIHBUTwae1DyvyKhOpsvAONF4IzzcaPjrcig==
dependencies:
"@budibase/nano" "10.1.2"
"@budibase/pouchdb-replication-stream" "1.2.10"
"@budibase/types" "2.4.44-alpha.19"
"@shopify/jest-koa-mocks" "5.0.1"
"@techpass/passport-openidconnect" "0.3.2"
aws-cloudfront-sign "2.2.0"
aws-sdk "2.1030.0"
bcrypt "5.0.1"
bcryptjs "2.4.3"
bull "4.10.1"
correlation-id "4.0.0"
dotenv "16.0.1"
emitter-listener "1.1.2"
ioredis "4.28.0"
joi "17.6.0"
jsonwebtoken "9.0.0"
koa-passport "4.1.4"
koa-pino-logger "4.0.0"
lodash "4.17.21"
lodash.isarguments "3.1.0"
node-fetch "2.6.7"
passport-google-oauth "2.0.0"
passport-jwt "4.0.0"
passport-local "1.0.0"
passport-oauth2-refresh "^2.1.0"
posthog-node "1.3.0"
pouchdb "7.3.0"
pouchdb-find "7.2.2"
redlock "4.2.0"
sanitize-s3-objectkey "0.0.1"
semver "7.3.7"
tar-fs "2.1.1"
uuid "8.3.2"
zlib "1.0.5"
"@budibase/bbui@^0.9.139":
version "0.9.190"
resolved "https://registry.yarnpkg.com/@budibase/bbui/-/bbui-0.9.190.tgz#e1ec400ac90f556bfbc80fc23a04506f1585ea81"
@ -1486,6 +1526,25 @@
pouchdb-promise "^6.0.4"
through2 "^2.0.0"
"@budibase/pro@2.5.6-alpha.3":
version "2.5.6-alpha.3"
resolved "https://registry.yarnpkg.com/@budibase/pro/-/pro-2.5.6-alpha.3.tgz#97d922d0277bc12256fe4481bfdfb880475f9c28"
integrity sha512-temynYWjkQyO8Jq4FDKvwDC2FjAFUxvQq8imoYortefcICKjiVpRUebTudl2PlhNggVYmS9Ec9zLnGkVulg1eA==
dependencies:
"@budibase/backend-core" "2.5.6-alpha.3"
"@budibase/shared-core" "2.4.44-alpha.1"
"@budibase/string-templates" "2.4.44-alpha.1"
"@budibase/types" "2.5.6-alpha.3"
"@koa/router" "8.0.8"
bull "4.10.1"
joi "17.6.0"
jsonwebtoken "8.5.1"
lru-cache "^7.14.1"
memorystream "^0.3.1"
node-fetch "^2.6.1"
scim-patch "^0.7.0"
scim2-parse-filter "^0.2.8"
"@budibase/shared-core@2.4.44-alpha.1":
version "2.4.44-alpha.1"
resolved "https://registry.yarnpkg.com/@budibase/shared-core/-/shared-core-2.4.44-alpha.1.tgz#3d499e40e7e6c646e13a87cd08e01ba116c2ff1d"
@ -1528,6 +1587,13 @@
resolved "https://registry.yarnpkg.com/@budibase/types/-/types-2.4.44-alpha.1.tgz#1679657aa180d9c59afa1dffa611bff0638bd933"
integrity sha512-Sq+8HfM75EBMoOvKYFwELdlxmVN6wNZMofDjT/2G+9aF+Zfe5Tzw69C+unmdBgcGGjGCHEYWSz4mF0v8FPAGbg==
"@budibase/types@2.4.44-alpha.19":
version "2.4.44-alpha.19"
resolved "https://registry.yarnpkg.com/@budibase/types/-/types-2.4.44-alpha.19.tgz#480c44dbb5d750013feb6fba0088d42db9d2930a"
integrity sha512-BWURzML9lRRlApF8PVG/TUsFOP2f0S+PGsaoEm67aPNZRK0n61r9YCTyb9OHnH1NnNZqlHegzKALAshvGBBoKQ==
dependencies:
scim-patch "^0.7.0"
"@bull-board/api@3.7.0":
version "3.7.0"
resolved "https://registry.yarnpkg.com/@bull-board/api/-/api-3.7.0.tgz#231f687187c0cb34e0b97f463917b6aaeb4ef6af"
@ -23898,7 +23964,7 @@ vlq@^0.2.2:
resolved "https://registry.yarnpkg.com/vlq/-/vlq-0.2.3.tgz#8f3e4328cf63b1540c0d67e1b2778386f8975b26"
integrity sha512-DRibZL6DsNhIgYQ+wNdWDL2SL3bKPlVrRiBqV5yuMm++op8W4kGFtaQfCs4KEJn0wBZcHVHJ3eoywX8983k1ow==
vm2@3.9.16, vm2@^3.9.11, vm2@^3.9.4:
vm2@3.9.16, vm2@^3.9.11, vm2@^3.9.15, vm2@^3.9.4:
version "3.9.16"
resolved "https://registry.yarnpkg.com/vm2/-/vm2-3.9.16.tgz#0fbc2a265f7bf8b837cea6f4a908f88a3f93b8e6"
integrity sha512-3T9LscojNTxdOyG+e8gFeyBXkMlOBYDoF6dqZbj+MPVHi9x10UfiTAJIobuchRCp3QvC+inybTbMJIUrLsig0w==
@ -24424,14 +24490,6 @@ xml2js@0.4.19:
sax ">=0.6.0"
xmlbuilder "~9.0.1"
xml2js@0.4.23, xml2js@^0.4.5:
version "0.4.23"
resolved "https://registry.yarnpkg.com/xml2js/-/xml2js-0.4.23.tgz#a0c69516752421eb2ac758ee4d4ccf58843eac66"
integrity sha512-ySPiMjM0+pLDftHgXY4By0uswI3SPKLDw/i3UXbnO8M/p28zqexCUoPmQFrYD+/1BzhGJSs2i1ERWKJAtiLrug==
dependencies:
sax ">=0.6.0"
xmlbuilder "~11.0.0"
xml2js@0.5.0, xml2js@^0.5.0:
version "0.5.0"
resolved "https://registry.yarnpkg.com/xml2js/-/xml2js-0.5.0.tgz#d9440631fbb2ed800203fad106f2724f62c493b7"
@ -24440,6 +24498,14 @@ xml2js@0.5.0, xml2js@^0.5.0:
sax ">=0.6.0"
xmlbuilder "~11.0.0"
xml2js@^0.4.5:
version "0.4.23"
resolved "https://registry.yarnpkg.com/xml2js/-/xml2js-0.4.23.tgz#a0c69516752421eb2ac758ee4d4ccf58843eac66"
integrity sha512-ySPiMjM0+pLDftHgXY4By0uswI3SPKLDw/i3UXbnO8M/p28zqexCUoPmQFrYD+/1BzhGJSs2i1ERWKJAtiLrug==
dependencies:
sax ">=0.6.0"
xmlbuilder "~11.0.0"
xmlbuilder@~11.0.0:
version "11.0.1"
resolved "https://registry.yarnpkg.com/xmlbuilder/-/xmlbuilder-11.0.1.tgz#be9bae1c8a046e76b31127726347d0ad7002beb3"