Merge branch 'BUDI-8064/doc-writethrough' into BUDI-8046/scim-logger
commit 71c5d2645f
@@ -67,7 +67,7 @@
    "@types/lodash": "4.14.200",
    "@types/node-fetch": "2.6.4",
    "@types/pouchdb": "6.4.0",
    "@types/redlock": "4.0.3",
    "@types/redlock": "4.0.7",
    "@types/semver": "7.3.7",
    "@types/tar-fs": "2.0.1",
    "@types/uuid": "8.3.4",
@@ -46,6 +46,25 @@ export default class BaseCache {
    await client.store(key, value, ttl)
  }

  /**
   * Bulk write to the cache.
   */
  async bulkStore(
    data: Record<string, any>,
    ttl: number | null = null,
    opts = { useTenancy: true }
  ) {
    if (opts.useTenancy) {
      data = Object.entries(data).reduce((acc, [key, value]) => {
        acc[generateTenantKey(key)] = value
        return acc
      }, {} as Record<string, any>)
    }

    const client = await this.getClient()
    await client.bulkStore(data, ttl)
  }

  /**
   * Remove from cache.
   */
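The new bulkStore mirrors the single-key store call: with the default opts ({ useTenancy: true }) every key is passed through generateTenantKey before the whole map is handed to the Redis client in one call. A minimal usage sketch, assuming an already initialised BaseCache instance; the keys, values and TTL below are illustrative only:

async function warmSessionCache(cache: BaseCache) {
  // keys are tenant-prefixed internally, so callers pass plain keys
  await cache.bulkStore(
    {
      "session:user-1": { userId: "user-1", active: true },
      "session:user-2": { userId: "user-2", active: false },
    },
    300 // TTL in seconds; null (the default) means no expiry
  )
}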
@@ -1,10 +1,8 @@
import BaseCache from "./base"
import { getDocWritethroughClient } from "../redis/init"
import { AnyDocument, Database, LockName, LockType } from "@budibase/types"
import * as locks from "../redis/redlockImpl"
import { AnyDocument, Database } from "@budibase/types"

import { JobQueue, createQueue } from "../queue"
import * as context from "../context"
import * as dbUtils from "../db"

let CACHE: BaseCache | null = null
@@ -17,7 +15,6 @@ async function getCache() {
}

interface ProcessDocMessage {
  tenantId: string
  dbName: string
  docId: string
  cacheKeyPrefix: string
@@ -28,25 +25,8 @@ export const docWritethroughProcessorQueue = createQueue<ProcessDocMessage>(
)

docWritethroughProcessorQueue.process(async message => {
  const { tenantId, cacheKeyPrefix } = message.data
  await context.doInTenant(tenantId, async () => {
    const lockResponse = await locks.doWithLock(
      {
        type: LockType.TRY_ONCE,
        name: LockName.PERSIST_WRITETHROUGH,
        resource: cacheKeyPrefix,
        ttl: 15000,
      },
      async () => {
        await persistToDb(message.data)
        console.log("DocWritethrough persisted", { data: message.data })
      }
    )

    if (!lockResponse.executed) {
      console.log(`Ignoring redlock conflict in write-through cache`)
    }
  })
})

export async function persistToDb({
@@ -85,7 +65,6 @@ export class DocWritethrough {
  private db: Database
  private _docId: string
  private writeRateMs: number
  private tenantId: string

  private cacheKeyPrefix: string
@@ -94,7 +73,6 @@ export class DocWritethrough {
    this._docId = docId
    this.writeRateMs = writeRateMs
    this.cacheKeyPrefix = `${this.db.name}:${this.docId}`
    this.tenantId = context.getTenantId()
  }

  get docId() {
@@ -108,7 +86,6 @@ export class DocWritethrough {

    docWritethroughProcessorQueue.add(
      {
        tenantId: this.tenantId,
        dbName: this.db.name,
        docId: this.docId,
        cacheKeyPrefix: this.cacheKeyPrefix,
@@ -123,9 +100,10 @@ export class DocWritethrough {
  }

  private async storeToCache(cache: BaseCache, data: Record<string, any>) {
    for (const [key, value] of Object.entries(data)) {
      const cacheKey = this.cacheKeyPrefix + ":data:" + key
      await cache.store(cacheKey, { key, value }, undefined)
    }
    data = Object.entries(data).reduce((acc, [key, value]) => {
      acc[this.cacheKeyPrefix + ":data:" + key] = { key, value }
      return acc
    }, {} as Record<string, any>)
    await cache.bulkStore(data, null)
  }
}
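Taken together, DocWritethrough now buffers each patched field in Redis under `${db.name}:${docId}:data:${key}` via a single bulkStore call, and the queue processor later folds those entries into the document with persistToDb. A rough usage sketch; the patch method and field names are assumptions based on the surrounding hunks rather than lines shown in this diff:

// sketch only: assumes DocWritethrough exposes a patch() that writes to the
// cache (storeToCache) and enqueues a ProcessDocMessage for persistence
const writethrough = new DocWritethrough(db, docId, 10000)

await writethrough.patch({ lastActivity: Date.now() })
await writethrough.patch({ loginCount: 42 })
// each patch is a cheap cache write; docWritethroughProcessorQueue persists later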
@@ -47,10 +47,8 @@ describe("docWritethrough", () => {
  beforeEach(async () => {
    resetTime()
    documentId = structures.uuid()
    await config.doInTenant(async () => {
      docWritethrough = new DocWritethrough(db, documentId, WRITE_RATE_MS)
    })
  })

  it("patching will not persist if timeout does not hit", async () => {
    await config.doInTenant(async () => {
@@ -256,6 +254,8 @@ describe("docWritethrough", () => {

      expect(storeToCacheSpy).toBeCalledTimes(45)

      // Ideally we would spy on persistToDb from ./docWritethrough, but due to our barrel files configuration that would require quite a complex setup.
      // We are relying on the document being stored only once (otherwise we would have _rev updated)
      expect(await db.get(documentId)).toEqual(
        expect.objectContaining({
          _id: documentId,
@@ -1,5 +1,5 @@
import env from "../environment"
import Redis from "ioredis"
import Redis, { Cluster } from "ioredis"
// mock-redis doesn't have any typing
let MockRedis: any | undefined
if (env.MOCK_REDIS) {
@@ -28,7 +28,7 @@ const DEFAULT_SELECT_DB = SelectableDatabase.DEFAULT

// for testing just generate the client once
let CLOSED = false
let CLIENTS: { [key: number]: any } = {}
const CLIENTS: Record<number, Redis> = {}
let CONNECTED = false

// mock redis always connected
@@ -36,7 +36,7 @@ if (env.MOCK_REDIS) {
  CONNECTED = true
}

function pickClient(selectDb: number): any {
function pickClient(selectDb: number) {
  return CLIENTS[selectDb]
}
@@ -201,12 +201,15 @@ class RedisWrapper {
    key = `${db}${SEPARATOR}${key}`
    let stream
    if (CLUSTERED) {
      let node = this.getClient().nodes("master")
      let node = (this.getClient() as never as Cluster).nodes("master")
      stream = node[0].scanStream({ match: key + "*", count: 100 })
    } else {
      stream = this.getClient().scanStream({ match: key + "*", count: 100 })
      stream = (this.getClient() as Redis).scanStream({
        match: key + "*",
        count: 100,
      })
    }
    return promisifyStream(stream, this.getClient())
    return promisifyStream(stream, this.getClient() as any)
  }

  async keys(pattern: string) {
@@ -221,14 +224,16 @@ class RedisWrapper {

  async get(key: string) {
    const db = this._db
    let response = await this.getClient().get(addDbPrefix(db, key))
    const response = await this.getClient().get(addDbPrefix(db, key))
    // overwrite the prefixed key
    // @ts-ignore
    if (response != null && response.key) {
      // @ts-ignore
      response.key = key
    }
    // if its not an object just return the response
    try {
      return JSON.parse(response)
      return JSON.parse(response!)
    } catch (err) {
      return response
    }
@@ -274,13 +279,44 @@ class RedisWrapper {
    }
  }

  async bulkStore(
    data: Record<string, any>,
    expirySeconds: number | null = null
  ) {
    const client = this.getClient()

    const dataToStore = Object.entries(data).reduce((acc, [key, value]) => {
      acc[addDbPrefix(this._db, key)] =
        typeof value === "object" ? JSON.stringify(value) : value
      return acc
    }, {} as Record<string, any>)

    const luaScript = `
      for i, key in ipairs(KEYS) do
        redis.call('MSET', key, ARGV[i])
        ${
          expirySeconds !== null
            ? `redis.call('EXPIRE', key, ARGV[#ARGV])`
            : ""
        }
      end
    `
    const keys = Object.keys(dataToStore)
    const values = Object.values(dataToStore)
    if (expirySeconds !== null) {
      values.push(expirySeconds)
    }

    await client.eval(luaScript, keys.length, ...keys, ...values)
  }

  async getTTL(key: string) {
    const db = this._db
    const prefixedKey = addDbPrefix(db, key)
    return this.getClient().ttl(prefixedKey)
  }

  async setExpiry(key: string, expirySeconds: number | null) {
  async setExpiry(key: string, expirySeconds: number) {
    const db = this._db
    const prefixedKey = addDbPrefix(db, key)
    await this.getClient().expire(prefixedKey, expirySeconds)
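The Lua script above is roughly equivalent to issuing a prefixed SET per key plus an optional EXPIRE, just executed in a single server-side eval. For illustration only (not part of the commit), the same effect could be approximated with an ioredis pipeline, reusing the Redis type already imported in this file; unlike the eval, a pipeline is not atomic:

async function bulkStoreWithPipeline(
  client: Redis,
  dataToStore: Record<string, string>, // already prefixed and serialised, as in bulkStore above
  expirySeconds: number | null
) {
  const pipeline = client.pipeline()
  for (const [key, value] of Object.entries(dataToStore)) {
    pipeline.set(key, value)
    if (expirySeconds !== null) {
      pipeline.expire(key, expirySeconds)
    }
  }
  await pipeline.exec()
}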
@@ -72,7 +72,7 @@ const OPTIONS: Record<keyof typeof LockType, Redlock.Options> = {
export async function newRedlock(opts: Redlock.Options = {}) {
  const options = { ...OPTIONS.DEFAULT, ...opts }
  const redisWrapper = await getLockClient()
  const client = redisWrapper.getClient()
  const client = redisWrapper.getClient() as any
  return new Redlock([client], options)
}
@@ -82,6 +82,11 @@ type SuccessfulRedlockExecution<T> = {
}
type UnsuccessfulRedlockExecution = {
  executed: false
  reason: UnsuccessfulRedlockExecutionReason
}

export const enum UnsuccessfulRedlockExecutionReason {
  LockTakenWithTryOnce = "LOCK_TAKEN_WITH_TRY_ONCE",
}

type RedlockExecution<T> =
@@ -141,7 +146,10 @@ export async function doWithLock<T>(
    if (opts.type === LockType.TRY_ONCE) {
      // don't throw for try-once locks, they will always error
      // due to retry count (0) exceeded
      return { executed: false }
      return {
        executed: false,
        reason: UnsuccessfulRedlockExecutionReason.LockTakenWithTryOnce,
      }
    } else {
      throw e
    }
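With the reason field, callers of doWithLock can tell a skipped try-once execution apart from other outcomes. A hedged consumer sketch, reusing names from the hunks above; the handler body is a placeholder:

const result = await locks.doWithLock(
  {
    type: LockType.TRY_ONCE,
    name: LockName.PERSIST_WRITETHROUGH,
    resource: cacheKeyPrefix,
    ttl: 15000,
  },
  async () => {
    await persistToDb(message.data)
  }
)

if (
  !result.executed &&
  result.reason === UnsuccessfulRedlockExecutionReason.LockTakenWithTryOnce
) {
  // another worker already holds the lock for this resource; safe to skip
}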
@@ -0,0 +1,110 @@
import { generator, structures } from "../../../tests"
import RedisWrapper from "../redis"

describe("redis", () => {
  let redis: RedisWrapper

  beforeEach(async () => {
    redis = new RedisWrapper(structures.db.id())
    await redis.init()
  })

  describe("store", () => {
    it("a basic value can be persisted", async () => {
      const key = structures.uuid()
      const value = generator.word()

      await redis.store(key, value)

      expect(await redis.get(key)).toEqual(value)
    })

    it("objects can be persisted", async () => {
      const key = structures.uuid()
      const value = { [generator.word()]: generator.word() }

      await redis.store(key, value)

      expect(await redis.get(key)).toEqual(value)
    })
  })

  describe("bulkStore", () => {
    function createRandomObject(
      keyLength: number,
      valueGenerator: () => any = () => generator.word()
    ) {
      return generator
        .unique(() => generator.word(), keyLength)
        .reduce((acc, key) => {
          acc[key] = valueGenerator()
          return acc
        }, {} as Record<string, string>)
    }

    it("a basic object can be persisted", async () => {
      const data = createRandomObject(10)

      await redis.bulkStore(data)

      for (const [key, value] of Object.entries(data)) {
        expect(await redis.get(key)).toEqual(value)
      }

      expect(await redis.keys("*")).toHaveLength(10)
    })

    it("a complex object can be persisted", async () => {
      const data = {
        ...createRandomObject(10, () => createRandomObject(5)),
        ...createRandomObject(5),
      }

      await redis.bulkStore(data)

      for (const [key, value] of Object.entries(data)) {
        expect(await redis.get(key)).toEqual(value)
      }

      expect(await redis.keys("*")).toHaveLength(15)
    })

    it("no TTL is set by default", async () => {
      const data = createRandomObject(10)

      await redis.bulkStore(data)

      for (const [key, value] of Object.entries(data)) {
        expect(await redis.get(key)).toEqual(value)
        expect(await redis.getTTL(key)).toEqual(-1)
      }
    })

    it("a bulk store can be persisted with TTL", async () => {
      const ttl = 500
      const data = createRandomObject(8)

      await redis.bulkStore(data, ttl)

      for (const [key, value] of Object.entries(data)) {
        expect(await redis.get(key)).toEqual(value)
        expect(await redis.getTTL(key)).toEqual(ttl)
      }

      expect(await redis.keys("*")).toHaveLength(8)
    })

    it("setting a TTL of -1 will not persist the key", async () => {
      const ttl = -1
      const data = createRandomObject(5)

      await redis.bulkStore(data, ttl)

      for (const [key, value] of Object.entries(data)) {
        expect(await redis.get(key)).toBe(null)
      }

      expect(await redis.keys("*")).toHaveLength(0)
    })
  })
})
@@ -84,16 +84,18 @@ export function getBuiltinRoles(): { [key: string]: RoleDoc } {
  return cloneDeep(BUILTIN_ROLES)
}

export const BUILTIN_ROLE_ID_ARRAY = Object.values(BUILTIN_ROLES).map(
  role => role._id
)
export function isBuiltin(role: string) {
  return getBuiltinRole(role) !== undefined
}

export const BUILTIN_ROLE_NAME_ARRAY = Object.values(BUILTIN_ROLES).map(
  role => role.name
)

export function isBuiltin(role?: string) {
  return BUILTIN_ROLE_ID_ARRAY.some(builtin => role?.includes(builtin))
export function getBuiltinRole(roleId: string): Role | undefined {
  const role = Object.values(BUILTIN_ROLES).find(role =>
    roleId.includes(role._id)
  )
  if (!role) {
    return undefined
  }
  return cloneDeep(role)
}

/**
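Because isBuiltin now delegates to getBuiltinRole, and matching uses includes(), both plain builtin IDs and prefixed variants should resolve. A small sketch of the assumed behaviour; the role IDs below are examples only:

isBuiltin("ADMIN") // true - resolves to the builtin ADMIN role
isBuiltin("role_ADMIN") // true - still matches, since includes() is used on the ID
isBuiltin("custom_role_123") // false - getBuiltinRole() returns undefined

const admin = getBuiltinRole("ADMIN") // a cloneDeep'd Role, safe for callers to mutate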
@@ -123,7 +125,7 @@ export function builtinRoleToNumber(id?: string) {
/**
 * Converts any role to a number, but has to be async to get the roles from db.
 */
export async function roleToNumber(id?: string) {
export async function roleToNumber(id: string) {
  if (isBuiltin(id)) {
    return builtinRoleToNumber(id)
  }
@@ -131,7 +133,7 @@ export async function roleToNumber(id?: string) {
    defaultPublic: true,
  })) as RoleDoc[]
  for (let role of hierarchy) {
    if (isBuiltin(role?.inherits)) {
    if (role?.inherits && isBuiltin(role.inherits)) {
      return builtinRoleToNumber(role.inherits) + 1
    }
  }
@@ -161,35 +163,28 @@ export function lowerBuiltinRoleID(roleId1?: string, roleId2?: string): string {
 * @returns The role object, which may contain an "inherits" property.
 */
export async function getRole(
  roleId?: string,
  roleId: string,
  opts?: { defaultPublic?: boolean }
): Promise<RoleDoc | undefined> {
  if (!roleId) {
    return undefined
  }
  let role: any = {}
): Promise<RoleDoc> {
  // built in roles mostly come from the in-code implementation,
  // but can be extended by a doc stored about them (e.g. permissions)
  if (isBuiltin(roleId)) {
    role = cloneDeep(
      Object.values(BUILTIN_ROLES).find(role => role._id === roleId)
    )
  } else {
  let role: RoleDoc | undefined = getBuiltinRole(roleId)
  if (!role) {
    // make sure has the prefix (if it has it then it won't be added)
    roleId = prefixRoleID(roleId)
  }
  try {
    const db = getAppDB()
    const dbRole = await db.get(getDBRoleID(roleId))
    role = Object.assign(role, dbRole)
    const dbRole = await db.get<RoleDoc>(getDBRoleID(roleId))
    role = Object.assign(role || {}, dbRole)
    // finalise the ID
    role._id = getExternalRoleID(role._id, role.version)
    role._id = getExternalRoleID(role._id!, role.version)
  } catch (err) {
    if (!isBuiltin(roleId) && opts?.defaultPublic) {
      return cloneDeep(BUILTIN_ROLES.PUBLIC)
    }
    // only throw an error if there is no role at all
    if (Object.keys(role).length === 0) {
    if (!role || Object.keys(role).length === 0) {
      throw err
    }
  }
@@ -200,7 +195,7 @@ export async function getRole(
 * Simple function to get all the roles based on the top level user role ID.
 */
async function getAllUserRoles(
  userRoleId?: string,
  userRoleId: string,
  opts?: { defaultPublic?: boolean }
): Promise<RoleDoc[]> {
  // admins have access to all roles
@@ -226,7 +221,7 @@ async function getAllUserRoles(
}

export async function getUserRoleIdHierarchy(
  userRoleId?: string
  userRoleId: string
): Promise<string[]> {
  const roles = await getUserRoleHierarchy(userRoleId)
  return roles.map(role => role._id!)
@@ -241,7 +236,7 @@ export async function getUserRoleIdHierarchy(
 * highest level of access and the last being the lowest level.
 */
export async function getUserRoleHierarchy(
  userRoleId?: string,
  userRoleId: string,
  opts?: { defaultPublic?: boolean }
) {
  // special case, if they don't have a role then they are a public user
@@ -265,9 +260,9 @@ export function checkForRoleResourceArray(
  return rolePerms
}

export async function getAllRoleIds(appId?: string) {
export async function getAllRoleIds(appId: string): Promise<string[]> {
  const roles = await getAllRoles(appId)
  return roles.map(role => role._id)
  return roles.map(role => role._id!)
}

/**
@@ -7,8 +7,14 @@ import {
} from "@budibase/backend-core"
import { getUserMetadataParams, InternalTables } from "../../db/utils"
import {
  AccessibleRolesResponse,
  Database,
  DestroyRoleResponse,
  FetchRolesResponse,
  FindRoleResponse,
  Role,
  SaveRoleRequest,
  SaveRoleResponse,
  UserCtx,
  UserMetadata,
  UserRoles,
@@ -25,43 +31,36 @@ async function updateRolesOnUserTable(
  db: Database,
  roleId: string,
  updateOption: string,
  roleVersion: string | undefined
  roleVersion?: string
) {
  const table = await sdk.tables.getTable(InternalTables.USER_METADATA)
  const schema = table.schema
  const remove = updateOption === UpdateRolesOptions.REMOVED
  let updated = false
  for (let prop of Object.keys(schema)) {
    if (prop === "roleId") {
      updated = true
      const constraints = schema[prop].constraints!
  const constraints = table.schema.roleId?.constraints
  if (!constraints) {
    return
  }
  const updatedRoleId =
    roleVersion === roles.RoleIDVersion.NAME
      ? roles.getExternalRoleID(roleId, roleVersion)
      : roleId
  const indexOfRoleId = constraints.inclusion!.indexOf(updatedRoleId)
  const remove = updateOption === UpdateRolesOptions.REMOVED
  if (remove && indexOfRoleId !== -1) {
    constraints.inclusion!.splice(indexOfRoleId, 1)
  } else if (!remove && indexOfRoleId === -1) {
    constraints.inclusion!.push(updatedRoleId)
  }
      break
    }
  }
  if (updated) {
    await db.put(table)
  }
}

export async function fetch(ctx: UserCtx) {
export async function fetch(ctx: UserCtx<void, FetchRolesResponse>) {
  ctx.body = await roles.getAllRoles()
}

export async function find(ctx: UserCtx) {
export async function find(ctx: UserCtx<void, FindRoleResponse>) {
  ctx.body = await roles.getRole(ctx.params.roleId)
}

export async function save(ctx: UserCtx) {
export async function save(ctx: UserCtx<SaveRoleRequest, SaveRoleResponse>) {
  const db = context.getAppDB()
  let { _id, name, inherits, permissionId, version } = ctx.request.body
  let isCreate = false
@@ -109,9 +108,9 @@ export async function save(ctx: UserCtx) {
  ctx.body = role
}

export async function destroy(ctx: UserCtx) {
export async function destroy(ctx: UserCtx<void, DestroyRoleResponse>) {
  const db = context.getAppDB()
  let roleId = ctx.params.roleId
  let roleId = ctx.params.roleId as string
  if (roles.isBuiltin(roleId)) {
    ctx.throw(400, "Cannot delete builtin role.")
  } else {
@@ -144,14 +143,18 @@ export async function destroy(ctx: UserCtx) {
  ctx.status = 200
}

export async function accessible(ctx: UserCtx) {
export async function accessible(ctx: UserCtx<void, AccessibleRolesResponse>) {
  let roleId = ctx.user?.roleId
  if (!roleId) {
    roleId = roles.BUILTIN_ROLE_IDS.PUBLIC
  }
  if (ctx.user && sharedSdk.users.isAdminOrBuilder(ctx.user)) {
    const appId = context.getAppId()
    if (!appId) {
      ctx.body = []
    } else {
      ctx.body = await roles.getAllRoleIds(appId)
    }
  } else {
    ctx.body = await roles.getUserRoleIdHierarchy(roleId!)
  }
@@ -63,7 +63,7 @@ export async function fetch(ctx: UserCtx) {
export async function clientFetch(ctx: UserCtx) {
  const routing = await getRoutingStructure()
  let roleId = ctx.user?.role?._id
  const roleIds = await roles.getUserRoleIdHierarchy(roleId)
  const roleIds = roleId ? await roles.getUserRoleIdHierarchy(roleId) : []
  for (let topLevel of Object.values(routing.routes) as any) {
    for (let subpathKey of Object.keys(topLevel.subpaths)) {
      let found = false
@@ -251,10 +251,15 @@ describe("/applications", () => {

  describe("permissions", () => {
    it("should only return apps a user has access to", async () => {
      const user = await config.createUser()
      const user = await config.createUser({
        builder: { global: false },
        admin: { global: false },
      })

      await config.withUser(user, async () => {
        const apps = await config.api.application.fetch()
        expect(apps.length).toBeGreaterThan(0)
        expect(apps).toHaveLength(0)
      })
    })
  })
})
@@ -1,5 +1,4 @@
import TestConfig from "../../../../tests/utilities/TestConfiguration"
import env from "../../../../environment"
import TestConfiguration from "../../../../tests/utilities/TestConfiguration"
import supertest from "supertest"

export * as structures from "../../../../tests/utilities/structures"
@@ -47,10 +46,10 @@ export function delay(ms: number) {
}

let request: supertest.SuperTest<supertest.Test> | undefined | null,
  config: TestConfig | null
  config: TestConfiguration | null

export function beforeAll() {
  config = new TestConfig()
  config = new TestConfiguration()
  request = config.getRequest()
}
@@ -299,6 +299,16 @@ export default class TestConfiguration {
    }
  }

  withUser(user: User, f: () => Promise<void>) {
    const oldUser = this.user
    this.user = user
    try {
      return f()
    } finally {
      this.user = oldUser
    }
  }

  // UTILS

  _req<Req extends Record<string, any> | void, Res>(
@@ -18,7 +18,7 @@
    "@budibase/nano": "10.1.5",
    "@types/koa": "2.13.4",
    "@types/pouchdb": "6.4.0",
    "@types/redlock": "4.0.3",
    "@types/redlock": "4.0.7",
    "rimraf": "3.0.2",
    "typescript": "5.2.2"
  },
@@ -14,3 +14,4 @@ export * from "./cookies"
export * from "./automation"
export * from "./layout"
export * from "./query"
export * from "./role"
@@ -0,0 +1,22 @@
import { Role } from "../../documents"

export interface SaveRoleRequest {
  _id?: string
  _rev?: string
  name: string
  inherits: string
  permissionId: string
  version: string
}

export interface SaveRoleResponse extends Role {}

export interface FindRoleResponse extends Role {}

export type FetchRolesResponse = Role[]

export interface DestroyRoleResponse {
  message: string
}

export type AccessibleRolesResponse = string[]
yarn.lock
@@ -5408,7 +5408,7 @@
  resolved "https://registry.yarnpkg.com/@types/http-errors/-/http-errors-2.0.1.tgz#20172f9578b225f6c7da63446f56d4ce108d5a65"
  integrity sha512-/K3ds8TRAfBvi5vfjuz8y6+GiAYBZ0x4tXv1Av6CWBWn0IlADc+ZX9pMq7oU0fNQPnBwIZl3rmeLp6SBApbxSQ==

"@types/ioredis@4.28.10":
"@types/ioredis@4.28.10", "@types/ioredis@^4.28.10":
  version "4.28.10"
  resolved "https://registry.yarnpkg.com/@types/ioredis/-/ioredis-4.28.10.tgz#40ceb157a4141088d1394bb87c98ed09a75a06ff"
  integrity sha512-69LyhUgrXdgcNDv7ogs1qXZomnfOEnSmrmMFqKgt1XMJxmoOSG/u3wYy13yACIfKuMJ8IhKgHafDO3sx19zVQQ==
@@ -5896,12 +5896,13 @@
  dependencies:
    "@types/node" "*"

"@types/redlock@4.0.3":
  version "4.0.3"
  resolved "https://registry.yarnpkg.com/@types/redlock/-/redlock-4.0.3.tgz#aeab5fe5f0d433a125f6dcf9a884372ac0cddd4b"
  integrity sha512-mcvvrquwREbAqyZALNBIlf49AL9Aa324BG+J/Dv4TAP8g+nxQMBI4/APNqqS99QEY7VTNT9XvsaczCVGK8uNnQ==
"@types/redlock@4.0.7":
  version "4.0.7"
  resolved "https://registry.yarnpkg.com/@types/redlock/-/redlock-4.0.7.tgz#33ed56f22a38d6b2f2e6ae5ed1b3fc1875a08e6b"
  integrity sha512-5D6egBv0fCfdbmnCETjEynVuiwFMEFFc3YFjh9EwhaaVTAi0YmB6UI1swq1S1rjIu+n27ppmlTFDK3D3cadJqg==
  dependencies:
    "@types/bluebird" "*"
    "@types/ioredis" "^4.28.10"
    "@types/redis" "^2.8.0"

"@types/request@^2.48.7":