Merge remote-tracking branch 'origin/master' into feature/formblock-styles-tab-reflow
Commit: b6de1c544b

@@ -1,5 +1,5 @@
 {
-  "version": "2.11.37",
+  "version": "2.11.39",
   "npmClient": "yarn",
   "packages": [
     "packages/*"
|
||||
|
|
|
@@ -119,8 +119,8 @@ export class Writethrough {
     this.writeRateMs = writeRateMs
   }

-  async put(doc: any) {
-    return put(this.db, doc, this.writeRateMs)
+  async put(doc: any, writeRateMs: number = this.writeRateMs) {
+    return put(this.db, doc, writeRateMs)
   }

   async get(id: string) {
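A quick usage sketch of the widened put signature above, assuming a Writethrough constructed as new Writethrough(db, writeRateMs); the 500 ms override below is illustrative only, not taken from this diff.

const cache = new Writethrough(db, 10000) // instance default: write through at most every 10s
await cache.put(doc)      // uses the instance-level writeRateMs
await cache.put(doc, 500) // per-call override for a document that must persist sooner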
|
||||
|
|
|
@@ -21,17 +21,21 @@ import {
   User,
   UserStatus,
   UserGroup,
   ContextUser,
 } from "@budibase/types"
 import {
   getAccountHolderFromUserIds,
   isAdmin,
+  isCreator,
   validateUniqueUser,
 } from "./utils"
 import { searchExistingEmails } from "./lookup"
 import { hash } from "../utils"

-type QuotaUpdateFn = (change: number, cb?: () => Promise<any>) => Promise<any>
+type QuotaUpdateFn = (
+  change: number,
+  creatorsChange: number,
+  cb?: () => Promise<any>
+) => Promise<any>
 type GroupUpdateFn = (groupId: string, userIds: string[]) => Promise<any>
 type FeatureFn = () => Promise<Boolean>
 type GroupGetFn = (ids: string[]) => Promise<UserGroup[]>
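For orientation, a minimal sketch of a function matching the widened QuotaUpdateFn shape; updateUsage is a hypothetical helper, the real implementation is supplied by @budibase/pro quotas.

const addUsers: QuotaUpdateFn = async (change, creatorsChange, cb) => {
  await updateUsage("users", change)           // hypothetical helper
  await updateUsage("creators", creatorsChange) // hypothetical helper
  return cb ? cb() : undefined
}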
|
||||
|
@@ -135,7 +139,7 @@ export class UserDB {
     if (!fullUser.roles) {
       fullUser.roles = {}
     }
-    // add the active status to a user if its not provided
+    // add the active status to a user if it's not provided
     if (fullUser.status == null) {
       fullUser.status = UserStatus.ACTIVE
     }
|
||||
|
@@ -246,7 +250,8 @@ export class UserDB {
     }

    const change = dbUser ? 0 : 1 // no change if there is existing user
-    return UserDB.quotas.addUsers(change, async () => {
+    const creatorsChange = isCreator(dbUser) !== isCreator(user) ? 1 : 0
+    return UserDB.quotas.addUsers(change, creatorsChange, async () => {
      await validateUniqueUser(email, tenantId)

      let builtUser = await UserDB.buildUser(user, opts, tenantId, dbUser)
|
||||
|
@@ -308,6 +313,7 @@ export class UserDB {

     let usersToSave: any[] = []
     let newUsers: any[] = []
+    let newCreators: any[] = []

     const emails = newUsersRequested.map((user: User) => user.email)
     const existingEmails = await searchExistingEmails(emails)
|
||||
|
@ -328,59 +334,66 @@ export class UserDB {
|
|||
}
|
||||
newUser.userGroups = groups
|
||||
newUsers.push(newUser)
|
||||
if (isCreator(newUser)) {
|
||||
newCreators.push(newUser)
|
||||
}
|
||||
}
|
||||
|
||||
const account = await accountSdk.getAccountByTenantId(tenantId)
|
||||
return UserDB.quotas.addUsers(newUsers.length, async () => {
|
||||
// create the promises array that will be called by bulkDocs
|
||||
newUsers.forEach((user: any) => {
|
||||
usersToSave.push(
|
||||
UserDB.buildUser(
|
||||
user,
|
||||
{
|
||||
hashPassword: true,
|
||||
requirePassword: user.requirePassword,
|
||||
},
|
||||
tenantId,
|
||||
undefined, // no dbUser
|
||||
account
|
||||
return UserDB.quotas.addUsers(
|
||||
newUsers.length,
|
||||
newCreators.length,
|
||||
async () => {
|
||||
// create the promises array that will be called by bulkDocs
|
||||
newUsers.forEach((user: any) => {
|
||||
usersToSave.push(
|
||||
UserDB.buildUser(
|
||||
user,
|
||||
{
|
||||
hashPassword: true,
|
||||
requirePassword: user.requirePassword,
|
||||
},
|
||||
tenantId,
|
||||
undefined, // no dbUser
|
||||
account
|
||||
)
|
||||
)
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
const usersToBulkSave = await Promise.all(usersToSave)
|
||||
await usersCore.bulkUpdateGlobalUsers(usersToBulkSave)
|
||||
const usersToBulkSave = await Promise.all(usersToSave)
|
||||
await usersCore.bulkUpdateGlobalUsers(usersToBulkSave)
|
||||
|
||||
// Post-processing of bulk added users, e.g. events and cache operations
|
||||
for (const user of usersToBulkSave) {
|
||||
// TODO: Refactor to bulk insert users into the info db
|
||||
// instead of relying on looping tenant creation
|
||||
await platform.users.addUser(tenantId, user._id, user.email)
|
||||
await eventHelpers.handleSaveEvents(user, undefined)
|
||||
}
|
||||
// Post-processing of bulk added users, e.g. events and cache operations
|
||||
for (const user of usersToBulkSave) {
|
||||
// TODO: Refactor to bulk insert users into the info db
|
||||
// instead of relying on looping tenant creation
|
||||
await platform.users.addUser(tenantId, user._id, user.email)
|
||||
await eventHelpers.handleSaveEvents(user, undefined)
|
||||
}
|
||||
|
||||
const saved = usersToBulkSave.map(user => {
|
||||
return {
|
||||
_id: user._id,
|
||||
email: user.email,
|
||||
}
|
||||
})
|
||||
|
||||
// now update the groups
|
||||
if (Array.isArray(saved) && groups) {
|
||||
const groupPromises = []
|
||||
const createdUserIds = saved.map(user => user._id)
|
||||
for (let groupId of groups) {
|
||||
groupPromises.push(UserDB.groups.addUsers(groupId, createdUserIds))
|
||||
}
|
||||
await Promise.all(groupPromises)
|
||||
}
|
||||
|
||||
const saved = usersToBulkSave.map(user => {
|
||||
return {
|
||||
_id: user._id,
|
||||
email: user.email,
|
||||
successful: saved,
|
||||
unsuccessful,
|
||||
}
|
||||
})
|
||||
|
||||
// now update the groups
|
||||
if (Array.isArray(saved) && groups) {
|
||||
const groupPromises = []
|
||||
const createdUserIds = saved.map(user => user._id)
|
||||
for (let groupId of groups) {
|
||||
groupPromises.push(UserDB.groups.addUsers(groupId, createdUserIds))
|
||||
}
|
||||
await Promise.all(groupPromises)
|
||||
}
|
||||
|
||||
return {
|
||||
successful: saved,
|
||||
unsuccessful,
|
||||
}
|
||||
})
|
||||
)
|
||||
}
|
||||
|
||||
static async bulkDelete(userIds: string[]): Promise<BulkUserDeleted> {
|
||||
|
@@ -420,11 +433,12 @@ export class UserDB {
      _deleted: true,
    }))
    const dbResponse = await usersCore.bulkUpdateGlobalUsers(toDelete)
+    const creatorsToDelete = usersToDelete.filter(isCreator)

-    await UserDB.quotas.removeUsers(toDelete.length)
    for (let user of usersToDelete) {
      await bulkDeleteProcessing(user)
    }
+    await UserDB.quotas.removeUsers(toDelete.length, creatorsToDelete.length)

    // Build Response
    // index users by id
|
||||
|
@@ -473,7 +487,8 @@ export class UserDB {

    await db.remove(userId, dbUser._rev)

-    await UserDB.quotas.removeUsers(1)
+    const creatorsToDelete = isCreator(dbUser) ? 1 : 0
+    await UserDB.quotas.removeUsers(1, creatorsToDelete)
    await eventHelpers.handleDeleteEvents(dbUser)
    await cache.user.invalidateUser(userId)
    await sessions.invalidateSessions(userId, { reason: "deletion" })
|
||||
|
|
|
@ -14,14 +14,15 @@ import {
|
|||
} from "../db"
|
||||
import {
|
||||
BulkDocsResponse,
|
||||
ContextUser,
|
||||
SearchQuery,
|
||||
SearchQueryOperators,
|
||||
SearchUsersRequest,
|
||||
User,
|
||||
ContextUser,
|
||||
} from "@budibase/types"
|
||||
import * as context from "../context"
|
||||
import { getGlobalDB } from "../context"
|
||||
import * as context from "../context"
|
||||
import { isCreator } from "./utils"
|
||||
|
||||
type GetOpts = { cleanup?: boolean }
|
||||
|
||||
|
@@ -283,6 +284,19 @@ export async function getUserCount() {
   return response.total_rows
 }

+export async function getCreatorCount() {
+  let creators = 0
+  async function iterate(startPage?: string) {
+    const page = await paginatedUsers({ bookmark: startPage })
+    creators += page.data.filter(isCreator).length
+    if (page.hasNextPage) {
+      await iterate(page.nextPage)
+    }
+  }
+  await iterate()
+  return creators
+}
+
 // used to remove the builder/admin permissions, for processing the
 // user as an app user (they may have some specific role/group
 export function removePortalUserPermissions(user: User | ContextUser) {
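A hedged usage sketch of the new helper: it pages through every user and counts isCreator matches, so a caller only needs the right tenant context (tenantId below is an assumed variable; mirrors how syncCreators.ts consumes it later in this diff).

import { context, users } from "@budibase/backend-core"

const creatorCount = await context.doInTenant(tenantId, () =>
  users.getCreatorCount()
)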
|
||||
|
|
|
@@ -10,6 +10,7 @@ import { getAccountByTenantId } from "../accounts"
 // extract from shared-core to make easily accessible from backend-core
 export const isBuilder = sdk.users.isBuilder
 export const isAdmin = sdk.users.isAdmin
+export const isCreator = sdk.users.isCreator
 export const isGlobalBuilder = sdk.users.isGlobalBuilder
 export const isAdminOrBuilder = sdk.users.isAdminOrBuilder
 export const hasAdminPermissions = sdk.users.hasAdminPermissions
|
||||
|
|
|
@ -0,0 +1,54 @@
|
|||
const _ = require('lodash/fp')
|
||||
const {structures} = require("../../../tests")
|
||||
|
||||
jest.mock("../../../src/context")
|
||||
jest.mock("../../../src/db")
|
||||
|
||||
const context = require("../../../src/context")
|
||||
const db = require("../../../src/db")
|
||||
|
||||
const {getCreatorCount} = require('../../../src/users/users')
|
||||
|
||||
describe("Users", () => {
|
||||
|
||||
let getGlobalDBMock
|
||||
let getGlobalUserParamsMock
|
||||
let paginationMock
|
||||
|
||||
beforeEach(() => {
|
||||
jest.resetAllMocks()
|
||||
|
||||
getGlobalDBMock = jest.spyOn(context, "getGlobalDB")
|
||||
getGlobalUserParamsMock = jest.spyOn(db, "getGlobalUserParams")
|
||||
paginationMock = jest.spyOn(db, "pagination")
|
||||
})
|
||||
|
||||
it("Retrieves the number of creators", async () => {
|
||||
const getUsers = (offset, limit, creators = false) => {
|
||||
const range = _.range(offset, limit)
|
||||
const opts = creators ? {builder: {global: true}} : undefined
|
||||
return range.map(() => structures.users.user(opts))
|
||||
}
|
||||
const page1Data = getUsers(0, 8)
|
||||
const page2Data = getUsers(8, 12, true)
|
||||
getGlobalDBMock.mockImplementation(() => ({
|
||||
name : "fake-db",
|
||||
allDocs: () => ({
|
||||
rows: [...page1Data, ...page2Data]
|
||||
})
|
||||
}))
|
||||
paginationMock.mockImplementationOnce(() => ({
|
||||
data: page1Data,
|
||||
hasNextPage: true,
|
||||
nextPage: "1"
|
||||
}))
|
||||
paginationMock.mockImplementation(() => ({
|
||||
data: page2Data,
|
||||
hasNextPage: false,
|
||||
nextPage: undefined
|
||||
}))
|
||||
const creatorsCount = await getCreatorCount()
|
||||
expect(creatorsCount).toBe(4)
|
||||
expect(paginationMock).toHaveBeenCalledTimes(2)
|
||||
})
|
||||
})
|
|
@@ -72,6 +72,11 @@ export function quotas(): Quotas {
       value: 1,
       triggers: [],
     },
+    creators: {
+      name: "Creators",
+      value: 1,
+      triggers: [],
+    },
     userGroups: {
       name: "User Groups",
       value: 1,
|
||||
|
@@ -118,6 +123,10 @@ export function customer(): Customer {
 export function subscription(): Subscription {
   return {
     amount: 10000,
+    amounts: {
+      user: 10000,
+      creator: 0,
+    },
     cancelAt: undefined,
     currency: "usd",
     currentPeriodEnd: 0,
|
||||
|
@@ -126,6 +135,10 @@ export function subscription(): Subscription {
     duration: PriceDuration.MONTHLY,
     pastDueAt: undefined,
     quantity: 0,
+    quantities: {
+      user: 0,
+      creator: 0,
+    },
     status: "active",
   }
 }
|
||||
|
|
|
@@ -1,6 +1,6 @@
 import { MonthlyQuotaName, QuotaUsage } from "@budibase/types"

-export const usage = (): QuotaUsage => {
+export const usage = (users: number = 0, creators: number = 0): QuotaUsage => {
   return {
     _id: "usage_quota",
     quotaReset: new Date().toISOString(),
|
||||
|
@@ -58,7 +58,8 @@ export const usage = (): QuotaUsage => {
     usageQuota: {
       apps: 0,
       plugins: 0,
-      users: 0,
+      users,
+      creators,
       userGroups: 0,
       rows: 0,
       triggers: {},
|
||||
|
|
|
@ -43,7 +43,7 @@
|
|||
})
|
||||
</script>
|
||||
|
||||
<TestimonialPage>
|
||||
<TestimonialPage enabled={$organisation.testimonialsEnabled}>
|
||||
<Layout gap="S" noPadding>
|
||||
<img alt="logo" src={$organisation.logoUrl || Logo} />
|
||||
<span class="heading-wrap">
|
||||
|
|
|
@ -53,7 +53,7 @@
|
|||
})
|
||||
</script>
|
||||
|
||||
<TestimonialPage>
|
||||
<TestimonialPage enabled={$organisation.testimonialsEnabled}>
|
||||
<Layout gap="S" noPadding>
|
||||
{#if loaded}
|
||||
<img alt="logo" src={$organisation.logoUrl || Logo} />
|
||||
|
|
|
@ -9,7 +9,9 @@ export const buildRelationshipEndpoints = API => ({
|
|||
if (!tableId || !rowId) {
|
||||
return []
|
||||
}
|
||||
const response = await API.get({ url: `/api/${tableId}/${rowId}/enrich` })
|
||||
const response = await API.get({
|
||||
url: `/api/${tableId}/${rowId}/enrich?field=${fieldName}`,
|
||||
})
|
||||
if (!fieldName) {
|
||||
return response || []
|
||||
} else {
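Roughly, the client now scopes enrichment to one relationship column through the field query parameter; a sketch of the request this endpoint builder issues (identifiers illustrative):

// e.g. GET /api/ta_abc123/ro_def456/enrich?field=projects
await API.get({ url: `/api/${tableId}/${rowId}/enrich?field=${fieldName}` })

When fieldName is undefined the whole enriched row still comes back, which is why the guard above falls back to response || [].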
|
||||
|
|
|
@ -260,29 +260,31 @@
|
|||
class:wrap={editable || contentLines > 1}
|
||||
on:wheel={e => (focused ? e.stopPropagation() : null)}
|
||||
>
|
||||
{#each value || [] as relationship}
|
||||
{#if relationship[primaryDisplay] || relationship.primaryDisplay}
|
||||
<div class="badge">
|
||||
<span
|
||||
on:click={editable
|
||||
? () => showRelationship(relationship._id)
|
||||
: null}
|
||||
>
|
||||
{readable(
|
||||
relationship[primaryDisplay] || relationship.primaryDisplay
|
||||
)}
|
||||
</span>
|
||||
{#if editable}
|
||||
<Icon
|
||||
name="Close"
|
||||
size="XS"
|
||||
hoverable
|
||||
on:click={() => toggleRow(relationship)}
|
||||
/>
|
||||
{/if}
|
||||
</div>
|
||||
{/if}
|
||||
{/each}
|
||||
{#if Array.isArray(value) && value.length}
|
||||
{#each value as relationship}
|
||||
{#if relationship[primaryDisplay] || relationship.primaryDisplay}
|
||||
<div class="badge">
|
||||
<span
|
||||
on:click={editable
|
||||
? () => showRelationship(relationship._id)
|
||||
: null}
|
||||
>
|
||||
{readable(
|
||||
relationship[primaryDisplay] || relationship.primaryDisplay
|
||||
)}
|
||||
</span>
|
||||
{#if editable}
|
||||
<Icon
|
||||
name="Close"
|
||||
size="XS"
|
||||
hoverable
|
||||
on:click={() => toggleRow(relationship)}
|
||||
/>
|
||||
{/if}
|
||||
</div>
|
||||
{/if}
|
||||
{/each}
|
||||
{/if}
|
||||
{#if editable}
|
||||
<div class="add" on:click={open}>
|
||||
<Icon name="Add" size="S" />
|
||||
|
@ -318,7 +320,7 @@
|
|||
<div class="searching">
|
||||
<ProgressCircle size="S" />
|
||||
</div>
|
||||
{:else if searchResults?.length}
|
||||
{:else if Array.isArray(searchResults) && searchResults.length}
|
||||
<div class="results">
|
||||
{#each searchResults as row, idx}
|
||||
<div
|
||||
|
|
|
@@ -1 +1 @@
-Subproject commit 044bec6447066b215932d6726c437e7ec5a9e42e
+Subproject commit 570d14aa44aa88f4d053856322210f0008ba5c76
|
|
@@ -280,17 +280,8 @@ function isEditableColumn(column: FieldSchema) {
   return !(isExternalAutoColumn || isFormula)
 }

-export type ExternalRequestReturnType<T> = T extends Operation.READ
-  ?
-      | Row[]
-      | {
-          row: Row
-          table: Table
-        }
-  : {
-      row: Row
-      table: Table
-    }
+export type ExternalRequestReturnType<T extends Operation> =
+  T extends Operation.READ ? Row[] : { row: Row; table: Table }

 export class ExternalRequest<T extends Operation> {
   private readonly operation: T
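With the simplified conditional type above, the operation literal drives the result type end to end; a small sketch of what the compiler now infers at the call sites that appear later in this diff:

// READ narrows to an array of rows
const rows = await handleRequest(Operation.READ, tableId, {
  includeSqlRelationships: IncludeRelationship.INCLUDE,
}) // Row[]

// every other operation narrows to a single row plus its table
const { row, table } = await handleRequest(Operation.DELETE, tableId, {
  id: breakRowIdField(_id),
}) // { row: Row; table: Table }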
|
||||
|
@ -857,11 +848,12 @@ export class ExternalRequest<T extends Operation> {
|
|||
}
|
||||
const output = this.outputProcessing(response, table, relationships)
|
||||
// if reading it'll just be an array of rows, return whole thing
|
||||
const result = (
|
||||
operation === Operation.READ && Array.isArray(response)
|
||||
? output
|
||||
: { row: output[0], table }
|
||||
) as ExternalRequestReturnType<T>
|
||||
return result
|
||||
if (operation === Operation.READ) {
|
||||
return (
|
||||
Array.isArray(output) ? output : [output]
|
||||
) as ExternalRequestReturnType<T>
|
||||
} else {
|
||||
return { row: output[0], table } as ExternalRequestReturnType<T>
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -44,7 +44,7 @@ export async function handleRequest<T extends Operation>(
     return [] as any
   }

-  return new ExternalRequest(operation, tableId, opts?.datasource).run(
+  return new ExternalRequest<T>(operation, tableId, opts?.datasource).run(
     opts || {}
   )
 }
|
||||
|
@ -148,17 +148,17 @@ export async function find(ctx: UserCtx): Promise<Row> {
|
|||
export async function destroy(ctx: UserCtx) {
|
||||
const tableId = utils.getTableId(ctx)
|
||||
const _id = ctx.request.body._id
|
||||
const { row } = (await handleRequest(Operation.DELETE, tableId, {
|
||||
const { row } = await handleRequest(Operation.DELETE, tableId, {
|
||||
id: breakRowIdField(_id),
|
||||
includeSqlRelationships: IncludeRelationship.EXCLUDE,
|
||||
})) as { row: Row }
|
||||
})
|
||||
return { response: { ok: true, id: _id }, row }
|
||||
}
|
||||
|
||||
export async function bulkDestroy(ctx: UserCtx) {
|
||||
const { rows } = ctx.request.body
|
||||
const tableId = utils.getTableId(ctx)
|
||||
let promises: Promise<Row[] | { row: Row; table: Table }>[] = []
|
||||
let promises: Promise<{ row: Row; table: Table }>[] = []
|
||||
for (let row of rows) {
|
||||
promises.push(
|
||||
handleRequest(Operation.DELETE, tableId, {
|
||||
|
@ -167,7 +167,7 @@ export async function bulkDestroy(ctx: UserCtx) {
|
|||
})
|
||||
)
|
||||
}
|
||||
const responses = (await Promise.all(promises)) as { row: Row }[]
|
||||
const responses = await Promise.all(promises)
|
||||
return { response: { ok: true }, rows: responses.map(resp => resp.row) }
|
||||
}
|
||||
|
||||
|
@ -183,11 +183,11 @@ export async function fetchEnrichedRow(ctx: UserCtx) {
|
|||
ctx.throw(400, "Datasource has not been configured for plus API.")
|
||||
}
|
||||
const tables = datasource.entities
|
||||
const response = (await handleRequest(Operation.READ, tableId, {
|
||||
const response = await handleRequest(Operation.READ, tableId, {
|
||||
id,
|
||||
datasource,
|
||||
includeSqlRelationships: IncludeRelationship.INCLUDE,
|
||||
})) as Row[]
|
||||
})
|
||||
const table: Table = tables[tableName]
|
||||
const row = response[0]
|
||||
// this seems like a lot of work, but basically we need to dig deeper for the enrich
|
||||
|
|
|
@ -26,6 +26,7 @@ import { fixRow } from "../public/rows"
|
|||
import sdk from "../../../sdk"
|
||||
import * as exporters from "../view/exporters"
|
||||
import { apiFileReturn } from "../../../utilities/fileSystem"
|
||||
import { Format } from "../view/exporters"
|
||||
export * as views from "./views"
|
||||
|
||||
function pickApi(tableId: any) {
|
||||
|
@ -267,7 +268,7 @@ export const exportRows = async (
|
|||
async () => {
|
||||
const { fileName, content } = await sdk.rows.exportRows({
|
||||
tableId,
|
||||
format,
|
||||
format: format as Format,
|
||||
rowIds: rows,
|
||||
columns,
|
||||
query,
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
import * as linkRows from "../../../db/linkedRows"
|
||||
import {
|
||||
generateRowID,
|
||||
getMultiIDParams,
|
||||
getTableIDFromRowID,
|
||||
InternalTables,
|
||||
} from "../../../db/utils"
|
||||
|
@ -24,6 +25,8 @@ import {
|
|||
UserCtx,
|
||||
} from "@budibase/types"
|
||||
import sdk from "../../../sdk"
|
||||
import { getLinkedTableIDs } from "../../../db/linkedRows/linkUtils"
|
||||
import { flatten } from "lodash"
|
||||
|
||||
export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
|
||||
const tableId = utils.getTableId(ctx)
|
||||
|
@ -154,7 +157,7 @@ export async function destroy(ctx: UserCtx) {
|
|||
if (row.tableId !== tableId) {
|
||||
throw "Supplied tableId doesn't match the row's tableId"
|
||||
}
|
||||
const table = await sdk.tables.getTable(row.tableId)
|
||||
const table = await sdk.tables.getTable(tableId)
|
||||
// update the row to include full relationships before deleting them
|
||||
row = await outputProcessing(table, row, {
|
||||
squash: false,
|
||||
|
@ -164,7 +167,7 @@ export async function destroy(ctx: UserCtx) {
|
|||
await linkRows.updateLinks({
|
||||
eventType: linkRows.EventType.ROW_DELETE,
|
||||
row,
|
||||
tableId: row.tableId,
|
||||
tableId,
|
||||
})
|
||||
// remove any attachments that were on the row from object storage
|
||||
await cleanupAttachments(table, { row })
|
||||
|
@ -225,60 +228,52 @@ export async function bulkDestroy(ctx: UserCtx) {
|
|||
}
|
||||
|
||||
export async function fetchEnrichedRow(ctx: UserCtx) {
|
||||
const fieldName = ctx.request.query.field as string | undefined
|
||||
const db = context.getAppDB()
|
||||
const tableId = utils.getTableId(ctx)
|
||||
const rowId = ctx.params.rowId
|
||||
// need table to work out where links go in row
|
||||
let [table, row] = await Promise.all([
|
||||
const rowId = ctx.params.rowId as string
|
||||
// need table to work out where links go in row, as well as the link docs
|
||||
let response = await Promise.all([
|
||||
sdk.tables.getTable(tableId),
|
||||
utils.findRow(ctx, tableId, rowId),
|
||||
linkRows.getLinkDocuments({ tableId, rowId, fieldName }),
|
||||
])
|
||||
// get the link docs
|
||||
const linkVals = (await linkRows.getLinkDocuments({
|
||||
tableId,
|
||||
rowId,
|
||||
})) as LinkDocumentValue[]
|
||||
const table = response[0] as Table
|
||||
const row = response[1] as Row
|
||||
const linkVals = response[2] as LinkDocumentValue[]
|
||||
// look up the actual rows based on the ids
|
||||
let response = (
|
||||
await db.allDocs({
|
||||
include_docs: true,
|
||||
keys: linkVals.map(linkVal => linkVal.id),
|
||||
})
|
||||
).rows.map(row => row.doc)
|
||||
// group responses by table
|
||||
let groups: any = {},
|
||||
tables: Record<string, Table> = {}
|
||||
for (let row of response) {
|
||||
if (!row.tableId) {
|
||||
row.tableId = getTableIDFromRowID(row._id)
|
||||
}
|
||||
const linkedTableId = row.tableId
|
||||
if (groups[linkedTableId] == null) {
|
||||
groups[linkedTableId] = [row]
|
||||
tables[linkedTableId] = await db.get(linkedTableId)
|
||||
} else {
|
||||
groups[linkedTableId].push(row)
|
||||
}
|
||||
}
|
||||
let linkedRows: Row[] = []
|
||||
for (let [tableId, rows] of Object.entries(groups)) {
|
||||
// need to include the IDs in these rows for any links they may have
|
||||
linkedRows = linkedRows.concat(
|
||||
await outputProcessing(tables[tableId], rows as Row[])
|
||||
const params = getMultiIDParams(linkVals.map(linkVal => linkVal.id))
|
||||
let linkedRows = (await db.allDocs(params)).rows.map(row => row.doc)
|
||||
|
||||
// get the linked tables
|
||||
const linkTableIds = getLinkedTableIDs(table as Table)
|
||||
const linkTables = await sdk.tables.getTables(linkTableIds)
|
||||
|
||||
// perform output processing
|
||||
let final: Promise<Row[]>[] = []
|
||||
for (let linkTable of linkTables) {
|
||||
const relatedRows = linkedRows.filter(row => row.tableId === linkTable._id)
|
||||
// include the row being enriched for performance reasons, don't need to fetch it to include
|
||||
final = final.concat(
|
||||
outputProcessing(linkTable, relatedRows, {
|
||||
// have to clone to avoid JSON cycle
|
||||
fromRow: cloneDeep(row),
|
||||
squash: true,
|
||||
})
|
||||
)
|
||||
}
|
||||
// finalise the promises
|
||||
linkedRows = flatten(await Promise.all(final))
|
||||
|
||||
// insert the link rows in the correct place throughout the main row
|
||||
for (let fieldName of Object.keys(table.schema)) {
|
||||
let field = table.schema[fieldName]
|
||||
if (field.type === FieldTypes.LINK) {
|
||||
// find the links that pertain to this field, get their indexes
|
||||
const linkIndexes = linkVals
|
||||
.filter(link => link.fieldName === fieldName)
|
||||
.map(link => linkVals.indexOf(link))
|
||||
// find the links that pertain to this field
|
||||
const links = linkVals.filter(link => link.fieldName === fieldName)
|
||||
// find the rows that the links state are linked to this field
|
||||
row[fieldName] = linkedRows.filter((linkRow, index) =>
|
||||
linkIndexes.includes(index)
|
||||
row[fieldName] = linkedRows.filter(linkRow =>
|
||||
links.find(link => link.id === linkRow._id)
|
||||
)
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -149,7 +149,7 @@ export async function finaliseRow(
     await db.put(table)
   } catch (err: any) {
     if (err.status === 409) {
-      const updatedTable = await sdk.tables.getTable(table._id)
+      const updatedTable = await sdk.tables.getTable(table._id!)
       let response = processAutoColumn(null, updatedTable, row, {
         reprocessing: true,
       })
|
||||
|
|
|
@ -17,20 +17,6 @@ import sdk from "../../../sdk"
|
|||
import validateJs from "validate.js"
|
||||
import { cloneDeep } from "lodash/fp"
|
||||
|
||||
function isForeignKey(key: string, table: Table) {
|
||||
const relationships = Object.values(table.schema).filter(
|
||||
column => column.type === FieldType.LINK
|
||||
)
|
||||
return relationships.some(
|
||||
relationship =>
|
||||
(
|
||||
relationship as
|
||||
| OneToManyRelationshipFieldMetadata
|
||||
| ManyToOneRelationshipFieldMetadata
|
||||
).foreignKey === key
|
||||
)
|
||||
}
|
||||
|
||||
validateJs.extend(validateJs.validators.datetime, {
|
||||
parse: function (value: string) {
|
||||
return new Date(value).getTime()
|
||||
|
@@ -60,7 +46,7 @@ export async function findRow(ctx: UserCtx, tableId: string, rowId: string) {
   return row
 }

-export function getTableId(ctx: Ctx) {
+export function getTableId(ctx: Ctx): string {
   // top priority, use the URL first
   if (ctx.params?.sourceId) {
     return ctx.params.sourceId
|
||||
|
@ -77,112 +63,7 @@ export function getTableId(ctx: Ctx) {
|
|||
if (ctx.params?.viewName) {
|
||||
return ctx.params.viewName
|
||||
}
|
||||
}
|
||||
|
||||
export async function validate({
|
||||
tableId,
|
||||
row,
|
||||
table,
|
||||
}: {
|
||||
tableId?: string
|
||||
row: Row
|
||||
table?: Table
|
||||
}) {
|
||||
let fetchedTable: Table
|
||||
if (!table) {
|
||||
fetchedTable = await sdk.tables.getTable(tableId)
|
||||
} else {
|
||||
fetchedTable = table
|
||||
}
|
||||
const errors: any = {}
|
||||
for (let fieldName of Object.keys(fetchedTable.schema)) {
|
||||
const column = fetchedTable.schema[fieldName]
|
||||
const constraints = cloneDeep(column.constraints)
|
||||
const type = column.type
|
||||
// foreign keys are likely to be enriched
|
||||
if (isForeignKey(fieldName, fetchedTable)) {
|
||||
continue
|
||||
}
|
||||
// formulas shouldn't validated, data will be deleted anyway
|
||||
if (type === FieldTypes.FORMULA || column.autocolumn) {
|
||||
continue
|
||||
}
|
||||
// special case for options, need to always allow unselected (empty)
|
||||
if (type === FieldTypes.OPTIONS && constraints?.inclusion) {
|
||||
constraints.inclusion.push(null as any, "")
|
||||
}
|
||||
let res
|
||||
|
||||
// Validate.js doesn't seem to handle array
|
||||
if (type === FieldTypes.ARRAY && row[fieldName]) {
|
||||
if (row[fieldName].length) {
|
||||
if (!Array.isArray(row[fieldName])) {
|
||||
row[fieldName] = row[fieldName].split(",")
|
||||
}
|
||||
row[fieldName].map((val: any) => {
|
||||
if (
|
||||
!constraints?.inclusion?.includes(val) &&
|
||||
constraints?.inclusion?.length !== 0
|
||||
) {
|
||||
errors[fieldName] = "Field not in list"
|
||||
}
|
||||
})
|
||||
} else if (constraints?.presence && row[fieldName].length === 0) {
|
||||
// non required MultiSelect creates an empty array, which should not throw errors
|
||||
errors[fieldName] = [`${fieldName} is required`]
|
||||
}
|
||||
} else if (
|
||||
(type === FieldTypes.ATTACHMENT || type === FieldTypes.JSON) &&
|
||||
typeof row[fieldName] === "string"
|
||||
) {
|
||||
// this should only happen if there is an error
|
||||
try {
|
||||
const json = JSON.parse(row[fieldName])
|
||||
if (type === FieldTypes.ATTACHMENT) {
|
||||
if (Array.isArray(json)) {
|
||||
row[fieldName] = json
|
||||
} else {
|
||||
errors[fieldName] = [`Must be an array`]
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
errors[fieldName] = [`Contains invalid JSON`]
|
||||
}
|
||||
} else {
|
||||
res = validateJs.single(row[fieldName], constraints)
|
||||
}
|
||||
if (res) errors[fieldName] = res
|
||||
}
|
||||
return { valid: Object.keys(errors).length === 0, errors }
|
||||
}
|
||||
|
||||
// don't do a pure falsy check, as 0 is included
|
||||
// https://github.com/Budibase/budibase/issues/10118
|
||||
export function removeEmptyFilters(filters: SearchFilters) {
|
||||
for (let filterField of NoEmptyFilterStrings) {
|
||||
if (!filters[filterField]) {
|
||||
continue
|
||||
}
|
||||
|
||||
for (let filterType of Object.keys(filters)) {
|
||||
if (filterType !== filterField) {
|
||||
continue
|
||||
}
|
||||
// don't know which one we're checking, type could be anything
|
||||
const value = filters[filterType] as unknown
|
||||
if (typeof value === "object") {
|
||||
for (let [key, value] of Object.entries(
|
||||
filters[filterType] as object
|
||||
)) {
|
||||
if (value == null || value === "") {
|
||||
// @ts-ignore
|
||||
delete filters[filterField][key]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return filters
|
||||
throw new Error("Unable to find table ID in request")
|
||||
}
|
||||
|
||||
export function isUserMetadataTable(tableId: string) {
|
||||
|
|
|
@ -16,6 +16,7 @@ import {
|
|||
Table,
|
||||
TableResponse,
|
||||
UserCtx,
|
||||
Row,
|
||||
} from "@budibase/types"
|
||||
import sdk from "../../../sdk"
|
||||
import { jsonFromCsvString } from "../../../utilities/csv"
|
||||
|
@ -139,8 +140,7 @@ export async function validateNewTableImport(ctx: UserCtx) {
|
|||
}
|
||||
|
||||
export async function validateExistingTableImport(ctx: UserCtx) {
|
||||
const { rows, tableId }: { rows: unknown; tableId: unknown } =
|
||||
ctx.request.body
|
||||
const { rows, tableId }: { rows: Row[]; tableId?: string } = ctx.request.body
|
||||
|
||||
let schema = null
|
||||
if (tableId) {
|
||||
|
|
|
@ -1,13 +1,11 @@
|
|||
import { generateUserFlagID } from "../../db/utils"
|
||||
import { InternalTables } from "../../db/utils"
|
||||
import { generateUserFlagID, InternalTables } from "../../db/utils"
|
||||
import { getFullUser } from "../../utilities/users"
|
||||
import { context } from "@budibase/backend-core"
|
||||
import { Ctx, UserCtx } from "@budibase/types"
|
||||
import sdk from "../../sdk"
|
||||
|
||||
export async function fetchMetadata(ctx: Ctx) {
|
||||
const users = await sdk.users.fetchMetadata()
|
||||
ctx.body = users
|
||||
ctx.body = await sdk.users.fetchMetadata()
|
||||
}
|
||||
|
||||
export async function updateSelfMetadata(ctx: UserCtx) {
|
||||
|
|
|
@ -9,13 +9,13 @@ import {
|
|||
getLinkedTable,
|
||||
} from "./linkUtils"
|
||||
import flatten from "lodash/flatten"
|
||||
import { FieldTypes } from "../../constants"
|
||||
import { getMultiIDParams, USER_METDATA_PREFIX } from "../utils"
|
||||
import partition from "lodash/partition"
|
||||
import { getGlobalUsersFromMetadata } from "../../utilities/global"
|
||||
import { processFormulas } from "../../utilities/rowProcessor"
|
||||
import { context } from "@budibase/backend-core"
|
||||
import { Table, Row, LinkDocumentValue } from "@budibase/types"
|
||||
import { Table, Row, LinkDocumentValue, FieldType } from "@budibase/types"
|
||||
import sdk from "../../sdk"
|
||||
|
||||
export { IncludeDocs, getLinkDocuments, createLinkView } from "./linkUtils"
|
||||
|
||||
|
@@ -35,7 +35,7 @@ export const EventType = {

 function clearRelationshipFields(table: Table, rows: Row[]) {
   for (let [key, field] of Object.entries(table.schema)) {
-    if (field.type === FieldTypes.LINK) {
+    if (field.type === FieldType.LINK) {
       rows = rows.map(row => {
         delete row[key]
         return row
|
||||
|
@ -45,7 +45,7 @@ function clearRelationshipFields(table: Table, rows: Row[]) {
|
|||
return rows
|
||||
}
|
||||
|
||||
async function getLinksForRows(rows: Row[]) {
|
||||
async function getLinksForRows(rows: Row[]): Promise<LinkDocumentValue[]> {
|
||||
const tableIds = [...new Set(rows.map(el => el.tableId))]
|
||||
// start by getting all the link values for performance reasons
|
||||
const promises = tableIds.map(tableId =>
|
||||
|
@ -146,32 +146,57 @@ export async function updateLinks(args: {
|
|||
* This is required for formula fields, this may only be utilised internally (for now).
|
||||
* @param table The table from which the rows originated.
|
||||
* @param rows The rows which are to be enriched.
|
||||
* @param opts optional - options like passing in a base row to use for enrichment.
|
||||
* @return returns the rows with all of the enriched relationships on it.
|
||||
*/
|
||||
export async function attachFullLinkedDocs(table: Table, rows: Row[]) {
|
||||
export async function attachFullLinkedDocs(
|
||||
table: Table,
|
||||
rows: Row[],
|
||||
opts?: { fromRow?: Row }
|
||||
) {
|
||||
const linkedTableIds = getLinkedTableIDs(table)
|
||||
if (linkedTableIds.length === 0) {
|
||||
return rows
|
||||
}
|
||||
// get all the links
|
||||
const links = (await getLinksForRows(rows)).filter(link =>
|
||||
// get tables and links
|
||||
let response = await Promise.all([
|
||||
getLinksForRows(rows),
|
||||
sdk.tables.getTables(linkedTableIds),
|
||||
])
|
||||
// find the links that pertain to one of the rows that is being enriched
|
||||
const links = (response[0] as LinkDocumentValue[]).filter(link =>
|
||||
rows.some(row => row._id === link.thisId)
|
||||
)
|
||||
// if fromRow has been passed in, then we don't need to fetch it (optimisation)
|
||||
let linksWithoutFromRow = links
|
||||
if (opts?.fromRow) {
|
||||
linksWithoutFromRow = links.filter(link => link.id !== opts?.fromRow?._id)
|
||||
}
|
||||
const linkedTables = response[1] as Table[]
|
||||
// clear any existing links that could be dupe'd
|
||||
rows = clearRelationshipFields(table, rows)
|
||||
// now get the docs and combine into the rows
|
||||
let linked = await getFullLinkedDocs(links)
|
||||
const linkedTables: Table[] = []
|
||||
let linked = []
|
||||
if (linksWithoutFromRow.length > 0) {
|
||||
linked = await getFullLinkedDocs(linksWithoutFromRow)
|
||||
}
|
||||
for (let row of rows) {
|
||||
for (let link of links.filter(link => link.thisId === row._id)) {
|
||||
if (row[link.fieldName] == null) {
|
||||
row[link.fieldName] = []
|
||||
}
|
||||
const linkedRow = linked.find(row => row._id === link.id)
|
||||
let linkedRow: Row
|
||||
if (opts?.fromRow && opts?.fromRow?._id === link.id) {
|
||||
linkedRow = opts.fromRow!
|
||||
} else {
|
||||
linkedRow = linked.find(row => row._id === link.id)
|
||||
}
|
||||
if (linkedRow) {
|
||||
const linkedTableId =
|
||||
linkedRow.tableId || getRelatedTableForField(table, link.fieldName)
|
||||
const linkedTable = await getLinkedTable(linkedTableId, linkedTables)
|
||||
const linkedTable = linkedTables.find(
|
||||
table => table._id === linkedTableId
|
||||
)
|
||||
if (linkedTable) {
|
||||
row[link.fieldName].push(processFormulas(linkedTable, linkedRow))
|
||||
}
|
||||
|
@ -199,13 +224,13 @@ export async function squashLinksToPrimaryDisplay(
|
|||
// this only fetches the table if its not already in array
|
||||
const rowTable = await getLinkedTable(row.tableId!, linkedTables)
|
||||
for (let [column, schema] of Object.entries(rowTable?.schema || {})) {
|
||||
if (schema.type !== FieldTypes.LINK || !Array.isArray(row[column])) {
|
||||
if (schema.type !== FieldType.LINK || !Array.isArray(row[column])) {
|
||||
continue
|
||||
}
|
||||
const newLinks = []
|
||||
for (let link of row[column]) {
|
||||
const linkTblId = link.tableId || getRelatedTableForField(table, column)
|
||||
const linkedTable = await getLinkedTable(linkTblId, linkedTables)
|
||||
const linkedTable = await getLinkedTable(linkTblId!, linkedTables)
|
||||
const obj: any = { _id: link._id }
|
||||
if (linkedTable?.primaryDisplay && link[linkedTable.primaryDisplay]) {
|
||||
obj.primaryDisplay = link[linkedTable.primaryDisplay]
|
||||
|
|
|
@ -31,19 +31,22 @@ export const IncludeDocs = {
|
|||
export async function getLinkDocuments(args: {
|
||||
tableId?: string
|
||||
rowId?: string
|
||||
includeDocs?: any
|
||||
fieldName?: string
|
||||
includeDocs?: boolean
|
||||
}): Promise<LinkDocumentValue[] | LinkDocument[]> {
|
||||
const { tableId, rowId, includeDocs } = args
|
||||
const { tableId, rowId, fieldName, includeDocs } = args
|
||||
const db = context.getAppDB()
|
||||
let params: any
|
||||
if (rowId != null) {
|
||||
if (rowId) {
|
||||
params = { key: [tableId, rowId] }
|
||||
}
|
||||
// only table is known
|
||||
else {
|
||||
params = { startKey: [tableId], endKey: [tableId, {}] }
|
||||
}
|
||||
params.include_docs = !!includeDocs
|
||||
if (includeDocs) {
|
||||
params.include_docs = true
|
||||
}
|
||||
try {
|
||||
let linkRows = (await db.query(getQueryIndex(ViewName.LINK), params)).rows
|
||||
// filter to get unique entries
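A short usage sketch of the extended signature: passing fieldName narrows the returned link values to one relationship column, which is how fetchEnrichedRow consumes it elsewhere in this diff (the column name here is illustrative).

const links = (await getLinkDocuments({
  tableId,
  rowId,
  fieldName: "projects",
})) as LinkDocumentValue[]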
|
||||
|
@ -63,6 +66,14 @@ export async function getLinkDocuments(args: {
|
|||
return unique
|
||||
})
|
||||
|
||||
// filter down to just the required field name
|
||||
if (fieldName) {
|
||||
linkRows = linkRows.filter(link => {
|
||||
const value = link.value as LinkDocumentValue
|
||||
return value.fieldName === fieldName
|
||||
})
|
||||
}
|
||||
// return docs if docs requested, otherwise just the value information
|
||||
if (includeDocs) {
|
||||
return linkRows.map(row => row.doc) as LinkDocument[]
|
||||
} else {
|
||||
|
@ -87,7 +98,7 @@ export function getUniqueByProp(array: any[], prop: string) {
|
|||
})
|
||||
}
|
||||
|
||||
export function getLinkedTableIDs(table: Table) {
|
||||
export function getLinkedTableIDs(table: Table): string[] {
|
||||
return Object.values(table.schema)
|
||||
.filter(isRelationshipColumn)
|
||||
.map(column => column.tableId)
|
||||
|
|
|
@ -16,6 +16,7 @@ jest.mock("../../sdk", () => ({
|
|||
import sdk from "../../sdk"
|
||||
import { Next } from "koa"
|
||||
|
||||
const tableId = utils.generateTableID()
|
||||
const mockGetView = sdk.views.get as jest.MockedFunction<typeof sdk.views.get>
|
||||
const mockGetTable = sdk.tables.getTable as jest.MockedFunction<
|
||||
typeof sdk.tables.getTable
|
||||
|
@ -41,6 +42,7 @@ class TestConfiguration {
|
|||
body: ctxRequestBody,
|
||||
}
|
||||
this.params.viewId = viewId
|
||||
this.params.sourceId = tableId
|
||||
return this.middleware(
|
||||
{
|
||||
request: this.request as any,
|
||||
|
@ -69,7 +71,7 @@ describe("trimViewRowInfo middleware", () => {
|
|||
})
|
||||
|
||||
const table: Table = {
|
||||
_id: utils.generateTableID(),
|
||||
_id: tableId,
|
||||
name: generator.word(),
|
||||
type: "table",
|
||||
schema: {
|
||||
|
|
|
@@ -3,6 +3,7 @@ import * as syncApps from "./usageQuotas/syncApps"
 import * as syncRows from "./usageQuotas/syncRows"
 import * as syncPlugins from "./usageQuotas/syncPlugins"
 import * as syncUsers from "./usageQuotas/syncUsers"
+import * as syncCreators from "./usageQuotas/syncCreators"

 /**
  * Synchronise quotas to the state of the db.
|
||||
|
@@ -13,5 +14,6 @@ export const run = async () => {
     await syncRows.run()
     await syncPlugins.run()
     await syncUsers.run()
+    await syncCreators.run()
   })
 }
|
||||
|
|
|
@@ -0,0 +1,13 @@
+import { users } from "@budibase/backend-core"
+import { quotas } from "@budibase/pro"
+import { QuotaUsageType, StaticQuotaName } from "@budibase/types"
+
+export const run = async () => {
+  const creatorCount = await users.getCreatorCount()
+  console.log(`Syncing creator count: ${creatorCount}`)
+  await quotas.setUsage(
+    creatorCount,
+    StaticQuotaName.CREATORS,
+    QuotaUsageType.STATIC
+  )
+}
|
|
@ -0,0 +1,26 @@
|
|||
import TestConfig from "../../../../tests/utilities/TestConfiguration"
|
||||
import * as syncCreators from "../syncCreators"
|
||||
import { quotas } from "@budibase/pro"
|
||||
|
||||
describe("syncCreators", () => {
|
||||
let config = new TestConfig(false)
|
||||
|
||||
beforeEach(async () => {
|
||||
await config.init()
|
||||
})
|
||||
|
||||
afterAll(config.end)
|
||||
|
||||
it("syncs creators", async () => {
|
||||
return config.doInContext(null, async () => {
|
||||
await config.createUser({ admin: true })
|
||||
|
||||
await syncCreators.run()
|
||||
|
||||
const usageDoc = await quotas.getQuotaUsage()
|
||||
// default + additional creator
|
||||
const creatorsCount = 2
|
||||
expect(usageDoc.usageQuota.creators).toBe(creatorsCount)
|
||||
})
|
||||
})
|
||||
})
|
|
@ -23,10 +23,13 @@ import {
|
|||
getTableParams,
|
||||
} from "../../../db/utils"
|
||||
import sdk from "../../index"
|
||||
import datasource from "../../../api/routes/datasource"
|
||||
|
||||
const ENV_VAR_PREFIX = "env."
|
||||
|
||||
export async function fetch() {
|
||||
export async function fetch(opts?: {
|
||||
enriched: boolean
|
||||
}): Promise<Datasource[]> {
|
||||
// Get internal tables
|
||||
const db = context.getAppDB()
|
||||
const internalTables = await db.allDocs(
|
||||
|
@ -44,7 +47,7 @@ export async function fetch() {
|
|||
|
||||
const bbInternalDb = {
|
||||
...BudibaseInternalDB,
|
||||
}
|
||||
} as Datasource
|
||||
|
||||
// Get external datasources
|
||||
const datasources = (
|
||||
|
@ -66,7 +69,18 @@ export async function fetch() {
|
|||
}
|
||||
}
|
||||
|
||||
return [bbInternalDb, ...datasources]
|
||||
if (opts?.enriched) {
|
||||
const envVars = await getEnvironmentVariables()
|
||||
const promises = datasources.map(datasource =>
|
||||
enrichDatasourceWithValues(datasource, envVars)
|
||||
)
|
||||
const enriched = (await Promise.all(promises)).map(
|
||||
result => result.datasource
|
||||
)
|
||||
return [bbInternalDb, ...enriched]
|
||||
} else {
|
||||
return [bbInternalDb, ...datasources]
|
||||
}
|
||||
}
|
||||
|
||||
export function areRESTVariablesValid(datasource: Datasource) {
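From the caller's side the new option looks like this: with enriched set, environment-variable bindings are resolved once and substituted into every datasource config, which is what getAllExternalTables relies on later in this diff.

const raw = await sdk.datasources.fetch()                       // bindings left in place
const enriched = await sdk.datasources.fetch({ enriched: true }) // env vars substituted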
|
||||
|
@ -107,9 +121,12 @@ export function checkDatasourceTypes(schema: Integration, config: any) {
|
|||
return config
|
||||
}
|
||||
|
||||
async function enrichDatasourceWithValues(datasource: Datasource) {
|
||||
async function enrichDatasourceWithValues(
|
||||
datasource: Datasource,
|
||||
variables?: Record<string, string>
|
||||
) {
|
||||
const cloned = cloneDeep(datasource)
|
||||
const env = await getEnvironmentVariables()
|
||||
const env = variables ? variables : await getEnvironmentVariables()
|
||||
//Do not process entities, as we do not want to process formulas
|
||||
const { entities, ...clonedWithoutEntities } = cloned
|
||||
const processed = processObjectSync(
|
||||
|
@ -235,9 +252,9 @@ export function mergeConfigs(update: Datasource, old: Datasource) {
|
|||
if (value !== PASSWORD_REPLACEMENT) {
|
||||
continue
|
||||
}
|
||||
if (old.config?.[key]) {
|
||||
if (update.config && old.config && old.config?.[key]) {
|
||||
update.config[key] = old.config?.[key]
|
||||
} else {
|
||||
} else if (update.config) {
|
||||
delete update.config[key]
|
||||
}
|
||||
}
|
||||
|
|
|
@ -7,11 +7,11 @@ export async function getRow(
|
|||
rowId: string,
|
||||
opts?: { relationships?: boolean }
|
||||
) {
|
||||
const response = (await handleRequest(Operation.READ, tableId, {
|
||||
const response = await handleRequest(Operation.READ, tableId, {
|
||||
id: breakRowIdField(rowId),
|
||||
includeSqlRelationships: opts?.relationships
|
||||
? IncludeRelationship.INCLUDE
|
||||
: IncludeRelationship.EXCLUDE,
|
||||
})) as Row[]
|
||||
})
|
||||
return response ? response[0] : response
|
||||
}
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
import { SearchFilters, SearchParams } from "@budibase/types"
|
||||
import { SearchFilters, SearchParams, Row } from "@budibase/types"
|
||||
import { isExternalTable } from "../../../integrations/utils"
|
||||
import * as internal from "./search/internal"
|
||||
import * as external from "./search/external"
|
||||
|
@ -45,7 +45,7 @@ export async function exportRows(
|
|||
return pickApi(options.tableId).exportRows(options)
|
||||
}
|
||||
|
||||
export async function fetch(tableId: string) {
|
||||
export async function fetch(tableId: string): Promise<Row[]> {
|
||||
return pickApi(tableId).fetch(tableId)
|
||||
}
|
||||
|
||||
|
@ -53,6 +53,6 @@ export async function fetchView(
|
|||
tableId: string,
|
||||
viewName: string,
|
||||
params: ViewParams
|
||||
) {
|
||||
): Promise<Row[]> {
|
||||
return pickApi(tableId).fetchView(viewName, params)
|
||||
}
|
||||
|
|
|
@ -55,15 +55,15 @@ export async function search(options: SearchParams) {
|
|||
try {
|
||||
const table = await sdk.tables.getTable(tableId)
|
||||
options = searchInputMapping(table, options)
|
||||
let rows = (await handleRequest(Operation.READ, tableId, {
|
||||
let rows = await handleRequest(Operation.READ, tableId, {
|
||||
filters: query,
|
||||
sort,
|
||||
paginate: paginateObj as PaginationJson,
|
||||
includeSqlRelationships: IncludeRelationship.INCLUDE,
|
||||
})) as Row[]
|
||||
})
|
||||
let hasNextPage = false
|
||||
if (paginate && rows.length === limit) {
|
||||
const nextRows = (await handleRequest(Operation.READ, tableId, {
|
||||
const nextRows = await handleRequest(Operation.READ, tableId, {
|
||||
filters: query,
|
||||
sort,
|
||||
paginate: {
|
||||
|
@ -71,7 +71,7 @@ export async function search(options: SearchParams) {
|
|||
page: bookmark! * limit + 1,
|
||||
},
|
||||
includeSqlRelationships: IncludeRelationship.INCLUDE,
|
||||
})) as Row[]
|
||||
})
|
||||
hasNextPage = nextRows.length > 0
|
||||
}
|
||||
|
||||
|
@ -172,12 +172,18 @@ export async function exportRows(
|
|||
}
|
||||
}
|
||||
|
||||
export async function fetch(tableId: string) {
|
||||
const response = await handleRequest(Operation.READ, tableId, {
|
||||
includeSqlRelationships: IncludeRelationship.INCLUDE,
|
||||
})
|
||||
export async function fetch(tableId: string): Promise<Row[]> {
|
||||
const response = await handleRequest<Operation.READ>(
|
||||
Operation.READ,
|
||||
tableId,
|
||||
{
|
||||
includeSqlRelationships: IncludeRelationship.INCLUDE,
|
||||
}
|
||||
)
|
||||
const table = await sdk.tables.getTable(tableId)
|
||||
return await outputProcessing(table, response, { preserveLinks: true })
|
||||
return await outputProcessing<Row[]>(table, response, {
|
||||
preserveLinks: true,
|
||||
})
|
||||
}
|
||||
|
||||
export async function fetchView(viewName: string) {
|
||||
|
|
|
@ -6,26 +6,26 @@ import {
|
|||
import env from "../../../../environment"
|
||||
import { fullSearch, paginatedSearch } from "./internalSearch"
|
||||
import {
|
||||
InternalTables,
|
||||
getRowParams,
|
||||
DocumentType,
|
||||
getRowParams,
|
||||
InternalTables,
|
||||
} from "../../../../db/utils"
|
||||
import { getGlobalUsersFromMetadata } from "../../../../utilities/global"
|
||||
import { outputProcessing } from "../../../../utilities/rowProcessor"
|
||||
import { Database, Row, Table, SearchParams } from "@budibase/types"
|
||||
import { Database, Row, SearchParams, Table } from "@budibase/types"
|
||||
import { cleanExportRows } from "../utils"
|
||||
import {
|
||||
Format,
|
||||
csv,
|
||||
Format,
|
||||
json,
|
||||
jsonWithSchema,
|
||||
} from "../../../../api/controllers/view/exporters"
|
||||
import * as inMemoryViews from "../../../../db/inMemoryView"
|
||||
import {
|
||||
migrateToInMemoryView,
|
||||
migrateToDesignView,
|
||||
getFromDesignDoc,
|
||||
getFromMemoryDoc,
|
||||
migrateToDesignView,
|
||||
migrateToInMemoryView,
|
||||
} from "../../../../api/controllers/view/utils"
|
||||
import sdk from "../../../../sdk"
|
||||
import { ExportRowsParams, ExportRowsResult } from "../search"
|
||||
|
@ -139,13 +139,12 @@ export async function exportRows(
|
|||
}
|
||||
}
|
||||
|
||||
export async function fetch(tableId: string) {
|
||||
export async function fetch(tableId: string): Promise<Row[]> {
|
||||
const db = context.getAppDB()
|
||||
|
||||
const table = await sdk.tables.getTable(tableId)
|
||||
const rows = await getRawTableData(db, tableId)
|
||||
const result = await outputProcessing(table, rows)
|
||||
return result
|
||||
return await outputProcessing(table, rows)
|
||||
}
|
||||
|
||||
async function getRawTableData(db: Database, tableId: string) {
|
||||
|
|
|
@ -69,12 +69,15 @@ export async function validate({
|
|||
valid: boolean
|
||||
errors: Record<string, any>
|
||||
}> {
|
||||
let fetchedTable: Table
|
||||
if (!table) {
|
||||
let fetchedTable: Table | undefined
|
||||
if (!table && tableId) {
|
||||
fetchedTable = await sdk.tables.getTable(tableId)
|
||||
} else {
|
||||
} else if (table) {
|
||||
fetchedTable = table
|
||||
}
|
||||
if (fetchedTable === undefined) {
|
||||
throw new Error("Unable to fetch table for validation")
|
||||
}
|
||||
const errors: Record<string, any> = {}
|
||||
for (let fieldName of Object.keys(fetchedTable.schema)) {
|
||||
const column = fetchedTable.schema[fieldName]
|
||||
|
|
|
@ -1,11 +1,16 @@
|
|||
import { context } from "@budibase/backend-core"
|
||||
import { BudibaseInternalDB, getTableParams } from "../../../db/utils"
|
||||
import {
|
||||
BudibaseInternalDB,
|
||||
getMultiIDParams,
|
||||
getTableParams,
|
||||
} from "../../../db/utils"
|
||||
import {
|
||||
breakExternalTableId,
|
||||
isExternalTable,
|
||||
isSQL,
|
||||
} from "../../../integrations/utils"
|
||||
import {
|
||||
AllDocsResponse,
|
||||
Database,
|
||||
Table,
|
||||
TableResponse,
|
||||
|
@ -15,24 +20,70 @@ import datasources from "../datasources"
|
|||
import { populateExternalTableSchemas } from "./validation"
|
||||
import sdk from "../../../sdk"
|
||||
|
||||
async function getAllInternalTables(db?: Database): Promise<Table[]> {
|
||||
if (!db) {
|
||||
db = context.getAppDB()
|
||||
}
|
||||
const internalTables = await db.allDocs(
|
||||
getTableParams(null, {
|
||||
include_docs: true,
|
||||
})
|
||||
)
|
||||
return internalTables.rows.map((tableDoc: any) => ({
|
||||
function processInternalTables(docs: AllDocsResponse<Table[]>): Table[] {
|
||||
return docs.rows.map((tableDoc: any) => ({
|
||||
...tableDoc.doc,
|
||||
type: "internal",
|
||||
sourceId: tableDoc.doc.sourceId || BudibaseInternalDB._id,
|
||||
}))
|
||||
}
|
||||
|
||||
async function getAllExternalTables(
|
||||
datasourceId: any
|
||||
async function getAllInternalTables(db?: Database): Promise<Table[]> {
|
||||
if (!db) {
|
||||
db = context.getAppDB()
|
||||
}
|
||||
const internalTableDocs = await db.allDocs<Table[]>(
|
||||
getTableParams(null, {
|
||||
include_docs: true,
|
||||
})
|
||||
)
|
||||
return processInternalTables(internalTableDocs)
|
||||
}
|
||||
|
||||
async function getAllExternalTables(): Promise<Table[]> {
|
||||
const datasources = await sdk.datasources.fetch({ enriched: true })
|
||||
const allEntities = datasources.map(datasource => datasource.entities)
|
||||
let final: Table[] = []
|
||||
for (let entities of allEntities) {
|
||||
if (entities) {
|
||||
final = final.concat(Object.values(entities))
|
||||
}
|
||||
}
|
||||
return final
|
||||
}
|
||||
|
||||
async function getAllTables() {
|
||||
const [internal, external] = await Promise.all([
|
||||
getAllInternalTables(),
|
||||
getAllExternalTables(),
|
||||
])
|
||||
return [...internal, external]
|
||||
}
|
||||
|
||||
async function getTables(tableIds: string[]): Promise<Table[]> {
|
||||
const externalTableIds = tableIds.filter(tableId => isExternalTable(tableId)),
|
||||
internalTableIds = tableIds.filter(tableId => !isExternalTable(tableId))
|
||||
let tables: Table[] = []
|
||||
if (externalTableIds.length) {
|
||||
const externalTables = await getAllExternalTables()
|
||||
tables = tables.concat(
|
||||
externalTables.filter(
|
||||
table => externalTableIds.indexOf(table._id!) !== -1
|
||||
)
|
||||
)
|
||||
}
|
||||
if (internalTableIds.length) {
|
||||
const db = context.getAppDB()
|
||||
const internalTableDocs = await db.allDocs<Table[]>(
|
||||
getMultiIDParams(internalTableIds)
|
||||
)
|
||||
tables = tables.concat(processInternalTables(internalTableDocs))
|
||||
}
|
||||
return tables
|
||||
}
|
||||
|
||||
async function getExternalTablesInDatasource(
|
||||
datasourceId: string
|
||||
): Promise<Record<string, Table>> {
|
||||
const datasource = await datasources.get(datasourceId, { enriched: true })
|
||||
if (!datasource || !datasource.entities) {
|
||||
|
@ -42,22 +93,22 @@ async function getAllExternalTables(
|
|||
}
|
||||
|
||||
async function getExternalTable(
|
||||
datasourceId: any,
|
||||
tableName: any
|
||||
datasourceId: string,
|
||||
tableName: string
|
||||
): Promise<Table> {
|
||||
const entities = await getAllExternalTables(datasourceId)
|
||||
const entities = await getExternalTablesInDatasource(datasourceId)
|
||||
return entities[tableName]
|
||||
}
|
||||
|
||||
async function getTable(tableId: any): Promise<Table> {
|
||||
async function getTable(tableId: string): Promise<Table> {
|
||||
const db = context.getAppDB()
|
||||
if (isExternalTable(tableId)) {
|
||||
let { datasourceId, tableName } = breakExternalTableId(tableId)
|
||||
const datasource = await datasources.get(datasourceId!)
|
||||
const table = await getExternalTable(datasourceId, tableName)
|
||||
const table = await getExternalTable(datasourceId!, tableName!)
|
||||
return { ...table, sql: isSQL(datasource) }
|
||||
} else {
|
||||
return db.get(tableId)
|
||||
return db.get<Table>(tableId)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -86,9 +137,11 @@ async function saveTable(table: Table) {
|
|||
|
||||
export default {
|
||||
getAllInternalTables,
|
||||
getAllExternalTables,
|
||||
getExternalTablesInDatasource,
|
||||
getExternalTable,
|
||||
getTable,
|
||||
getAllTables,
|
||||
getTables,
|
||||
populateExternalTableSchemas,
|
||||
enrichViewSchemas,
|
||||
saveTable,
|
||||
|
|
|
@ -210,6 +210,7 @@ export async function outputProcessing<T extends Row[] | Row>(
|
|||
opts: {
|
||||
squash?: boolean
|
||||
preserveLinks?: boolean
|
||||
fromRow?: Row
|
||||
skipBBReferences?: boolean
|
||||
} = {
|
||||
squash: true,
|
||||
|
@ -227,7 +228,9 @@ export async function outputProcessing<T extends Row[] | Row>(
|
|||
}
|
||||
// attach any linked row information
|
||||
let enriched = !opts.preserveLinks
|
||||
? await linkRows.attachFullLinkedDocs(table, safeRows)
|
||||
? await linkRows.attachFullLinkedDocs(table, safeRows, {
|
||||
fromRow: opts?.fromRow,
|
||||
})
|
||||
: safeRows
|
||||
|
||||
// process complex types: attachements, bb references...
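A minimal sketch of the new fromRow option: the row being enriched is passed through so attachFullLinkedDocs can reuse it instead of fetching it again, with cloneDeep guarding against the JSON cycle noted in fetchEnrichedRow earlier in this diff.

const processed = await outputProcessing(linkTable, relatedRows, {
  fromRow: cloneDeep(row), // reuse the originating row rather than refetching it
  squash: true,
})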
|
||||
|
|
|
@ -6,6 +6,7 @@ import {
|
|||
InternalTable,
|
||||
} from "@budibase/types"
|
||||
import { getProdAppID } from "./applications"
|
||||
import * as _ from "lodash/fp"
|
||||
|
||||
// checks if a user is specifically a builder, given an app ID
|
||||
export function isBuilder(user: User | ContextUser, appId?: string): boolean {
|
||||
|
@ -58,6 +59,18 @@ export function hasAppBuilderPermissions(user?: User | ContextUser): boolean {
|
|||
return !isGlobalBuilder && appLength != null && appLength > 0
|
||||
}
|
||||
|
||||
export function hasAppCreatorPermissions(user?: User | ContextUser): boolean {
|
||||
if (!user) {
|
||||
return false
|
||||
}
|
||||
return _.flow(
|
||||
_.get("roles"),
|
||||
_.values,
|
||||
_.find(x => ["CREATOR", "ADMIN"].includes(x)),
|
||||
x => !!x
|
||||
)(user)
|
||||
}
|
||||
|
||||
// checks if a user is capable of building any app
|
||||
export function hasBuilderPermissions(user?: User | ContextUser): boolean {
|
||||
if (!user) {
|
||||
|
@ -74,6 +87,18 @@ export function hasAdminPermissions(user?: User | ContextUser): boolean {
|
|||
return !!user.admin?.global
|
||||
}
|
||||
|
||||
export function isCreator(user?: User | ContextUser): boolean {
|
||||
if (!user) {
|
||||
return false
|
||||
}
|
||||
return (
|
||||
isGlobalBuilder(user) ||
|
||||
hasAdminPermissions(user) ||
|
||||
hasAppBuilderPermissions(user) ||
|
||||
hasAppCreatorPermissions(user)
|
||||
)
|
||||
}
|
||||
|
||||
export function getGlobalUserID(userId?: string): string | undefined {
|
||||
if (typeof userId !== "string") {
|
||||
return userId
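Taken together, the new check treats global builders, admins, app-level builders and per-app CREATOR/ADMIN roles as creators; a small sketch with abridged user shapes (not full User objects):

isCreator({ admin: { global: true } })      // true: admin
isCreator({ builder: { apps: ["app_a"] } }) // true: app-level builder
isCreator({ roles: { app_a: "CREATOR" } })  // true: per-app creator role
isCreator({ roles: { app_a: "BASIC" } })    // false
isCreator(undefined)                        // false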
|
||||
|
|
|
@@ -32,6 +32,7 @@ export interface StaticUsage {
   [StaticQuotaName.APPS]: number
   [StaticQuotaName.PLUGINS]: number
   [StaticQuotaName.USERS]: number
+  [StaticQuotaName.CREATORS]: number
   [StaticQuotaName.USER_GROUPS]: number
   [StaticQuotaName.ROWS]: number
   triggers: {
|
||||
|
|
|
@@ -1,5 +1,8 @@
 export enum FeatureFlag {
   LICENSING = "LICENSING",
+  // Feature IDs in Posthog
+  PER_CREATOR_PER_USER_PRICE = "18873",
+  PER_CREATOR_PER_USER_PRICE_ALERT = "18530",
 }

 export interface TenantFeatureFlags {
|
||||
|
|
|
@@ -5,10 +5,17 @@ export interface Customer {
   currency: string | null | undefined
 }

+export interface SubscriptionItems {
+  user: number | undefined
+  creator: number | undefined
+}
+
 export interface Subscription {
   amount: number
+  amounts: SubscriptionItems | undefined
   currency: string
   quantity: number
+  quantities: SubscriptionItems | undefined
   duration: PriceDuration
   cancelAt: number | null | undefined
   currentPeriodStart: number
|
||||
|
|
|
@ -4,7 +4,9 @@ export enum PlanType {
|
|||
PRO = "pro",
|
||||
/** @deprecated */
|
||||
TEAM = "team",
|
||||
/** @deprecated */
|
||||
PREMIUM = "premium",
|
||||
PREMIUM_PLUS = "premium_plus",
|
||||
BUSINESS = "business",
|
||||
ENTERPRISE = "enterprise",
|
||||
}
|
||||
|
@@ -26,10 +28,12 @@ export interface AvailablePrice {
   currency: string
   duration: PriceDuration
   priceId: string
+  type?: string
 }

 export enum PlanModel {
   PER_USER = "perUser",
+  PER_CREATOR_PER_USER = "per_creator_per_user",
   DAY_PASS = "dayPass",
 }
|
||||
|
||||
|
|
|
@@ -14,6 +14,7 @@ export enum StaticQuotaName {
   ROWS = "rows",
   APPS = "apps",
   USERS = "users",
+  CREATORS = "creators",
   USER_GROUPS = "userGroups",
   PLUGINS = "plugins",
 }
|
||||
|
@@ -67,6 +68,7 @@ export type StaticQuotas = {
   [StaticQuotaName.ROWS]: Quota
   [StaticQuotaName.APPS]: Quota
   [StaticQuotaName.USERS]: Quota
+  [StaticQuotaName.CREATORS]: Quota
   [StaticQuotaName.USER_GROUPS]: Quota
   [StaticQuotaName.PLUGINS]: Quota
 }
|
||||
|
|