Merge branch 'master' into update-docker-compose-for-sqs

Commit da12da6fe1 by Sam Rose, 2024-10-14 14:28:52 +01:00 (committed by GitHub)
201 changed files with 10025 additions and 5399 deletions

View File

@ -3,26 +3,50 @@ name: deploy-featurebranch
on:
pull_request:
types: [
labeled,
# default types below (https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request)
opened,
synchronize,
reopened,
]
labeled,
# default types below (https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request)
opened,
synchronize,
reopened,
]
jobs:
release:
if: |
(github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == 'Budibase/budibase') &&
contains(github.event.pull_request.labels.*.name, 'feature-branch')
(
contains(github.event.pull_request.labels.*.name, 'feature-branch') ||
contains(github.event.pull_request.labels.*.name, 'feature-branch-pro') ||
contains(github.event.pull_request.labels.*.name, 'feature-branch-team') ||
contains(github.event.pull_request.labels.*.name, 'feature-branch-business') ||
contains(github.event.pull_request.labels.*.name, 'feature-branch-enterprise')
)
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Set PAYLOAD_LICENSE_TYPE
id: set_license_type
run: |
if [[ "${{ contains(github.event.pull_request.labels.*.name, 'feature-branch') }}" == "true" ]]; then
echo "PAYLOAD_LICENSE_TYPE=free" >> $GITHUB_ENV
elif [[ "${{ contains(github.event.pull_request.labels.*.name, 'feature-branch-pro') }}" == "true" ]]; then
echo "PAYLOAD_LICENSE_TYPE=pro" >> $GITHUB_ENV
elif [[ "${{ contains(github.event.pull_request.labels.*.name, 'feature-branch-team') }}" == "true" ]]; then
echo "PAYLOAD_LICENSE_TYPE=team" >> $GITHUB_ENV
elif [[ "${{ contains(github.event.pull_request.labels.*.name, 'feature-branch-business') }}" == "true" ]]; then
echo "PAYLOAD_LICENSE_TYPE=business" >> $GITHUB_ENV
elif [[ "${{ contains(github.event.pull_request.labels.*.name, 'feature-branch-enterprise') }}" == "true" ]]; then
echo "PAYLOAD_LICENSE_TYPE=enterprise" >> $GITHUB_ENV
else
echo "PAYLOAD_LICENSE_TYPE=free" >> $GITHUB_ENV
fi
- uses: passeidireto/trigger-external-workflow-action@main
env:
PAYLOAD_BRANCH: ${{ github.head_ref }}
PAYLOAD_PR_NUMBER: ${{ github.event.pull_request.number }}
PAYLOAD_LICENSE_TYPE: "free"
PAYLOAD_LICENSE_TYPE: ${{ env.PAYLOAD_LICENSE_TYPE }}
with:
repository: budibase/budibase-deploys
event: featurebranch-qa-deploy
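
The new step maps the PR's feature-branch label to a license type, checking the plain feature-branch label first and defaulting to free. The same precedence as a TypeScript sketch (illustrative only, not part of the workflow):

// helper and names are illustrative, not part of the workflow
const LABEL_TO_LICENSE: [label: string, license: string][] = [
  ["feature-branch", "free"],
  ["feature-branch-pro", "pro"],
  ["feature-branch-team", "team"],
  ["feature-branch-business", "business"],
  ["feature-branch-enterprise", "enterprise"],
]

function licenseTypeFor(labels: string[]): string {
  for (const [label, license] of LABEL_TO_LICENSE) {
    if (labels.includes(label)) {
      return license
    }
  }
  return "free" // default when no feature-branch label matches
}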

View File

@ -1,3 +1,3 @@
nodejs 20.10.0
python 3.10.0
yarn 1.22.19
yarn 1.22.22

View File

@ -184,6 +184,10 @@ spec:
- name: NODE_DEBUG
value: {{ .Values.services.apps.nodeDebug | quote }}
{{ end }}
{{ if .Values.services.apps.xssSafeMode }}
- name: XSS_SAFE_MODE
value: {{ .Values.services.apps.xssSafeMode | quote }}
{{ end }}
{{ if .Values.globals.datadogApmEnabled }}
- name: DD_LOGS_INJECTION
value: {{ .Values.globals.datadogApmEnabled | quote }}

View File

@ -1,6 +1,6 @@
{
"$schema": "node_modules/lerna/schemas/lerna-schema.json",
"version": "2.32.8",
"version": "2.32.17",
"npmClient": "yarn",
"packages": [
"packages/*",

@ -1 +1 @@
Subproject commit 558a32dfd1f55bd894804a503e7e1090937df88c
Subproject commit 8cd052ce8288f343812a514d06c5a9459b3ba1a8

View File

@ -10,7 +10,7 @@ import {
StaticDatabases,
DEFAULT_TENANT_ID,
} from "../constants"
import { Database, IdentityContext, Snippet, App } from "@budibase/types"
import { Database, IdentityContext, Snippet, App, Table } from "@budibase/types"
import { ContextMap } from "./types"
let TEST_APP_ID: string | null = null
@ -253,6 +253,11 @@ export function getAppId(): string | undefined {
}
}
export function getIP(): string | undefined {
const context = Context.get()
return context?.ip
}
export const getProdAppId = () => {
const appId = getAppId()
if (!appId) {
@ -281,6 +286,10 @@ export function doInScimContext(task: any) {
return newContext(updates, task)
}
export function doInIPContext(ip: string, task: any) {
return newContext({ ip }, task)
}
export async function ensureSnippetContext(enabled = !env.isTest()) {
const ctx = getCurrentContext()
@ -394,3 +403,20 @@ export function setFeatureFlags(key: string, value: Record<string, any>) {
context.featureFlagCache ??= {}
context.featureFlagCache[key] = value
}
export function getTableForView(viewId: string): Table | undefined {
const context = getCurrentContext()
if (!context) {
return
}
return context.viewToTableCache?.[viewId]
}
export function setTableForView(viewId: string, table: Table) {
const context = getCurrentContext()
if (!context) {
return
}
context.viewToTableCache ??= {}
context.viewToTableCache[viewId] = table
}
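
A minimal sketch of how the new IP context and the per-request view-to-table cache might be used together (the loader function and relative import path are assumptions):

import { Table } from "@budibase/types"
import * as context from "../context" // relative path as used inside backend-core

declare function loadTableForView(viewId: string): Promise<Table> // hypothetical loader

export async function handleRequest(ip: string, viewId: string) {
  await context.doInIPContext(ip, async () => {
    console.log("request IP:", context.getIP())

    // cache the table backing a view for the remainder of this context
    let table = context.getTableForView(viewId)
    if (!table) {
      table = await loadTableForView(viewId)
      context.setTableForView(viewId, table)
    }
  })
}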

View File

@ -1,4 +1,4 @@
import { IdentityContext, Snippet, VM } from "@budibase/types"
import { IdentityContext, Snippet, Table, VM } from "@budibase/types"
import { OAuth2Client } from "google-auth-library"
import { GoogleSpreadsheet } from "google-spreadsheet"
@ -9,6 +9,7 @@ export type ContextMap = {
identity?: IdentityContext
environmentVariables?: Record<string, string>
isScim?: boolean
ip?: string
automationId?: string
isMigrating?: boolean
vm?: VM
@ -21,4 +22,5 @@ export type ContextMap = {
featureFlagCache?: {
[key: string]: Record<string, any>
}
viewToTableCache?: Record<string, Table>
}

View File

@ -211,19 +211,34 @@ export class DatabaseImpl implements Database {
})
}
async tryGet<T extends Document>(id?: string): Promise<T | undefined> {
try {
return await this.get<T>(id)
} catch (err: any) {
if (err.statusCode === 404) {
return undefined
}
throw err
}
}
async getMultiple<T extends Document>(
ids: string[],
opts?: { allowMissing?: boolean }
opts?: { allowMissing?: boolean; excludeDocs?: boolean }
): Promise<T[]> {
// get unique
ids = [...new Set(ids)]
const includeDocs = !opts?.excludeDocs
const response = await this.allDocs<T>({
keys: ids,
include_docs: true,
include_docs: includeDocs,
})
const rowUnavailable = (row: RowResponse<T>) => {
// row is deleted - key lookup can return this
if (row.doc == null || ("deleted" in row.value && row.value.deleted)) {
if (
(includeDocs && row.doc == null) ||
(row.value && "deleted" in row.value && row.value.deleted)
) {
return true
}
return row.error === "not_found"
@ -237,7 +252,7 @@ export class DatabaseImpl implements Database {
const missingIds = missing.map(row => row.key).join(", ")
throw new Error(`Unable to get documents: ${missingIds}`)
}
return rows.map(row => row.doc!)
return rows.map(row => (includeDocs ? row.doc! : row.value))
}
async remove(idOrDoc: string | Document, rev?: string) {
@ -371,11 +386,21 @@ export class DatabaseImpl implements Database {
return this.performCall(() => {
return async () => {
const response = await directCouchUrlCall(args)
const json = await response.json()
const text = await response.text()
if (response.status > 300) {
let json
try {
json = JSON.parse(text)
} catch (err) {
console.error(`SQS error: ${text}`)
throw new CouchDBError(
"error while running SQS query, please try again later",
{ name: "sqs_error", status: response.status }
)
}
throw json
}
return json as T
return JSON.parse(text) as T
}
})
}
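
A usage sketch (assumed, not part of the commit): tryGet resolves to undefined on a 404 instead of throwing, and getMultiple can now skip fetching document bodies.

import { App, Database, Document } from "@budibase/types"

async function example(db: Database, appId: string, ids: string[]) {
  // undefined rather than a thrown error when the document is missing
  const app = await db.tryGet<App>(appId)
  if (!app) {
    return
  }
  // full documents by default, tolerating missing ids
  const docs = await db.getMultiple<Document>(ids, { allowMissing: true })
  // with excludeDocs, only the allDocs row values (revision info) come back, no bodies fetched
  const values = await db.getMultiple<Document>(ids, { excludeDocs: true })
  return { docs, values }
}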

View File

@ -42,6 +42,13 @@ export class DDInstrumentedDatabase implements Database {
})
}
tryGet<T extends Document>(id?: string | undefined): Promise<T | undefined> {
return tracer.trace("db.tryGet", span => {
span?.addTags({ db_name: this.name, doc_id: id })
return this.db.tryGet(id)
})
}
getMultiple<T extends Document>(
ids: string[],
opts?: { allowMissing?: boolean | undefined } | undefined

View File

@ -612,7 +612,6 @@ async function runQuery<T>(
* limit {number} The number of results to fetch
* bookmark {string|null} Current bookmark in the recursive search
* rows {array|null} Current results in the recursive search
* @returns {Promise<*[]|*>}
*/
async function recursiveSearch<T>(
dbName: string,

View File

@ -6,7 +6,7 @@ import {
ViewName,
} from "../constants"
import { getProdAppID } from "./conversions"
import { DatabaseQueryOpts } from "@budibase/types"
import { DatabaseQueryOpts, VirtualDocumentType } from "@budibase/types"
/**
* If creating DB allDocs/query params with only a single top level ID this can be used, this
@ -66,9 +66,8 @@ export function getQueryIndex(viewName: ViewName) {
/**
* Check if a given ID is that of a table.
* @returns {boolean}
*/
export const isTableId = (id: string) => {
export const isTableId = (id: string): boolean => {
// this includes datasource plus tables
return (
!!id &&
@ -77,13 +76,16 @@ export const isTableId = (id: string) => {
)
}
export function isViewId(id: string): boolean {
return !!id && id.startsWith(`${VirtualDocumentType.VIEW}${SEPARATOR}`)
}
/**
* Check if a given ID is that of a datasource or datasource plus.
* @returns {boolean}
*/
export const isDatasourceId = (id: string) => {
export const isDatasourceId = (id: string): boolean => {
// this covers both datasources and datasource plus
return id && id.startsWith(`${DocumentType.DATASOURCE}${SEPARATOR}`)
return !!id && id.startsWith(`${DocumentType.DATASOURCE}${SEPARATOR}`)
}
/**

View File

@ -0,0 +1,279 @@
import env from "../environment"
import * as crypto from "crypto"
import * as context from "../context"
import { PostHog, PostHogOptions } from "posthog-node"
import { FeatureFlag } from "@budibase/types"
import tracer from "dd-trace"
import { Duration } from "../utils"
let posthog: PostHog | undefined
export function init(opts?: PostHogOptions) {
if (
env.POSTHOG_TOKEN &&
env.POSTHOG_API_HOST &&
!env.SELF_HOSTED &&
env.POSTHOG_FEATURE_FLAGS_ENABLED
) {
console.log("initializing posthog client...")
posthog = new PostHog(env.POSTHOG_TOKEN, {
host: env.POSTHOG_API_HOST,
personalApiKey: env.POSTHOG_PERSONAL_TOKEN,
featureFlagsPollingInterval: Duration.fromMinutes(3).toMs(),
...opts,
})
} else {
console.log("posthog disabled")
}
}
export function shutdown() {
posthog?.shutdown()
}
export abstract class Flag<T> {
static boolean(defaultValue: boolean): Flag<boolean> {
return new BooleanFlag(defaultValue)
}
static string(defaultValue: string): Flag<string> {
return new StringFlag(defaultValue)
}
static number(defaultValue: number): Flag<number> {
return new NumberFlag(defaultValue)
}
protected constructor(public defaultValue: T) {}
abstract parse(value: any): T
}
type UnwrapFlag<F> = F extends Flag<infer U> ? U : never
export type FlagValues<T> = {
[K in keyof T]: UnwrapFlag<T[K]>
}
type KeysOfType<T, U> = {
[K in keyof T]: T[K] extends Flag<U> ? K : never
}[keyof T]
class BooleanFlag extends Flag<boolean> {
parse(value: any) {
if (typeof value === "string") {
return ["true", "t", "1"].includes(value.toLowerCase())
}
if (typeof value === "boolean") {
return value
}
throw new Error(`could not parse value "${value}" as boolean`)
}
}
class StringFlag extends Flag<string> {
parse(value: any) {
if (typeof value === "string") {
return value
}
throw new Error(`could not parse value "${value}" as string`)
}
}
class NumberFlag extends Flag<number> {
parse(value: any) {
if (typeof value === "number") {
return value
}
if (typeof value === "string") {
const parsed = parseFloat(value)
if (!isNaN(parsed)) {
return parsed
}
}
throw new Error(`could not parse value "${value}" as number`)
}
}
export interface EnvFlagEntry {
tenantId: string
key: string
value: boolean
}
export function parseEnvFlags(flags: string): EnvFlagEntry[] {
const split = flags.split(",").map(x => x.split(":"))
const result: EnvFlagEntry[] = []
for (const [tenantId, ...features] of split) {
for (let feature of features) {
let value = true
if (feature.startsWith("!")) {
feature = feature.slice(1)
value = false
}
result.push({ tenantId, key: feature, value })
}
}
return result
}
export class FlagSet<V extends Flag<any>, T extends { [key: string]: V }> {
// This is used to safely cache flag sets in the current request context.
// Because multiple sets could theoretically exist, we don't want the cache of
// one to leak into another.
private readonly setId: string
constructor(private readonly flagSchema: T) {
this.setId = crypto.randomUUID()
}
defaults(): FlagValues<T> {
return Object.keys(this.flagSchema).reduce((acc, key) => {
const typedKey = key as keyof T
acc[typedKey] = this.flagSchema[key].defaultValue
return acc
}, {} as FlagValues<T>)
}
isFlagName(name: string | number | symbol): name is keyof T {
return this.flagSchema[name as keyof T] !== undefined
}
async get<K extends keyof T>(key: K): Promise<FlagValues<T>[K]> {
const flags = await this.fetch()
return flags[key]
}
async isEnabled<K extends KeysOfType<T, boolean>>(key: K): Promise<boolean> {
const flags = await this.fetch()
return flags[key]
}
async fetch(): Promise<FlagValues<T>> {
return await tracer.trace("features.fetch", async span => {
const cachedFlags = context.getFeatureFlags<FlagValues<T>>(this.setId)
if (cachedFlags) {
span?.addTags({ fromCache: true })
return cachedFlags
}
const tags: Record<string, any> = {}
const flagValues = this.defaults()
const currentTenantId = context.getTenantId()
const specificallySetFalse = new Set<string>()
for (const { tenantId, key, value } of parseEnvFlags(
env.TENANT_FEATURE_FLAGS || ""
)) {
if (!tenantId || (tenantId !== "*" && tenantId !== currentTenantId)) {
continue
}
tags[`readFromEnvironmentVars`] = true
if (value === false) {
specificallySetFalse.add(key)
}
// ignore unknown flags
if (!this.isFlagName(key)) {
continue
}
if (typeof flagValues[key] !== "boolean") {
throw new Error(`Feature: ${key} is not a boolean`)
}
// @ts-expect-error - TS does not like you writing into a generic type,
// but we know that it's okay in this case because it's just an object.
flagValues[key as keyof FlagValues] = value
tags[`flags.${key}.source`] = "environment"
}
const identity = context.getIdentity()
let userId = identity?._id
if (!userId) {
const ip = context.getIP()
if (ip) {
userId = crypto.createHash("sha512").update(ip).digest("hex")
}
}
let tenantId = identity?.tenantId
if (!tenantId) {
tenantId = currentTenantId
}
tags[`identity.type`] = identity?.type
tags[`identity._id`] = identity?._id
tags[`tenantId`] = tenantId
tags[`userId`] = userId
if (posthog && userId) {
tags[`readFromPostHog`] = true
const personProperties: Record<string, string> = { tenantId }
const posthogFlags = await posthog.getAllFlagsAndPayloads(userId, {
personProperties,
})
for (const [name, value] of Object.entries(posthogFlags.featureFlags)) {
if (!this.isFlagName(name)) {
// We don't want an unexpected PostHog flag to break the app, so we
// just log it and continue.
console.warn(`Unexpected posthog flag "${name}": ${value}`)
continue
}
if (flagValues[name] === true || specificallySetFalse.has(name)) {
// If the flag is already set to true through environment variables, we
// don't want to override it back to false here.
continue
}
const payload = posthogFlags.featureFlagPayloads?.[name]
const flag = this.flagSchema[name]
try {
// @ts-expect-error - TS does not like you writing into a generic
// type, but we know that it's okay in this case because it's just
// an object.
flagValues[name] = flag.parse(payload || value)
tags[`flags.${name}.source`] = "posthog"
} catch (err) {
// We don't want an invalid PostHog flag to break the app, so we just
// log it and continue.
console.warn(`Error parsing posthog flag "${name}": ${value}`, err)
}
}
}
context.setFeatureFlags(this.setId, flagValues)
for (const [key, value] of Object.entries(flagValues)) {
tags[`flags.${key}.value`] = value
}
span?.addTags(tags)
return flagValues
})
}
}
// This is the primary source of truth for feature flags. If you want to add a
// new flag, add it here and use the `fetch` and `get` functions to access it.
// All of the machinery in this file is to make sure that flags have their
// default values set correctly and their types flow through the system.
export const flags = new FlagSet({
DEFAULT_VALUES: Flag.boolean(env.isDev()),
AUTOMATION_BRANCHING: Flag.boolean(env.isDev()),
SQS: Flag.boolean(env.isDev()),
[FeatureFlag.AI_CUSTOM_CONFIGS]: Flag.boolean(env.isDev()),
[FeatureFlag.ENRICHED_RELATIONSHIPS]: Flag.boolean(env.isDev()),
[FeatureFlag.TABLES_DEFAULT_ADMIN]: Flag.boolean(env.isDev()),
})
type UnwrapPromise<T> = T extends Promise<infer U> ? U : T
export type FeatureFlags = UnwrapPromise<ReturnType<typeof flags.fetch>>
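
Taken together, the resolution order here is: schema defaults, then TENANT_FEATURE_FLAGS, then PostHog (keyed by the user id, or a SHA-512 hash of the request IP when nobody is logged in). A minimal usage sketch, with hypothetical flag names:

const exampleFlags = new FlagSet({
  NEW_EDITOR: Flag.boolean(false),
  MAX_WIDGETS: Flag.number(10),
})

// e.g. TENANT_FEATURE_FLAGS="*:NEW_EDITOR,tenant1:!NEW_EDITOR" turns the flag on everywhere
// except tenant1; parseEnvFlags above handles the "!" negation.
async function readFlags() {
  // must run inside a tenant/identity (or IP) context so getTenantId/getIdentity resolve
  if (await exampleFlags.isEnabled("NEW_EDITOR")) {
    // feature enabled for this tenant/user
  }
  return await exampleFlags.get("MAX_WIDGETS")
}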

View File

@ -1,281 +1,2 @@
import env from "../environment"
import * as context from "../context"
import { PostHog, PostHogOptions } from "posthog-node"
import { FeatureFlag, IdentityType, UserCtx } from "@budibase/types"
import tracer from "dd-trace"
import { Duration } from "../utils"
let posthog: PostHog | undefined
export function init(opts?: PostHogOptions) {
if (
env.POSTHOG_TOKEN &&
env.POSTHOG_API_HOST &&
!env.SELF_HOSTED &&
env.POSTHOG_FEATURE_FLAGS_ENABLED
) {
console.log("initializing posthog client...")
posthog = new PostHog(env.POSTHOG_TOKEN, {
host: env.POSTHOG_API_HOST,
personalApiKey: env.POSTHOG_PERSONAL_TOKEN,
featureFlagsPollingInterval: Duration.fromMinutes(3).toMs(),
...opts,
})
} else {
console.log("posthog disabled")
}
}
export function shutdown() {
posthog?.shutdown()
}
export abstract class Flag<T> {
static boolean(defaultValue: boolean): Flag<boolean> {
return new BooleanFlag(defaultValue)
}
static string(defaultValue: string): Flag<string> {
return new StringFlag(defaultValue)
}
static number(defaultValue: number): Flag<number> {
return new NumberFlag(defaultValue)
}
protected constructor(public defaultValue: T) {}
abstract parse(value: any): T
}
type UnwrapFlag<F> = F extends Flag<infer U> ? U : never
export type FlagValues<T> = {
[K in keyof T]: UnwrapFlag<T[K]>
}
type KeysOfType<T, U> = {
[K in keyof T]: T[K] extends Flag<U> ? K : never
}[keyof T]
class BooleanFlag extends Flag<boolean> {
parse(value: any) {
if (typeof value === "string") {
return ["true", "t", "1"].includes(value.toLowerCase())
}
if (typeof value === "boolean") {
return value
}
throw new Error(`could not parse value "${value}" as boolean`)
}
}
class StringFlag extends Flag<string> {
parse(value: any) {
if (typeof value === "string") {
return value
}
throw new Error(`could not parse value "${value}" as string`)
}
}
class NumberFlag extends Flag<number> {
parse(value: any) {
if (typeof value === "number") {
return value
}
if (typeof value === "string") {
const parsed = parseFloat(value)
if (!isNaN(parsed)) {
return parsed
}
}
throw new Error(`could not parse value "${value}" as number`)
}
}
export class FlagSet<V extends Flag<any>, T extends { [key: string]: V }> {
// This is used to safely cache flags sets in the current request context.
// Because multiple sets could theoretically exist, we don't want the cache of
// one to leak into another.
private readonly setId: string
constructor(private readonly flagSchema: T) {
this.setId = crypto.randomUUID()
}
defaults(): FlagValues<T> {
return Object.keys(this.flagSchema).reduce((acc, key) => {
const typedKey = key as keyof T
acc[typedKey] = this.flagSchema[key].defaultValue
return acc
}, {} as FlagValues<T>)
}
isFlagName(name: string | number | symbol): name is keyof T {
return this.flagSchema[name as keyof T] !== undefined
}
async get<K extends keyof T>(
key: K,
ctx?: UserCtx
): Promise<FlagValues<T>[K]> {
const flags = await this.fetch(ctx)
return flags[key]
}
async isEnabled<K extends KeysOfType<T, boolean>>(
key: K,
ctx?: UserCtx
): Promise<boolean> {
const flags = await this.fetch(ctx)
return flags[key]
}
async fetch(ctx?: UserCtx): Promise<FlagValues<T>> {
return await tracer.trace("features.fetch", async span => {
const cachedFlags = context.getFeatureFlags<FlagValues<T>>(this.setId)
if (cachedFlags) {
span?.addTags({ fromCache: true })
return cachedFlags
}
const tags: Record<string, any> = {}
const flagValues = this.defaults()
const currentTenantId = context.getTenantId()
const specificallySetFalse = new Set<string>()
const split = (env.TENANT_FEATURE_FLAGS || "")
.split(",")
.map(x => x.split(":"))
for (const [tenantId, ...features] of split) {
if (!tenantId || (tenantId !== "*" && tenantId !== currentTenantId)) {
continue
}
tags[`readFromEnvironmentVars`] = true
for (let feature of features) {
let value = true
if (feature.startsWith("!")) {
feature = feature.slice(1)
value = false
specificallySetFalse.add(feature)
}
// ignore unknown flags
if (!this.isFlagName(feature)) {
continue
}
if (typeof flagValues[feature] !== "boolean") {
throw new Error(`Feature: ${feature} is not a boolean`)
}
// @ts-expect-error - TS does not like you writing into a generic type,
// but we know that it's okay in this case because it's just an object.
flagValues[feature as keyof FlagValues] = value
tags[`flags.${feature}.source`] = "environment"
}
}
const license = ctx?.user?.license
if (license) {
tags[`readFromLicense`] = true
for (const feature of license.features) {
if (!this.isFlagName(feature)) {
continue
}
if (
flagValues[feature] === true ||
specificallySetFalse.has(feature)
) {
// If the flag is already set to true through environment variables, we
// don't want to override it back to false here.
continue
}
// @ts-expect-error - TS does not like you writing into a generic type,
// but we know that it's okay in this case because it's just an object.
flagValues[feature] = true
tags[`flags.${feature}.source`] = "license"
}
}
const identity = context.getIdentity()
tags[`identity.type`] = identity?.type
tags[`identity.tenantId`] = identity?.tenantId
tags[`identity._id`] = identity?._id
if (posthog && identity?.type === IdentityType.USER) {
tags[`readFromPostHog`] = true
const personProperties: Record<string, string> = {}
if (identity.tenantId) {
personProperties.tenantId = identity.tenantId
}
const posthogFlags = await posthog.getAllFlagsAndPayloads(
identity._id,
{
personProperties,
}
)
for (const [name, value] of Object.entries(posthogFlags.featureFlags)) {
if (!this.isFlagName(name)) {
// We don't want an unexpected PostHog flag to break the app, so we
// just log it and continue.
console.warn(`Unexpected posthog flag "${name}": ${value}`)
continue
}
if (flagValues[name] === true || specificallySetFalse.has(name)) {
// If the flag is already set to true through environment variables, we
// don't want to override it back to false here.
continue
}
const payload = posthogFlags.featureFlagPayloads?.[name]
const flag = this.flagSchema[name]
try {
// @ts-expect-error - TS does not like you writing into a generic
// type, but we know that it's okay in this case because it's just
// an object.
flagValues[name] = flag.parse(payload || value)
tags[`flags.${name}.source`] = "posthog"
} catch (err) {
// We don't want an invalid PostHog flag to break the app, so we just
// log it and continue.
console.warn(`Error parsing posthog flag "${name}": ${value}`, err)
}
}
}
context.setFeatureFlags(this.setId, flagValues)
for (const [key, value] of Object.entries(flagValues)) {
tags[`flags.${key}.value`] = value
}
span?.addTags(tags)
return flagValues
})
}
}
// This is the primary source of truth for feature flags. If you want to add a
// new flag, add it here and use the `fetch` and `get` functions to access it.
// All of the machinery in this file is to make sure that flags have their
// default values set correctly and their types flow through the system.
export const flags = new FlagSet({
DEFAULT_VALUES: Flag.boolean(env.isDev()),
AUTOMATION_BRANCHING: Flag.boolean(env.isDev()),
SQS: Flag.boolean(env.isDev()),
[FeatureFlag.AI_CUSTOM_CONFIGS]: Flag.boolean(env.isDev()),
[FeatureFlag.ENRICHED_RELATIONSHIPS]: Flag.boolean(false),
})
export * from "./features"
export * as testutils from "./tests/utils"

View File

@ -1,9 +1,10 @@
import { IdentityContext, IdentityType, UserCtx } from "@budibase/types"
import { IdentityContext, IdentityType } from "@budibase/types"
import { Flag, FlagSet, FlagValues, init, shutdown } from "../"
import * as context from "../../context"
import environment, { withEnv } from "../../environment"
import nodeFetch from "node-fetch"
import nock from "nock"
import * as crypto from "crypto"
const schema = {
TEST_BOOLEAN: Flag.boolean(false),
@ -17,7 +18,6 @@ interface TestCase {
identity?: Partial<IdentityContext>
environmentFlags?: string
posthogFlags?: PostHogFlags
licenseFlags?: Array<string>
expected?: Partial<FlagValues<typeof schema>>
errorMessage?: string | RegExp
}
@ -27,10 +27,14 @@ interface PostHogFlags {
featureFlagPayloads?: Record<string, string>
}
function mockPosthogFlags(flags: PostHogFlags) {
function mockPosthogFlags(
flags: PostHogFlags,
opts?: { token?: string; distinct_id?: string }
) {
const { token = "test", distinct_id = "us_1234" } = opts || {}
nock("https://us.i.posthog.com")
.post("/decide/?v=3", body => {
return body.token === "test" && body.distinct_id === "us_1234"
return body.token === token && body.distinct_id === distinct_id
})
.reply(200, flags)
.persist()
@ -112,17 +116,6 @@ describe("feature flags", () => {
},
expected: { TEST_BOOLEAN: true },
},
{
it: "should be able to set boolean flags through the license",
licenseFlags: ["TEST_BOOLEAN"],
expected: { TEST_BOOLEAN: true },
},
{
it: "should not be able to override a negative environment flag from license",
environmentFlags: "default:!TEST_BOOLEAN",
licenseFlags: ["TEST_BOOLEAN"],
expected: { TEST_BOOLEAN: false },
},
{
it: "should not error on unrecognised PostHog flag",
posthogFlags: {
@ -130,18 +123,12 @@ describe("feature flags", () => {
},
expected: flags.defaults(),
},
{
it: "should not error on unrecognised license flag",
licenseFlags: ["UNDEFINED"],
expected: flags.defaults(),
},
])(
"$it",
async ({
identity,
environmentFlags,
posthogFlags,
licenseFlags,
expected,
errorMessage,
}) => {
@ -157,8 +144,6 @@ describe("feature flags", () => {
env.POSTHOG_API_HOST = "https://us.i.posthog.com"
}
const ctx = { user: { license: { features: licenseFlags || [] } } }
await withEnv(env, async () => {
// We need to pass in node-fetch here otherwise nock won't get used
// because posthog-node uses axios under the hood.
@ -180,18 +165,13 @@ describe("feature flags", () => {
await context.doInIdentityContext(fullIdentity, async () => {
if (errorMessage) {
await expect(flags.fetch(ctx as UserCtx)).rejects.toThrow(
errorMessage
)
await expect(flags.fetch()).rejects.toThrow(errorMessage)
} else if (expected) {
const values = await flags.fetch(ctx as UserCtx)
const values = await flags.fetch()
expect(values).toMatchObject(expected)
for (const [key, expectedValue] of Object.entries(expected)) {
const value = await flags.get(
key as keyof typeof schema,
ctx as UserCtx
)
const value = await flags.get(key as keyof typeof schema)
expect(value).toBe(expectedValue)
}
} else {
@ -214,6 +194,14 @@ describe("feature flags", () => {
lastName: "User",
}
// We need to pass in node-fetch here otherwise nock won't get used
// because posthog-node uses axios under the hood.
init({
fetch: (url, opts) => {
return nodeFetch(url, opts)
},
})
nock("https://us.i.posthog.com")
.post("/decide/?v=3", body => {
return body.token === "test" && body.distinct_id === "us_1234"
@ -230,4 +218,44 @@ describe("feature flags", () => {
}
)
})
it("should still get flags when user is logged out", async () => {
const env: Partial<typeof environment> = {
SELF_HOSTED: false,
POSTHOG_FEATURE_FLAGS_ENABLED: "true",
POSTHOG_API_HOST: "https://us.i.posthog.com",
POSTHOG_TOKEN: "test",
}
const ip = "127.0.0.1"
const hashedIp = crypto.createHash("sha512").update(ip).digest("hex")
await withEnv(env, async () => {
mockPosthogFlags(
{
featureFlags: { TEST_BOOLEAN: true },
},
{
distinct_id: hashedIp,
}
)
// We need to pass in node-fetch here otherwise nock won't get used
// because posthog-node uses axios under the hood.
init({
fetch: (url, opts) => {
return nodeFetch(url, opts)
},
})
await context.doInIPContext(ip, async () => {
await context.doInTenant("default", async () => {
const result = await flags.fetch()
expect(result.TEST_BOOLEAN).toBe(true)
})
})
shutdown()
})
})
})

View File

@ -0,0 +1,64 @@
import { FeatureFlags, parseEnvFlags } from ".."
import { setEnv } from "../../environment"
function getCurrentFlags(): Record<string, Record<string, boolean>> {
const result: Record<string, Record<string, boolean>> = {}
for (const { tenantId, key, value } of parseEnvFlags(
process.env.TENANT_FEATURE_FLAGS || ""
)) {
const tenantFlags = result[tenantId] || {}
// Don't allow overwriting specifically false flags, to match the behaviour
// of FlagSet.
if (tenantFlags[key] === false) {
continue
}
tenantFlags[key] = value
result[tenantId] = tenantFlags
}
return result
}
function buildFlagString(
flags: Record<string, Record<string, boolean>>
): string {
const parts: string[] = []
for (const [tenantId, tenantFlags] of Object.entries(flags)) {
for (const [key, value] of Object.entries(tenantFlags)) {
if (value === false) {
parts.push(`${tenantId}:!${key}`)
} else {
parts.push(`${tenantId}:${key}`)
}
}
}
return parts.join(",")
}
export function setFeatureFlags(
tenantId: string,
flags: Partial<FeatureFlags>
): () => void {
const current = getCurrentFlags()
for (const [key, value] of Object.entries(flags)) {
const tenantFlags = current[tenantId] || {}
tenantFlags[key] = value
current[tenantId] = tenantFlags
}
const flagString = buildFlagString(current)
return setEnv({ TENANT_FEATURE_FLAGS: flagString })
}
export function withFeatureFlags<T>(
tenantId: string,
flags: Partial<FeatureFlags>,
f: () => T
) {
const cleanup = setFeatureFlags(tenantId, flags)
const result = f()
if (result instanceof Promise) {
return result.finally(cleanup)
} else {
cleanup()
return result
}
}
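
A sketch of the intended test usage (SQS is one of the boolean flags defined in the flag set earlier; the wrapper restores TENANT_FEATURE_FLAGS afterwards):

async function exampleTest() {
  // apply a flag only for the duration of a callback...
  await withFeatureFlags("default", { SQS: true }, async () => {
    // code under test sees TENANT_FEATURE_FLAGS containing "default:SQS"
  })

  // ...or manage the lifetime manually
  const cleanup = setFeatureFlags("default", { SQS: true })
  try {
    // assertions here
  } finally {
    cleanup()
  }
}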

View File

@ -20,3 +20,4 @@ export { default as correlation } from "../logging/correlation/middleware"
export { default as errorHandling } from "./errorHandling"
export { default as querystringToBody } from "./querystringToBody"
export * as joiValidator from "./joi-validator"
export { default as ip } from "./ip"

View File

@ -0,0 +1,12 @@
import { Ctx } from "@budibase/types"
import { doInIPContext } from "../context"
export default async (ctx: Ctx, next: any) => {
if (ctx.ip) {
return await doInIPContext(ctx.ip, () => {
return next()
})
} else {
return next()
}
}
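
The middleware simply wraps downstream handlers in an IP context, which the feature-flag code above uses to derive a stable anonymous user id. A sketch of wiring it up, assuming a Koa app (the ctx/next signature matches Koa middleware):

import Koa from "koa"
import { middleware } from "@budibase/backend-core" // assumed import path; `ip` is exported from the middleware index above

const app = new Koa()
app.use(middleware.ip) // every request now runs inside doInIPContext(ctx.ip, ...)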

View File

@ -2,7 +2,6 @@ import { generateGlobalUserID } from "../../../db"
import { authError } from "../utils"
import * as users from "../../../users"
import * as context from "../../../context"
import fetch from "node-fetch"
import {
SaveSSOUserFunction,
SSOAuthDetails,
@ -97,28 +96,13 @@ export async function authenticate(
return done(null, ssoUser)
}
async function getProfilePictureUrl(user: User, details: SSOAuthDetails) {
const pictureUrl = details.profile?._json.picture
if (pictureUrl) {
const response = await fetch(pictureUrl)
if (response.status === 200) {
const type = response.headers.get("content-type") as string
if (type.startsWith("image/")) {
return pictureUrl
}
}
}
}
/**
* @returns a user that has been sync'd with third party information
*/
async function syncUser(user: User, details: SSOAuthDetails): Promise<SSOUser> {
let firstName
let lastName
let pictureUrl
let oauth2
let thirdPartyProfile
if (details.profile) {
const profile = details.profile
@ -134,12 +118,6 @@ async function syncUser(user: User, details: SSOAuthDetails): Promise<SSOUser> {
lastName = name.familyName
}
}
pictureUrl = await getProfilePictureUrl(user, details)
thirdPartyProfile = {
...profile._json,
}
}
// oauth tokens for future use
@ -155,8 +133,6 @@ async function syncUser(user: User, details: SSOAuthDetails): Promise<SSOUser> {
providerType: details.providerType,
firstName,
lastName,
thirdPartyProfile,
pictureUrl,
oauth2,
}
}

View File

@ -65,7 +65,13 @@ export enum BuiltinPermissionID {
POWER = "power",
}
export const BUILTIN_PERMISSIONS = {
export const BUILTIN_PERMISSIONS: {
[key in keyof typeof BuiltinPermissionID]: {
_id: (typeof BuiltinPermissionID)[key]
name: string
permissions: Permission[]
}
} = {
PUBLIC: {
_id: BuiltinPermissionID.PUBLIC,
name: "Public",

View File

@ -435,7 +435,9 @@ export function getExternalRoleID(roleId: string, version?: string) {
roleId.startsWith(DocumentType.ROLE) &&
(isBuiltin(roleId) || version === RoleIDVersion.NAME)
) {
return roleId.split(`${DocumentType.ROLE}${SEPARATOR}`)[1]
const parts = roleId.split(SEPARATOR)
parts.shift()
return parts.join(SEPARATOR)
}
return roleId
}
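
The change matters for role names that themselves contain the document-type prefix. A small illustration, assuming SEPARATOR is "_":

// a custom role named "role_creator" gets the document ID "role_role_creator"
const roleId = "role_role_creator"

// old behaviour: roleId.split("role_")[1] === "" and the name is lost
// new behaviour: strip only the leading "role" segment and rejoin the rest
const parts = roleId.split("_")
parts.shift()
console.log(parts.join("_")) // "role_creator"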

View File

@ -1,5 +1,5 @@
export * as utils from "./utils"
export { default as Sql } from "./sql"
export { default as Sql, COUNT_FIELD_NAME } from "./sql"
export { default as SqlTable } from "./sqlTable"
export * as designDoc from "./designDoc"

View File

@ -11,10 +11,12 @@ import {
} from "./utils"
import SqlTableQueryBuilder from "./sqlTable"
import {
Aggregation,
AnySearchFilter,
ArrayOperator,
BasicOperator,
BBReferenceFieldMetadata,
CalculationType,
FieldSchema,
FieldType,
INTERNAL_TABLE_SOURCE_ID,
@ -41,6 +43,8 @@ import { cloneDeep } from "lodash"
type QueryFunction = (query: SqlQuery | SqlQuery[], operation: Operation) => any
export const COUNT_FIELD_NAME = "__bb_total"
function getBaseLimit() {
const envLimit = environment.SQL_MAX_ROWS
? parseInt(environment.SQL_MAX_ROWS)
@ -69,18 +73,6 @@ function prioritisedArraySort(toSort: string[], priorities: string[]) {
})
}
function getTableName(table?: Table): string | undefined {
// SQS uses the table ID rather than the table name
if (
table?.sourceType === TableSourceType.INTERNAL ||
table?.sourceId === INTERNAL_TABLE_SOURCE_ID
) {
return table?._id
} else {
return table?.name
}
}
function convertBooleans(query: SqlQuery | SqlQuery[]): SqlQuery | SqlQuery[] {
if (Array.isArray(query)) {
return query.map((q: SqlQuery) => convertBooleans(q) as SqlQuery)
@ -97,6 +89,13 @@ function convertBooleans(query: SqlQuery | SqlQuery[]): SqlQuery | SqlQuery[] {
return query
}
function isSqs(table: Table): boolean {
return (
table.sourceType === TableSourceType.INTERNAL ||
table.sourceId === INTERNAL_TABLE_SOURCE_ID
)
}
class InternalBuilder {
private readonly client: SqlClient
private readonly query: QueryJson
@ -140,29 +139,61 @@ class InternalBuilder {
return this.table.schema[column]
}
// Takes a string like foo and returns a quoted string like [foo] for SQL Server
// and "foo" for Postgres.
private quote(str: string): string {
private quoteChars(): [string, string] {
switch (this.client) {
case SqlClient.SQL_LITE:
case SqlClient.ORACLE:
case SqlClient.POSTGRES:
return `"${str}"`
return ['"', '"']
case SqlClient.MS_SQL:
return `[${str}]`
return ["[", "]"]
case SqlClient.MARIADB:
case SqlClient.MY_SQL:
return `\`${str}\``
case SqlClient.SQL_LITE:
return ["`", "`"]
}
}
// Takes a string like a.b.c and returns a quoted identifier like [a].[b].[c]
// for SQL Server and `a`.`b`.`c` for MySQL.
private quotedIdentifier(key: string): string {
return key
.split(".")
.map(part => this.quote(part))
.join(".")
// Takes a string like foo and returns a quoted string like [foo] for SQL Server
// and "foo" for Postgres.
private quote(str: string): string {
const [start, end] = this.quoteChars()
return `${start}${str}${end}`
}
private isQuoted(key: string): boolean {
const [start, end] = this.quoteChars()
return key.startsWith(start) && key.endsWith(end)
}
// Takes a string like a.b.c or an array like ["a", "b", "c"] and returns a
// quoted identifier like [a].[b].[c] for SQL Server and `a`.`b`.`c` for
// MySQL.
private quotedIdentifier(key: string | string[]): string {
if (!Array.isArray(key)) {
key = this.splitIdentifier(key)
}
return key.map(part => this.quote(part)).join(".")
}
// Turns an identifier like a.b.c or `a`.`b`.`c` into ["a", "b", "c"]
private splitIdentifier(key: string): string[] {
const [start, end] = this.quoteChars()
if (this.isQuoted(key)) {
return key.slice(1, -1).split(`${end}.${start}`)
}
return key.split(".")
}
private qualifyIdentifier(key: string): string {
const tableName = this.getTableName()
const parts = this.splitIdentifier(key)
if (parts[0] !== tableName) {
parts.unshift(tableName)
}
if (this.isQuoted(key)) {
return this.quotedIdentifier(parts)
}
return parts.join(".")
}
private isFullSelectStatementRequired(): boolean {
@ -178,15 +209,13 @@ class InternalBuilder {
}
private generateSelectStatement(): (string | Knex.Raw)[] | "*" {
const { meta, endpoint, resource, tableAliases } = this.query
const { meta, endpoint, resource } = this.query
if (!resource || !resource.fields || resource.fields.length === 0) {
return "*"
}
const alias = tableAliases?.[endpoint.entityId]
? tableAliases?.[endpoint.entityId]
: endpoint.entityId
const alias = this.getTableName(endpoint.entityId)
const schema = meta.table.schema
if (!this.isFullSelectStatementRequired()) {
return [this.knex.raw(`${this.quote(alias)}.*`)]
@ -234,11 +263,17 @@ class InternalBuilder {
// OracleDB can't use character-large-objects (CLOBs) in WHERE clauses,
// so when we use them we need to wrap them in to_char(). This function
// converts a field name to the appropriate identifier.
private convertClobs(field: string): string {
const parts = field.split(".")
private convertClobs(field: string, opts?: { forSelect?: boolean }): string {
if (this.client !== SqlClient.ORACLE) {
throw new Error(
"you've called convertClobs on a DB that's not Oracle, this is a mistake"
)
}
const parts = this.splitIdentifier(field)
const col = parts.pop()!
const schema = this.table.schema[col]
let identifier = this.quotedIdentifier(field)
if (
schema.type === FieldType.STRING ||
schema.type === FieldType.LONGFORM ||
@ -247,7 +282,11 @@ class InternalBuilder {
schema.type === FieldType.OPTIONS ||
schema.type === FieldType.BARCODEQR
) {
identifier = `to_char(${identifier})`
if (opts?.forSelect) {
identifier = `to_char(${identifier}) as ${this.quotedIdentifier(col)}`
} else {
identifier = `to_char(${identifier})`
}
}
return identifier
}
@ -287,7 +326,7 @@ class InternalBuilder {
return input
}
private parseBody(body: any) {
private parseBody(body: Record<string, any>) {
for (let [key, value] of Object.entries(body)) {
const { column } = this.splitter.run(key)
const schema = this.table.schema[column]
@ -811,26 +850,125 @@ class InternalBuilder {
return query
}
isSqs(): boolean {
return isSqs(this.table)
}
getTableName(tableOrName?: Table | string): string {
let table: Table
if (typeof tableOrName === "string") {
const name = tableOrName
if (this.query.table?.name === name) {
table = this.query.table
} else if (this.query.meta.table?.name === name) {
table = this.query.meta.table
} else if (!this.query.meta.tables?.[name]) {
// This can legitimately happen in custom queries, where the user is
// querying against a table that may not have been imported into
// Budibase.
return name
} else {
table = this.query.meta.tables[name]
}
} else if (tableOrName) {
table = tableOrName
} else {
table = this.table
}
let name = table.name
if (isSqs(table) && table._id) {
// SQS uses the table ID rather than the table name
name = table._id
}
const aliases = this.query.tableAliases || {}
return aliases[name] ? aliases[name] : name
}
addDistinctCount(query: Knex.QueryBuilder): Knex.QueryBuilder {
const primary = this.table.primary
const aliases = this.query.tableAliases
const aliased =
this.table.name && aliases?.[this.table.name]
? aliases[this.table.name]
: this.table.name
if (!primary) {
if (!this.table.primary) {
throw new Error("SQL counting requires primary key to be supplied")
}
return query.countDistinct(`${aliased}.${primary[0]} as total`)
return query.countDistinct(
`${this.getTableName()}.${this.table.primary[0]} as ${COUNT_FIELD_NAME}`
)
}
addAggregations(
query: Knex.QueryBuilder,
aggregations: Aggregation[]
): Knex.QueryBuilder {
const fields = this.query.resource?.fields || []
const tableName = this.getTableName()
if (fields.length > 0) {
const qualifiedFields = fields.map(field => this.qualifyIdentifier(field))
if (this.client === SqlClient.ORACLE) {
const groupByFields = qualifiedFields.map(field =>
this.convertClobs(field)
)
const selectFields = qualifiedFields.map(field =>
this.convertClobs(field, { forSelect: true })
)
query = query
.groupByRaw(groupByFields.join(", "))
.select(this.knex.raw(selectFields.join(", ")))
} else {
query = query.groupBy(qualifiedFields).select(qualifiedFields)
}
}
for (const aggregation of aggregations) {
const op = aggregation.calculationType
if (op === CalculationType.COUNT) {
if ("distinct" in aggregation && aggregation.distinct) {
if (this.client === SqlClient.ORACLE) {
const field = this.convertClobs(`${tableName}.${aggregation.field}`)
query = query.select(
this.knex.raw(
`COUNT(DISTINCT ${field}) as ${this.quotedIdentifier(
aggregation.name
)}`
)
)
} else {
query = query.countDistinct(
`${tableName}.${aggregation.field} as ${aggregation.name}`
)
}
} else {
query = query.count(`* as ${aggregation.name}`)
}
} else {
const field = `${tableName}.${aggregation.field} as ${aggregation.name}`
switch (op) {
case CalculationType.SUM:
query = query.sum(field)
break
case CalculationType.AVG:
query = query.avg(field)
break
case CalculationType.MIN:
query = query.min(field)
break
case CalculationType.MAX:
query = query.max(field)
break
}
}
}
return query
}
isAggregateField(field: string): boolean {
const found = this.query.resource?.aggregations?.find(
aggregation => aggregation.name === field
)
return !!found
}
addSorting(query: Knex.QueryBuilder): Knex.QueryBuilder {
let { sort } = this.query
let { sort, resource } = this.query
const primaryKey = this.table.primary
const tableName = getTableName(this.table)
const aliases = this.query.tableAliases
const aliased =
tableName && aliases?.[tableName] ? aliases[tableName] : this.table?.name
const aliased = this.getTableName()
if (!Array.isArray(primaryKey)) {
throw new Error("Sorting requires primary key to be specified for table")
}
@ -849,20 +987,25 @@ class InternalBuilder {
nulls = value.direction === SortOrder.ASCENDING ? "first" : "last"
}
let composite = `${aliased}.${key}`
if (this.client === SqlClient.ORACLE) {
query = query.orderByRaw(
`${this.convertClobs(composite)} ${direction} nulls ${nulls}`
)
if (this.isAggregateField(key)) {
query = query.orderBy(key, direction, nulls)
} else {
query = query.orderBy(composite, direction, nulls)
let composite = `${aliased}.${key}`
if (this.client === SqlClient.ORACLE) {
query = query.orderByRaw(
`${this.convertClobs(composite)} ${direction} nulls ${nulls}`
)
} else {
query = query.orderBy(composite, direction, nulls)
}
}
}
}
// add sorting by the primary key if the result isn't already sorted by it,
// to make sure result is deterministic
if (!sort || sort[primaryKey[0]] === undefined) {
const hasAggregations = (resource?.aggregations?.length ?? 0) > 0
if (!hasAggregations && (!sort || sort[primaryKey[0]] === undefined)) {
query = query.orderBy(`${aliased}.${primaryKey[0]}`)
}
return query
@ -1128,6 +1271,10 @@ class InternalBuilder {
create(opts: QueryOptions): Knex.QueryBuilder {
const { body } = this.query
if (!body) {
throw new Error("Cannot create without row body")
}
let query = this.qualifiedKnex({ alias: false })
const parsedBody = this.parseBody(body)
@ -1246,10 +1393,15 @@ class InternalBuilder {
}
}
// if counting, use distinct count, else select
query = !counting
? query.select(this.generateSelectStatement())
: this.addDistinctCount(query)
const aggregations = this.query.resource?.aggregations || []
if (counting) {
query = this.addDistinctCount(query)
} else if (aggregations.length > 0) {
query = this.addAggregations(query, aggregations)
} else {
query = query.select(this.generateSelectStatement())
}
// have to add after as well (this breaks MS-SQL)
if (!counting) {
query = this.addSorting(query)
@ -1281,6 +1433,9 @@ class InternalBuilder {
update(opts: QueryOptions): Knex.QueryBuilder {
const { body, filters } = this.query
if (!body) {
throw new Error("Cannot update without row body")
}
let query = this.qualifiedKnex()
const parsedBody = this.parseBody(body)
query = this.addFilters(query, filters)
@ -1468,23 +1623,40 @@ class SqlQueryBuilder extends SqlTableQueryBuilder {
return results.length ? results : [{ [operation.toLowerCase()]: true }]
}
private getTableName(
table: Table,
aliases?: Record<string, string>
): string | undefined {
let name = table.name
if (
table.sourceType === TableSourceType.INTERNAL ||
table.sourceId === INTERNAL_TABLE_SOURCE_ID
) {
if (!table._id) {
return
}
// SQS uses the table ID rather than the table name
name = table._id
}
return aliases?.[name] || name
}
convertJsonStringColumns<T extends Record<string, any>>(
table: Table,
results: T[],
aliases?: Record<string, string>
): T[] {
const tableName = getTableName(table)
const tableName = this.getTableName(table, aliases)
for (const [name, field] of Object.entries(table.schema)) {
if (!this._isJsonColumn(field)) {
continue
}
const aliasedTableName = (tableName && aliases?.[tableName]) || tableName
const fullName = `${aliasedTableName}.${name}`
const fullName = `${tableName}.${name}` as keyof T
for (let row of results) {
if (typeof row[fullName as keyof T] === "string") {
row[fullName as keyof T] = JSON.parse(row[fullName])
if (typeof row[fullName] === "string") {
row[fullName] = JSON.parse(row[fullName])
}
if (typeof row[name as keyof T] === "string") {
if (typeof row[name] === "string") {
row[name as keyof T] = JSON.parse(row[name])
}
}
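
For context, a sketch of the aggregation descriptors addAggregations consumes; the exact request shape is an assumption based on the fields read above (name, field, calculationType, optional distinct):

import { CalculationType } from "@budibase/types"

const aggregations = [
  { name: "total", field: "amount", calculationType: CalculationType.SUM },
  { name: "customers", field: "user_id", calculationType: CalculationType.COUNT, distinct: true },
]
// With resource.fields set, those fields become GROUP BY columns and each aggregation is selected
// as e.g. SUM("alias"."amount") AS "total". The plain row-count path still returns its total under
// COUNT_FIELD_NAME ("__bb_total").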

View File

@ -17,16 +17,14 @@ import {
ContextUser,
CouchFindOptions,
DatabaseQueryOpts,
SearchFilters,
SearchUsersRequest,
User,
BasicOperator,
ArrayOperator,
} from "@budibase/types"
import * as context from "../context"
import { getGlobalDB } from "../context"
import { isCreator } from "./utils"
import { UserDB } from "./db"
import { dataFilters } from "@budibase/shared-core"
type GetOpts = { cleanup?: boolean }
@ -45,32 +43,6 @@ function removeUserPassword(users: User | User[]) {
return users
}
export function isSupportedUserSearch(query: SearchFilters) {
const allowed = [
{ op: BasicOperator.STRING, key: "email" },
{ op: BasicOperator.EQUAL, key: "_id" },
{ op: ArrayOperator.ONE_OF, key: "_id" },
]
for (let [key, operation] of Object.entries(query)) {
if (typeof operation !== "object") {
return false
}
const fields = Object.keys(operation || {})
// this filter doesn't contain options - ignore
if (fields.length === 0) {
continue
}
const allowedOperation = allowed.find(
allow =>
allow.op === key && fields.length === 1 && fields[0] === allow.key
)
if (!allowedOperation) {
return false
}
}
return true
}
export async function bulkGetGlobalUsersById(
userIds: string[],
opts?: GetOpts
@ -291,10 +263,17 @@ export async function paginatedUsers({
userList = await bulkGetGlobalUsersById(query?.oneOf?._id, {
cleanup: true,
})
} else if (query) {
// TODO: this should use SQS search, but the logic is built in the 'server' package. Using in-memory filtering to get this working in the meantime
const response = await db.allDocs<User>(
getGlobalUserParams(null, { ...opts, limit: undefined })
)
userList = response.rows.map(row => row.doc!)
userList = dataFilters.search(userList, { query, limit: opts.limit }).rows
} else {
// no search, query allDocs
const response = await db.allDocs(getGlobalUserParams(null, opts))
userList = response.rows.map((row: any) => row.doc)
const response = await db.allDocs<User>(getGlobalUserParams(null, opts))
userList = response.rows.map(row => row.doc!)
}
return pagination(userList, pageSize, {
paginate: true,

View File

@ -6,9 +6,6 @@ import {
AccountSSOProviderType,
AuthType,
CloudAccount,
CreateAccount,
CreatePassswordAccount,
CreateVerifiableSSOAccount,
Hosting,
SSOAccount,
} from "@budibase/types"
@ -19,6 +16,7 @@ export const account = (partial: Partial<Account> = {}): Account => {
accountId: uuid(),
tenantId: generator.word(),
email: generator.email({ domain: "example.com" }),
accountName: generator.word(),
tenantName: generator.word(),
hosting: Hosting.SELF,
createdAt: Date.now(),
@ -61,10 +59,8 @@ export function ssoAccount(account: Account = cloudAccount()): SSOAccount {
accessToken: generator.string(),
refreshToken: generator.string(),
},
pictureUrl: generator.url(),
provider: provider(),
providerType: providerType(),
thirdPartyProfile: {},
}
}
@ -78,68 +74,7 @@ export function verifiableSsoAccount(
accessToken: generator.string(),
refreshToken: generator.string(),
},
pictureUrl: generator.url(),
provider: AccountSSOProvider.MICROSOFT,
providerType: AccountSSOProviderType.MICROSOFT,
thirdPartyProfile: { id: "abc123" },
}
}
export const cloudCreateAccount: CreatePassswordAccount = {
email: "cloud@budibase.com",
tenantId: "cloud",
hosting: Hosting.CLOUD,
authType: AuthType.PASSWORD,
password: "Password123!",
tenantName: "cloud",
name: "Budi Armstrong",
size: "10+",
profession: "Software Engineer",
}
export const cloudSSOCreateAccount: CreateAccount = {
email: "cloud-sso@budibase.com",
tenantId: "cloud-sso",
hosting: Hosting.CLOUD,
authType: AuthType.SSO,
tenantName: "cloudsso",
name: "Budi Armstrong",
size: "10+",
profession: "Software Engineer",
}
export const cloudVerifiableSSOCreateAccount: CreateVerifiableSSOAccount = {
email: "cloud-sso@budibase.com",
tenantId: "cloud-sso",
hosting: Hosting.CLOUD,
authType: AuthType.SSO,
tenantName: "cloudsso",
name: "Budi Armstrong",
size: "10+",
profession: "Software Engineer",
provider: AccountSSOProvider.MICROSOFT,
thirdPartyProfile: { id: "abc123" },
}
export const selfCreateAccount: CreatePassswordAccount = {
email: "self@budibase.com",
tenantId: "self",
hosting: Hosting.SELF,
authType: AuthType.PASSWORD,
password: "Password123!",
tenantName: "self",
name: "Budi Armstrong",
size: "10+",
profession: "Software Engineer",
}
export const selfSSOCreateAccount: CreateAccount = {
email: "self-sso@budibase.com",
tenantId: "self-sso",
hosting: Hosting.SELF,
authType: AuthType.SSO,
tenantName: "selfsso",
name: "Budi Armstrong",
size: "10+",
profession: "Software Engineer",
}

View File

@ -25,7 +25,6 @@ export const user = (userProps?: Partial<Omit<User, "userId">>): User => {
roles: { app_test: "admin" },
firstName: generator.first(),
lastName: generator.last(),
pictureUrl: "http://example.com",
tenantId: tenant.id(),
...userProps,
}
@ -86,9 +85,5 @@ export function ssoUser(
oauth2: opts.details?.oauth2,
provider: opts.details?.provider!,
providerType: opts.details?.providerType!,
thirdPartyProfile: {
email: base.email,
picture: base.pictureUrl,
},
}
}

View File

@ -0,0 +1,29 @@
<script>
export let width
export let height
</script>
<svg
{width}
{height}
viewBox="0 0 13 12"
fill="none"
xmlns="http://www.w3.org/2000/svg"
>
<path
d="M9.4179 4.13222C9.4179 3.73121 9.26166 3.35428 8.97913 3.07175C8.41342 2.50538 7.4239 2.50408 6.85753 3.07175L5.64342 4.28586C5.6291 4.30018 5.61543 4.3158 5.60305 4.33143C5.58678 4.3438 5.5718 4.35747 5.55683 4.37244L0.491426 9.43785C0.208245 9.72103 0.052002 10.098 0.052002 10.4983C0.052002 10.8987 0.208245 11.2756 0.491426 11.5588C0.774607 11.842 1.15153 11.9982 1.5519 11.9982C1.95227 11.9982 2.32919 11.842 2.61238 11.5588L8.97848 5.1927C9.26166 4.90952 9.4179 4.53259 9.4179 4.13222ZM1.90539 10.8518C1.7166 11.0406 1.3872 11.0406 1.1984 10.8518C1.10401 10.7574 1.05193 10.6318 1.05193 10.4983C1.05193 10.3649 1.104 10.2392 1.1984 10.1448L5.99821 5.34503L6.70845 6.04875L1.90539 10.8518ZM8.2715 4.48571L7.41544 5.34178L6.7052 4.63805L7.56452 3.77873C7.7533 3.58995 8.08271 3.58929 8.2715 3.77939C8.36589 3.87313 8.41798 3.99877 8.41798 4.13223C8.41798 4.26569 8.3659 4.39132 8.2715 4.48571Z"
fill="#C8C8C8"
/>
<path
d="M11.8552 6.55146L11.0144 6.21913L10.879 5.32449C10.8356 5.03919 10.3737 4.98776 10.2686 5.255L9.93606 6.09642L9.04143 6.23085C8.89951 6.25216 8.78884 6.36658 8.77257 6.50947C8.75629 6.65253 8.83783 6.78826 8.97193 6.84148L9.81335 7.17464L9.94794 8.06862C9.9691 8.21053 10.0835 8.32121 10.2266 8.33748C10.3695 8.35375 10.5052 8.27221 10.5586 8.13811L10.8914 7.29751L11.7855 7.1621C11.9283 7.1403 12.0381 7.02637 12.0544 6.88348C12.0707 6.74058 11.9887 6.60403 11.8552 6.55146Z"
fill="#F9634C"
/>
<path
d="M8.94215 1.76145L9.78356 2.0946L9.91815 2.9885C9.93931 3.13049 10.0539 3.24117 10.1968 3.25744C10.3398 3.27371 10.4756 3.19218 10.5288 3.05807L10.8618 2.21739L11.7559 2.08207C11.8985 2.06034 12.0085 1.94633 12.0248 1.80344C12.0411 1.66054 11.959 1.524 11.8254 1.47143L10.9847 1.13909L10.8494 0.244456C10.806 -0.0409246 10.3439 -0.0922745 10.2388 0.174881L9.90643 1.0163L9.0118 1.15089C8.86972 1.17213 8.75905 1.28654 8.74278 1.42952C8.72651 1.57249 8.80804 1.70823 8.94215 1.76145Z"
fill="#8488FD"
/>
<path
d="M3.2379 2.46066L3.92063 2.73091L4.02984 3.45637C4.04709 3.57151 4.14002 3.66135 4.25606 3.67453C4.37194 3.6878 4.48212 3.62163 4.52541 3.51276L4.79557 2.83059L5.52094 2.72074C5.63682 2.70316 5.72601 2.61072 5.73936 2.49468C5.75254 2.37864 5.68597 2.26797 5.57758 2.22533L4.89533 1.95565L4.78548 1.22963C4.75016 0.998038 4.37535 0.956375 4.29007 1.17315L4.0204 1.85597L3.29437 1.96517C3.17915 1.98235 3.08931 2.07527 3.07613 2.19131C3.06294 2.30727 3.12902 2.41737 3.2379 2.46066Z"
fill="#F7D804"
/>
</svg>

View File

@ -67,6 +67,7 @@
"@spectrum-css/vars": "^3.0.1",
"@zerodevx/svelte-json-view": "^1.0.7",
"codemirror": "^5.65.16",
"cron-parser": "^4.9.0",
"dayjs": "^1.10.8",
"downloadjs": "1.4.7",
"fast-json-patch": "^3.1.1",

View File

@ -641,6 +641,8 @@
let hasUserDefinedName = automation.stepNames?.[allSteps[idx]?.id]
if (isLoopBlock) {
runtimeName = `loop.${name}`
} else if (idx === 0) {
runtimeName = `trigger.${name}`
} else if (block.name.startsWith("JS")) {
runtimeName = hasUserDefinedName
? `stepsByName["${bindingName}"].${name}`
@ -650,7 +652,7 @@
? `stepsByName.${bindingName}.${name}`
: `steps.${idx - loopBlockCount}.${name}`
}
return idx === 0 ? `trigger.${name}` : runtimeName
return runtimeName
}
const determineCategoryName = (idx, isLoopBlock, bindingName) => {
@ -677,7 +679,7 @@
)
return {
readableBinding:
bindingName && !isLoopBlock
bindingName && !isLoopBlock && idx !== 0
? `steps.${bindingName}.${name}`
: runtimeBinding,
runtimeBinding,
@ -1048,7 +1050,7 @@
{:else if value.customType === "cron"}
<CronBuilder
on:change={e => onChange({ [key]: e.detail })}
value={inputData[key]}
cronExpression={inputData[key]}
/>
{:else if value.customType === "automationFields"}
<AutomationSelector

View File

@ -1,41 +1,70 @@
<script>
import { Button, Select, Input, Label } from "@budibase/bbui"
import {
Select,
InlineAlert,
Input,
Label,
Layout,
notifications,
} from "@budibase/bbui"
import { onMount, createEventDispatcher } from "svelte"
import { flags } from "stores/builder"
import { licensing } from "stores/portal"
import { API } from "api"
import MagicWand from "../../../../assets/MagicWand.svelte"
import { helpers, REBOOT_CRON } from "@budibase/shared-core"
const dispatch = createEventDispatcher()
export let value
export let cronExpression
let error
let nextExecutions
// AI prompt
let aiCronPrompt = ""
let loadingAICronExpression = false
$: aiEnabled =
$licensing.customAIConfigsEnabled || $licensing.budibaseAIEnabled
$: {
const exists = CRON_EXPRESSIONS.some(cron => cron.value === value)
const customIndex = CRON_EXPRESSIONS.findIndex(
cron => cron.label === "Custom"
)
if (!exists && customIndex === -1) {
CRON_EXPRESSIONS[0] = { label: "Custom", value: value }
} else if (exists && customIndex !== -1) {
CRON_EXPRESSIONS.splice(customIndex, 1)
if (cronExpression) {
try {
nextExecutions = helpers.cron
.getNextExecutionDates(cronExpression)
.join("\n")
} catch (err) {
nextExecutions = null
}
}
}
const onChange = e => {
if (value !== REBOOT_CRON) {
if (e.detail !== REBOOT_CRON) {
error = helpers.cron.validate(e.detail).err
}
if (e.detail === value || error) {
if (e.detail === cronExpression || error) {
return
}
value = e.detail
cronExpression = e.detail
dispatch("change", e.detail)
}
const updatePreset = e => {
aiCronPrompt = ""
onChange(e)
}
const updateCronExpression = e => {
aiCronPrompt = ""
cronExpression = null
nextExecutions = null
onChange(e)
}
let touched = false
let presets = false
const CRON_EXPRESSIONS = [
{
@ -64,45 +93,130 @@
})
}
})
async function generateAICronExpression() {
loadingAICronExpression = true
try {
const response = await API.generateCronExpression({
prompt: aiCronPrompt,
})
cronExpression = response.message
dispatch("change", response.message)
} catch (err) {
notifications.error(err.message)
} finally {
loadingAICronExpression = false
}
}
</script>
<div class="block-field">
<!-- svelte-ignore a11y-click-events-have-key-events -->
<!-- svelte-ignore a11y-no-static-element-interactions -->
<Layout noPadding gap="S">
<Select
on:change={updatePreset}
value={cronExpression || "Custom"}
secondary
extraThin
label="Use a Preset (Optional)"
options={CRON_EXPRESSIONS}
/>
{#if aiEnabled}
<div class="cron-ai-generator">
<Input
bind:value={aiCronPrompt}
label="Generate Cron Expression with AI"
size="S"
placeholder="Run every hour between 1pm to 4pm everyday of the week"
/>
{#if aiCronPrompt}
<div
class="icon"
class:pulsing-text={loadingAICronExpression}
on:click={generateAICronExpression}
>
<MagicWand height="17" width="17" />
</div>
{/if}
</div>
{/if}
<Input
label="Cron Expression"
{error}
on:change={onChange}
{value}
on:change={updateCronExpression}
value={cronExpression}
on:blur={() => (touched = true)}
updateOnChange={false}
/>
{#if touched && !value}
{#if touched && !cronExpression}
<Label><div class="error">Please specify a CRON expression</div></Label>
{/if}
<div class="presets">
<Button on:click={() => (presets = !presets)}
>{presets ? "Hide" : "Show"} Presets</Button
>
{#if presets}
<Select
on:change={onChange}
value={value || "Custom"}
secondary
extraThin
label="Presets"
options={CRON_EXPRESSIONS}
/>
{/if}
</div>
</div>
{#if nextExecutions}
<InlineAlert
type="info"
header="Next Executions"
message={nextExecutions}
/>
{/if}
</Layout>
<style>
.presets {
margin-top: var(--spacing-m);
.cron-ai-generator {
flex: 1;
position: relative;
}
.block-field {
padding-top: var(--spacing-s);
.icon {
right: 1px;
bottom: 1px;
position: absolute;
justify-content: center;
align-items: center;
display: flex;
flex-direction: row;
box-sizing: border-box;
border-left: 1px solid var(--spectrum-alias-border-color);
border-top-right-radius: var(--spectrum-alias-border-radius-regular);
border-bottom-right-radius: var(--spectrum-alias-border-radius-regular);
width: 31px;
color: var(--spectrum-alias-text-color);
background-color: var(--spectrum-global-color-gray-75);
transition: background-color
var(--spectrum-global-animation-duration-100, 130ms),
box-shadow var(--spectrum-global-animation-duration-100, 130ms),
border-color var(--spectrum-global-animation-duration-100, 130ms);
height: calc(var(--spectrum-alias-item-height-m) - 2px);
}
.icon:hover {
cursor: pointer;
color: var(--spectrum-alias-text-color-hover);
background-color: var(--spectrum-global-color-gray-50);
border-color: var(--spectrum-alias-border-color-hover);
}
.error {
padding-top: var(--spacing-xs);
color: var(--spectrum-global-color-red-500);
}
.pulsing-text {
font-size: 24px;
font-weight: bold;
animation: pulse 1.5s infinite;
}
@keyframes pulse {
0% {
opacity: 0.3;
transform: scale(1);
}
50% {
opacity: 1;
transform: scale(1.05);
}
100% {
opacity: 0.3;
transform: scale(1);
}
}
</style>

View File

@ -6,7 +6,7 @@
import { TableNames, UNEDITABLE_USER_FIELDS } from "constants"
import RoleCell from "./cells/RoleCell.svelte"
import { createEventDispatcher } from "svelte"
import { canBeSortColumn } from "@budibase/shared-core"
import { canBeSortColumn } from "@budibase/frontend-core"
export let schema = {}
export let data = []
@ -31,7 +31,7 @@
acc[key] =
typeof schema[key] === "string" ? { type: schema[key] } : schema[key]
if (!canBeSortColumn(acc[key].type)) {
if (!canBeSortColumn(acc[key])) {
acc[key].sortable = false
}
return acc

View File

@ -1,6 +1,6 @@
<script>
import { viewsV2 } from "stores/builder"
import { admin } from "stores/portal"
import { admin, themeStore } from "stores/portal"
import { Grid } from "@budibase/frontend-core"
import { API } from "api"
import GridCreateEditRowModal from "components/backend/DataTable/modals/grid/GridCreateEditRowModal.svelte"
@ -16,6 +16,9 @@
tableId: $viewsV2.selected?.tableId,
}
$: currentTheme = $themeStore?.theme
$: darkMode = !currentTheme.includes("light")
const handleGridViewUpdate = async e => {
viewsV2.replaceView(id, e.detail)
}
@ -25,6 +28,7 @@
<Grid
{API}
{datasource}
{darkMode}
allowAddRows
allowDeleteRows
showAvatars={false}

View File

@ -19,9 +19,9 @@
helpers,
PROTECTED_INTERNAL_COLUMNS,
PROTECTED_EXTERNAL_COLUMNS,
canBeDisplayColumn,
canHaveDefaultColumn,
} from "@budibase/shared-core"
import { makePropSafe } from "@budibase/string-templates"
import { createEventDispatcher, getContext, onMount } from "svelte"
import { cloneDeep } from "lodash/fp"
import { tables, datasources } from "stores/builder"
@ -43,10 +43,11 @@
SourceName,
} from "@budibase/types"
import RelationshipSelector from "components/common/RelationshipSelector.svelte"
import { RowUtils } from "@budibase/frontend-core"
import { RowUtils, canBeDisplayColumn } from "@budibase/frontend-core"
import ServerBindingPanel from "components/common/bindings/ServerBindingPanel.svelte"
import OptionsEditor from "./OptionsEditor.svelte"
import { isEnabled } from "helpers/featureFlags"
import { getUserBindings } from "dataBinding"
const AUTO_TYPE = FieldType.AUTO
const FORMULA_TYPE = FieldType.FORMULA
@ -166,7 +167,7 @@
: availableAutoColumns
// used to select what different options can be displayed for column type
$: canBeDisplay =
canBeDisplayColumn(editableColumn.type) && !editableColumn.autocolumn
canBeDisplayColumn(editableColumn) && !editableColumn.autocolumn
$: canHaveDefault =
isEnabled("DEFAULT_VALUES") && canHaveDefaultColumn(editableColumn.type)
$: canBeRequired =
@ -192,6 +193,19 @@
fieldId: makeFieldId(t.type, t.subtype),
...t,
}))
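// Bindings offered when editing a column's default value: the server date plus user bindings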
$: defaultValueBindings = [
{
type: "context",
runtimeBinding: `${makePropSafe("now")}`,
readableBinding: `Date`,
category: "Date",
icon: "Date",
display: {
name: "Server date",
},
},
...getUserBindings(),
]
const fieldDefinitions = Object.values(FIELDS).reduce(
// Storing the fields by complex field id
@ -782,9 +796,8 @@
setRequired(false)
}
}}
bindings={getBindings({ table })}
bindings={defaultValueBindings}
allowJS
context={rowGoldenSample}
/>
</div>
{/if}

View File

@ -94,7 +94,7 @@
loadDependantInfo()
</script>
<ModalContent showCancelButton={false} confirmText="Done">
<ModalContent showCancelButton={false} showConfirmButton={false}>
<span slot="header">
Manage Access
{#if requiresPlanToModify}

View File

@ -2,7 +2,12 @@
import { getContext } from "svelte"
import CreateEditColumn from "components/backend/DataTable/modals/CreateEditColumn.svelte"
const { datasource } = getContext("grid")
const { datasource, rows } = getContext("grid")
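// After a column is created or edited, refresh the definition and reload rows so the grid reflects the change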
const onUpdate = async () => {
await datasource.actions.refreshDefinition()
await rows.actions.refreshData()
}
</script>
<CreateEditColumn on:updatecolumns={datasource.actions.refreshDefinition} />
<CreateEditColumn on:updatecolumns={onUpdate} />

View File

@ -1,7 +1,8 @@
<script>
import { Select, Icon } from "@budibase/bbui"
import { FIELDS } from "constants/backend"
import { canBeDisplayColumn, utils } from "@budibase/shared-core"
import { utils } from "@budibase/shared-core"
import { canBeDisplayColumn } from "@budibase/frontend-core"
import { API } from "api"
import { parseFile } from "./utils"
@ -100,10 +101,10 @@
let rawRows = []
$: displayColumnOptions = Object.keys(schema || {}).filter(column => {
return validation[column] && canBeDisplayColumn(schema[column].type)
return validation[column] && canBeDisplayColumn(schema[column])
})
$: if (displayColumn && !canBeDisplayColumn(schema[displayColumn].type)) {
$: if (displayColumn && !canBeDisplayColumn(schema[displayColumn])) {
displayColumn = null
}

View File

@ -66,6 +66,7 @@
let insertAtPos
let targetMode = null
let expressionResult
let expressionError
let evaluating = false
$: useSnippets = allowSnippets && !$licensing.isFreePlan
@ -142,10 +143,22 @@
}
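// Debounced evaluation of the expression, capturing evaluation errors so they can be surfaced rather than thrown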
const debouncedEval = Utils.debounce((expression, context, snippets) => {
expressionResult = processStringSync(expression || "", {
...context,
snippets,
})
try {
expressionError = null
expressionResult = processStringSync(
expression || "",
{
...context,
snippets,
},
{
noThrow: false,
}
)
} catch (err) {
expressionResult = null
expressionError = err
}
evaluating = false
}, 260)
@ -370,6 +383,7 @@
{:else if sidePanel === SidePanels.Evaluation}
<EvaluationSidePanel
{expressionResult}
{expressionError}
{evaluating}
expression={editorValue}
/>

View File

@ -3,26 +3,37 @@
import { Icon, ProgressCircle, notifications } from "@budibase/bbui"
import { copyToClipboard } from "@budibase/bbui/helpers"
import { fade } from "svelte/transition"
import { UserScriptError } from "@budibase/string-templates"
export let expressionResult
export let expressionError
export let evaluating = false
export let expression = null
$: error = expressionResult === "Error while executing JS"
$: error = expressionError != null
$: empty = expression == null || expression?.trim() === ""
$: success = !error && !empty
$: highlightedResult = highlight(expressionResult)
const formatError = err => {
if (err.code === UserScriptError.code) {
return err.userScriptError.toString()
}
return err.toString()
}
const highlight = json => {
if (json == null) {
return ""
}
// Attempt to parse and then stringify, in case this is valid JSON
// Attempt to parse and then stringify, in case this is a valid result
try {
json = JSON.stringify(JSON.parse(json), null, 2)
} catch (err) {
// Ignore
}
return formatHighlight(json, {
keyColor: "#e06c75",
numberColor: "#e5c07b",
@ -34,7 +45,7 @@
}
const copy = () => {
let clipboardVal = expressionResult
let clipboardVal = expressionResult.result
if (typeof clipboardVal === "object") {
clipboardVal = JSON.stringify(clipboardVal, null, 2)
}
@ -73,6 +84,8 @@
<div class="body">
{#if empty}
Your expression will be evaluated here
{:else if error}
{formatError(expressionError)}
{:else}
<!-- eslint-disable-next-line svelte/no-at-html-tags-->
{@html highlightedResult}

View File

@ -1,4 +1,5 @@
<script>
import { enrichSchemaWithRelColumns } from "@budibase/frontend-core"
import { getDatasourceForProvider, getSchemaForDatasource } from "dataBinding"
import { selectedScreen, componentStore } from "stores/builder"
import DraggableList from "../DraggableList/DraggableList.svelte"
@ -27,7 +28,8 @@
delete schema._rev
}
return schema
const result = enrichSchemaWithRelColumns(schema)
return result
}
$: datasource = getDatasourceForProvider($selectedScreen, componentInstance)

View File

@ -82,7 +82,7 @@ const toDraggableListFormat = (gridFormatColumns, createComponent, schema) => {
active: column.active,
field: column.field,
label: column.label,
columnType: schema[column.field].type,
columnType: column.columnType || schema[column.field].type,
width: column.width,
conditions: column.conditions,
},

View File

@ -3,7 +3,7 @@
import { getDatasourceForProvider, getSchemaForDatasource } from "dataBinding"
import { selectedScreen } from "stores/builder"
import { createEventDispatcher } from "svelte"
import { canBeSortColumn } from "@budibase/shared-core"
import { canBeSortColumn } from "@budibase/frontend-core"
export let componentInstance = {}
export let value = ""
@ -17,7 +17,7 @@
const getSortableFields = schema => {
return Object.entries(schema || {})
.filter(entry => canBeSortColumn(entry[1].type))
.filter(entry => canBeSortColumn(entry[1]))
.map(entry => entry[0])
}

View File

@ -14,7 +14,13 @@
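// Describes when the trial ends: "today.", "tomorrow." or "in N days."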
function daysUntilCancel() {
const cancelAt = license?.billing?.subscription?.cancelAt
const diffTime = Math.abs(cancelAt - new Date().getTime()) / 1000
return Math.floor(diffTime / oneDayInSeconds)
const days = Math.floor(diffTime / oneDayInSeconds)
if (days === 1) {
return "tomorrow."
} else if (days === 0) {
return "today."
}
return `in ${days} days.`
}
</script>
@ -28,7 +34,7 @@
extraLinkAction={$licensing.goToUpgradePage}
showCloseButton={false}
>
Your free trial will end in {daysUntilCancel()} days.
Your free trial will end {daysUntilCancel()}
</Banner>
</div>
{/if}

View File

@ -23,6 +23,7 @@
sourceType: DB_TYPE_EXTERNAL,
schema: {
id: {
name: "id",
autocolumn: true,
type: "number",
},

View File

@ -26,6 +26,7 @@
licensing,
environment,
enrichedApps,
sortBy,
} from "stores/portal"
import { goto } from "@roxi/routify"
import AppRow from "components/start/AppRow.svelte"
@ -247,7 +248,7 @@
<div class="app-actions">
<Select
autoWidth
value={$appsStore.sortBy}
value={$sortBy}
on:change={e => {
appsStore.updateSort(e.detail)
}}

View File

@ -4,6 +4,7 @@
import OpenAILogo from "./logos/OpenAI.svelte"
import AnthropicLogo from "./logos/Anthropic.svelte"
import TogetherAILogo from "./logos/TogetherAI.svelte"
import AzureOpenAILogo from "./logos/AzureOpenAI.svelte"
import { Providers } from "./constants"
const logos = {
@ -11,6 +12,7 @@
[Providers.OpenAI.name]: OpenAILogo,
[Providers.Anthropic.name]: AnthropicLogo,
[Providers.TogetherAI.name]: TogetherAILogo,
[Providers.AzureOpenAI.name]: AzureOpenAILogo,
}
export let config
@ -26,8 +28,8 @@
<div class="icon">
<svelte:component
this={logos[config.name || config.provider]}
height="30"
width="30"
height="18"
width="18"
/>
</div>
<div class="header">
@ -110,7 +112,7 @@
.tag {
display: flex;
color: var(--spectrum-body-m-text-color);
color: #ffffff;
padding: 4px 8px;
justify-content: center;
align-items: center;

View File

@ -1,6 +1,6 @@
import { it, expect, describe, vi } from "vitest"
import AISettings from "./index.svelte"
import { render } from "@testing-library/svelte"
import { render, fireEvent } from "@testing-library/svelte"
import { admin, licensing } from "stores/portal"
import { notifications } from "@budibase/bbui"
@ -55,39 +55,43 @@ describe("AISettings", () => {
expect(enterpriseTag).toBeInTheDocument()
})
it("should show the premium label on cloud when Budibase AI isn't enabled", async () => {
setupEnv(Hosting.Cloud)
instance = render(AISettings, {})
const premiumTag = instance.queryByText("Premium")
expect(premiumTag).toBeInTheDocument()
})
it("should not show the add configuration button if the user doesn't have the correct license on cloud", async () => {
it("the add configuration button should not do anything the user doesn't have the correct license on cloud", async () => {
let addConfigurationButton
let configModal
setupEnv(Hosting.Cloud)
instance = render(AISettings)
addConfigurationButton = instance.queryByText("Add configuration")
expect(addConfigurationButton).not.toBeInTheDocument()
expect(addConfigurationButton).toBeInTheDocument()
await fireEvent.click(addConfigurationButton)
configModal = instance.queryByText("Custom AI Configuration")
expect(configModal).not.toBeInTheDocument()
})
it("the add configuration button should open the config modal if the user has the correct license on cloud", async () => {
let addConfigurationButton
let configModal
setupEnv(Hosting.Cloud, { customAIConfigsEnabled: true })
instance = render(AISettings)
addConfigurationButton = instance.queryByText("Add configuration")
expect(addConfigurationButton).toBeInTheDocument()
await fireEvent.click(addConfigurationButton)
configModal = instance.queryByText("Custom AI Configuration")
expect(configModal).toBeInTheDocument()
})
it("should not show the add configuration button if the user doesn't have the correct license on self host", async () => {
it("the add configuration button should open the config modal if the user has the correct license on self host", async () => {
let addConfigurationButton
setupEnv(Hosting.Self)
instance = render(AISettings)
addConfigurationButton = instance.queryByText("Add configuration")
expect(addConfigurationButton).not.toBeInTheDocument()
let configModal
setupEnv(Hosting.Self, { customAIConfigsEnabled: true })
instance = render(AISettings, {})
instance = render(AISettings)
addConfigurationButton = instance.queryByText("Add configuration")
expect(addConfigurationButton).toBeInTheDocument()
await fireEvent.click(addConfigurationButton)
configModal = instance.queryByText("Custom AI Configuration")
expect(configModal).toBeInTheDocument()
})
})
})

View File

@ -84,8 +84,10 @@
<Label size="M">API Key</Label>
<Input type="password" bind:value={config.apiKey} />
</div>
<Toggle text="Active" bind:value={config.active} />
<Toggle text="Set as default" bind:value={config.isDefault} />
<div class="form-row">
<Toggle text="Active" bind:value={config.active} />
<Toggle text="Set as default" bind:value={config.isDefault} />
</div>
</ModalContent>
<style>

View File

@ -23,7 +23,7 @@ export const Providers = {
models: [{ label: "Llama 3 8B", value: "meta-llama/Meta-Llama-3-8B" }],
},
AzureOpenAI: {
name: "Azure Open AI",
name: "Azure OpenAI",
models: [
{ label: "GPT 4o Mini", value: "gpt-4o-mini" },
{ label: "GPT 4o", value: "gpt-4o" },

View File

@ -27,7 +27,6 @@
let editingUuid
$: isCloud = $admin.cloud
$: budibaseAIEnabled = $licensing.budibaseAIEnabled
$: customAIConfigsEnabled = $licensing.customAIConfigsEnabled
async function fetchAIConfig() {
@ -127,18 +126,8 @@
</Modal>
<Layout noPadding>
<Layout gap="XS" noPadding>
<Heading size="M">AI</Heading>
{#if isCloud && !budibaseAIEnabled}
<Tags>
<Tag icon="LockClosed">Premium</Tag>
</Tags>
{/if}
<Body>Configure your AI settings within this section:</Body>
</Layout>
<Divider />
<Layout noPadding>
<div class="config-heading">
<Heading size="S">AI Configurations</Heading>
<div class="header">
<Heading size="M">AI</Heading>
{#if !isCloud && !customAIConfigsEnabled}
<Tags>
<Tag icon="LockClosed">Premium</Tag>
@ -147,24 +136,43 @@
<Tags>
<Tag icon="LockClosed">Enterprise</Tag>
</Tags>
{:else}
<Button size="S" cta on:click={newConfig}>Add configuration</Button>
{/if}
</div>
<Body size="S"
>Use the following interface to select your preferred AI configuration.</Body
>
<Body size="S">Select your AI Model:</Body>
{#if fullAIConfig?.config}
{#each Object.keys(fullAIConfig.config) as key}
<AIConfigTile
config={fullAIConfig.config[key]}
editHandler={() => editConfig(key)}
deleteHandler={() => deleteConfig(key)}
/>
{/each}
{/if}
<Body>Configure your AI settings within this section:</Body>
</Layout>
<Divider />
<div style={`opacity: ${customAIConfigsEnabled ? 1 : 0.5}`}>
<Layout noPadding>
<div class="config-heading">
<Heading size="S">AI Configurations</Heading>
<Button
size="S"
cta={customAIConfigsEnabled}
secondary={!customAIConfigsEnabled}
on:click={customAIConfigsEnabled ? newConfig : null}
>
Add configuration
</Button>
</div>
<Body size="S"
>Use the following interface to select your preferred AI configuration.</Body
>
{#if customAIConfigsEnabled}
<Body size="S">Select your AI Model:</Body>
{/if}
{#if fullAIConfig?.config}
{#each Object.keys(fullAIConfig.config) as key}
<AIConfigTile
config={fullAIConfig.config[key]}
editHandler={customAIConfigsEnabled ? () => editConfig(key) : null}
deleteHandler={customAIConfigsEnabled
? () => deleteConfig(key)
: null}
/>
{/each}
{/if}
</Layout>
</div>
</Layout>
<style>
@ -172,5 +180,12 @@
display: flex;
justify-content: space-between;
align-items: center;
margin-bottom: -18px;
}
.header {
display: flex;
align-items: center;
gap: 12px;
}
</style>

View File

@ -0,0 +1,64 @@
<script>
export let width
export let height
</script>
<svg xmlns="http://www.w3.org/2000/svg" {width} {height} viewBox="0 0 96 96">
<defs>
<linearGradient
id="e399c19f-b68f-429d-b176-18c2117ff73c"
x1="-1032.172"
x2="-1059.213"
y1="145.312"
y2="65.426"
gradientTransform="matrix(1 0 0 -1 1075 158)"
gradientUnits="userSpaceOnUse"
>
<stop offset="0" stop-color="#114a8b" />
<stop offset="1" stop-color="#0669bc" />
</linearGradient>
<linearGradient
id="ac2a6fc2-ca48-4327-9a3c-d4dcc3256e15"
x1="-1023.725"
x2="-1029.98"
y1="108.083"
y2="105.968"
gradientTransform="matrix(1 0 0 -1 1075 158)"
gradientUnits="userSpaceOnUse"
>
<stop offset="0" stop-opacity=".3" />
<stop offset=".071" stop-opacity=".2" />
<stop offset=".321" stop-opacity=".1" />
<stop offset=".623" stop-opacity=".05" />
<stop offset="1" stop-opacity="0" />
</linearGradient>
<linearGradient
id="a7fee970-a784-4bb1-af8d-63d18e5f7db9"
x1="-1027.165"
x2="-997.482"
y1="147.642"
y2="68.561"
gradientTransform="matrix(1 0 0 -1 1075 158)"
gradientUnits="userSpaceOnUse"
>
<stop offset="0" stop-color="#3ccbf4" />
<stop offset="1" stop-color="#2892df" />
</linearGradient>
</defs>
<path
fill="url(#e399c19f-b68f-429d-b176-18c2117ff73c)"
d="M33.338 6.544h26.038l-27.03 80.087a4.152 4.152 0 0 1-3.933 2.824H8.149a4.145 4.145 0 0 1-3.928-5.47L29.404 9.368a4.152 4.152 0 0 1 3.934-2.825z"
/>
<path
fill="#0078d4"
d="M71.175 60.261h-41.29a1.911 1.911 0 0 0-1.305 3.309l26.532 24.764a4.171 4.171 0 0 0 2.846 1.121h23.38z"
/>
<path
fill="url(#ac2a6fc2-ca48-4327-9a3c-d4dcc3256e15)"
d="M33.338 6.544a4.118 4.118 0 0 0-3.943 2.879L4.252 83.917a4.14 4.14 0 0 0 3.908 5.538h20.787a4.443 4.443 0 0 0 3.41-2.9l5.014-14.777 17.91 16.705a4.237 4.237 0 0 0 2.666.972H81.24L71.024 60.261l-29.781.007L59.47 6.544z"
/>
<path
fill="url(#a7fee970-a784-4bb1-af8d-63d18e5f7db9)"
d="M66.595 9.364a4.145 4.145 0 0 0-3.928-2.82H33.648a4.146 4.146 0 0 1 3.928 2.82l25.184 74.62a4.146 4.146 0 0 1-3.928 5.472h29.02a4.146 4.146 0 0 0 3.927-5.472z"
/>
</svg>

View File

@ -1,5 +1,10 @@
<script>
import { redirect } from "@roxi/routify"
import { licensing } from "stores/portal"
$redirect("./auth")
if ($licensing.customAIConfigsEnabled) {
$redirect("./ai")
} else {
$redirect("./auth")
}
</script>

View File

@ -9,7 +9,6 @@ const DEV_PROPS = ["updatedBy", "updatedAt"]
export const INITIAL_APPS_STATE = {
apps: [],
sortBy: "name",
}
export class AppsStore extends BudiStore {
@ -53,6 +52,15 @@ export class AppsStore extends BudiStore {
...state,
sortBy,
}))
this.updateUserSort(sortBy)
}
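// Persist the user's preferred sort so it is restored on their next visit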
async updateUserSort(sortBy) {
try {
await auth.updateSelf({ appSort: sortBy })
} catch (err) {
console.error("couldn't save user sort: ", err)
}
}
async load() {
@ -140,43 +148,50 @@ export class AppsStore extends BudiStore {
export const appsStore = new AppsStore()
// Centralise any logic that enriches the apps list
export const enrichedApps = derived([appsStore, auth], ([$store, $auth]) => {
const enrichedApps = $store.apps
? $store.apps.map(app => ({
...app,
deployed: app.status === AppStatus.DEPLOYED,
lockedYou: app.lockedBy && app.lockedBy.email === $auth.user?.email,
lockedOther: app.lockedBy && app.lockedBy.email !== $auth.user?.email,
favourite: $auth.user?.appFavourites?.includes(app.appId),
}))
: []
export const sortBy = derived([appsStore, auth], ([$store, $auth]) => {
return $store.sortBy || $auth.user?.appSort || "name"
})
if ($store.sortBy === "status") {
return enrichedApps.sort((a, b) => {
if (a.favourite === b.favourite) {
if (a.status === b.status) {
// Centralise any logic that enriches the apps list
export const enrichedApps = derived(
[appsStore, auth, sortBy],
([$store, $auth, $sortBy]) => {
const enrichedApps = $store.apps
? $store.apps.map(app => ({
...app,
deployed: app.status === AppStatus.DEPLOYED,
lockedYou: app.lockedBy && app.lockedBy.email === $auth.user?.email,
lockedOther: app.lockedBy && app.lockedBy.email !== $auth.user?.email,
favourite: $auth.user?.appFavourites?.includes(app.appId),
}))
: []
if ($sortBy === "status") {
return enrichedApps.sort((a, b) => {
if (a.favourite === b.favourite) {
if (a.status === b.status) {
return a.name?.toLowerCase() < b.name?.toLowerCase() ? -1 : 1
}
return a.status === AppStatus.DEPLOYED ? -1 : 1
}
return a.favourite ? -1 : 1
})
} else if ($sortBy === "updated") {
return enrichedApps?.sort((a, b) => {
if (a.favourite === b.favourite) {
const aUpdated = a.updatedAt || "9999"
const bUpdated = b.updatedAt || "9999"
return aUpdated < bUpdated ? 1 : -1
}
return a.favourite ? -1 : 1
})
} else {
return enrichedApps?.sort((a, b) => {
if (a.favourite === b.favourite) {
return a.name?.toLowerCase() < b.name?.toLowerCase() ? -1 : 1
}
return a.status === AppStatus.DEPLOYED ? -1 : 1
}
return a.favourite ? -1 : 1
})
} else if ($store.sortBy === "updated") {
return enrichedApps?.sort((a, b) => {
if (a.favourite === b.favourite) {
const aUpdated = a.updatedAt || "9999"
const bUpdated = b.updatedAt || "9999"
return aUpdated < bUpdated ? 1 : -1
}
return a.favourite ? -1 : 1
})
} else {
return enrichedApps?.sort((a, b) => {
if (a.favourite === b.favourite) {
return a.name?.toLowerCase() < b.name?.toLowerCase() ? -1 : 1
}
return a.favourite ? -1 : 1
})
return a.favourite ? -1 : 1
})
}
}
})
)

View File

@ -3,7 +3,7 @@ import { writable } from "svelte/store"
export { organisation } from "./organisation"
export { users } from "./users"
export { admin } from "./admin"
export { appsStore, enrichedApps } from "./apps"
export { appsStore, enrichedApps, sortBy } from "./apps"
export { email } from "./email"
export { auth } from "./auth"
export { oidc } from "./oidc"

View File

@ -4,6 +4,8 @@
import { GridRowHeight, GridColumns } from "constants"
import { memo } from "@budibase/frontend-core"
export let onClick
const component = getContext("component")
const { styleable, builderStore } = getContext("sdk")
const context = getContext("context")
@ -121,15 +123,19 @@
})
</script>
<!-- svelte-ignore a11y-no-static-element-interactions -->
<!-- svelte-ignore a11y-click-events-have-key-events -->
<div
bind:this={ref}
class="grid"
class:mobile
class:clickable={!!onClick}
bind:clientWidth={width}
bind:clientHeight={height}
use:styleable={$styles}
data-cols={GridColumns}
data-col-size={colSize}
on:click={onClick}
>
{#if inBuilder}
<div class="underlay">
@ -176,6 +182,9 @@
.placeholder.first-col {
border-left: 1px solid var(--spectrum-global-color-gray-900);
}
.clickable {
cursor: pointer;
}
/* Highlight grid lines when resizing children */
:global(.grid.highlight > .underlay) {

View File

@ -2,7 +2,7 @@
import { getContext, onDestroy } from "svelte"
import { Table } from "@budibase/bbui"
import SlotRenderer from "./SlotRenderer.svelte"
import { canBeSortColumn } from "@budibase/shared-core"
import { canBeSortColumn } from "@budibase/frontend-core"
import Provider from "components/context/Provider.svelte"
export let dataProvider
@ -146,7 +146,7 @@
return
}
newSchema[columnName] = schema[columnName]
if (!canBeSortColumn(schema[columnName].type)) {
if (!canBeSortColumn(schema[columnName])) {
newSchema[columnName].sortable = false
}

View File

@ -63,7 +63,7 @@
// Look up the component tree and find something that is provided by an
// ancestor that matches our datasource. This is for backwards compatibility
// as previously we could use the "closest" context.
for (let id of path.reverse().slice(1)) {
for (let id of path.toReversed().slice(1)) {
// Check for matching view datasource
if (
dataSource.type === "viewV2" &&

View File

@ -0,0 +1,11 @@
export const buildAIEndpoints = API => ({
/**
* Generates a cron expression from a prompt
*/
generateCronExpression: async ({ prompt }) => {
return await API.post({
url: "/api/ai/cron",
body: { prompt },
})
},
})

View File

@ -2,6 +2,7 @@ import { Helpers } from "@budibase/bbui"
import { Header } from "@budibase/shared-core"
import { ApiVersion } from "../constants"
import { buildAnalyticsEndpoints } from "./analytics"
import { buildAIEndpoints } from "./ai"
import { buildAppEndpoints } from "./app"
import { buildAttachmentEndpoints } from "./attachments"
import { buildAuthEndpoints } from "./auth"
@ -268,6 +269,7 @@ export const createAPIClient = config => {
// Attach all endpoints
return {
...API,
...buildAIEndpoints(API),
...buildAnalyticsEndpoints(API),
...buildAppEndpoints(API),
...buildAttachmentEndpoints(API),

View File

@ -40,7 +40,7 @@ export const buildTableEndpoints = API => ({
sortType,
paginate,
}) => {
if (!tableId || !query) {
if (!tableId) {
return {
rows: [],
}

View File

@ -2,6 +2,7 @@
import { onMount, getContext } from "svelte"
import { Dropzone } from "@budibase/bbui"
import GridPopover from "../overlays/GridPopover.svelte"
import { FieldType } from "@budibase/types"
export let value
export let focused = false
@ -81,7 +82,12 @@
>
{#each value || [] as attachment}
{#if isImage(attachment.extension)}
<img src={attachment.url} alt={attachment.extension} />
<img
class:light={!$props?.darkMode &&
schema.type === FieldType.SIGNATURE_SINGLE}
src={attachment.url}
alt={attachment.extension}
/>
{:else}
<div class="file" title={attachment.name}>
{attachment.extension}
@ -140,4 +146,9 @@
width: 320px;
padding: var(--cell-padding);
}
.attachment-cell img.light {
-webkit-filter: invert(100%);
filter: invert(100%);
}
</style>

View File

@ -1,6 +1,6 @@
<script>
import { getContext, onMount, tick } from "svelte"
import { canBeDisplayColumn, canBeSortColumn } from "@budibase/shared-core"
import { canBeSortColumn, canBeDisplayColumn } from "@budibase/frontend-core"
import { Icon, Menu, MenuItem, Modal } from "@budibase/bbui"
import GridCell from "./GridCell.svelte"
import { getColumnIcon } from "../lib/utils"
@ -165,7 +165,17 @@
}
const hideColumn = () => {
datasource.actions.addSchemaMutation(column.name, { visible: false })
const { related } = column
const mutation = { visible: false }
if (!related) {
datasource.actions.addSchemaMutation(column.name, mutation)
} else {
datasource.actions.addSubSchemaMutation(
related.subField,
related.field,
mutation
)
}
datasource.actions.saveSchemaMutations()
open = false
}
@ -347,15 +357,14 @@
<MenuItem
icon="Label"
on:click={makeDisplayColumn}
disabled={column.primaryDisplay ||
!canBeDisplayColumn(column.schema.type)}
disabled={column.primaryDisplay || !canBeDisplayColumn(column.schema)}
>
Use as display column
</MenuItem>
<MenuItem
icon="SortOrderUp"
on:click={sortAscending}
disabled={!canBeSortColumn(column.schema.type) ||
disabled={!canBeSortColumn(column.schema) ||
(column.name === $sort.column && $sort.order === "ascending")}
>
Sort {sortingLabels.ascending}
@ -363,7 +372,7 @@
<MenuItem
icon="SortOrderDown"
on:click={sortDescending}
disabled={!canBeSortColumn(column.schema.type) ||
disabled={!canBeSortColumn(column.schema) ||
(column.name === $sort.column && $sort.order === "descending")}
>
Sort {sortingLabels.descending}

View File

@ -27,9 +27,7 @@
let candidateIndex
let lastSearchId
let searching = false
let container
let anchor
let relationshipFields
$: fieldValue = parseValue(value)
$: oneRowOnly = schema?.relationshipType === "one-to-many"
@ -56,12 +54,6 @@
return acc
}, {})
$: showRelationshipFields =
relationshipFields &&
Object.keys(relationshipFields).length &&
focused &&
!isOpen
const parseValue = value => {
if (Array.isArray(value) && value.every(x => x?._id)) {
return value
@ -242,14 +234,6 @@
return value
}
const displayRelationshipFields = relationship => {
relationshipFields = relationFields[relationship._id]
}
const hideRelationshipFields = () => {
relationshipFields = undefined
}
onMount(() => {
api = {
focus: open,
@ -269,7 +253,7 @@
style="--color:{color};"
bind:this={anchor}
>
<div class="container" bind:this={container}>
<div class="container">
<div
class="values"
class:wrap={editable || contentLines > 1}
@ -281,9 +265,7 @@
<div
class="badge"
class:extra-info={!!relationFields[relationship._id]}
on:mouseover={() => displayRelationshipFields(relationship)}
on:focus={() => {}}
on:mouseleave={() => hideRelationshipFields()}
>
<span>
{readable(
@ -358,21 +340,6 @@
</GridPopover>
{/if}
{#if showRelationshipFields}
<GridPopover {anchor} minWidth={300} maxWidth={400}>
<div class="relationship-fields">
{#each Object.entries(relationshipFields) as [fieldName, fieldValue]}
<div class="relationship-field-name">
{fieldName}
</div>
<div class="relationship-field-value">
{fieldValue}
</div>
{/each}
</div>
</GridPopover>
{/if}
<style>
.wrapper {
flex: 1 1 auto;
@ -539,25 +506,4 @@
.search :global(.spectrum-Form-item) {
flex: 1 1 auto;
}
.relationship-fields {
margin: var(--spacing-m) var(--spacing-l);
display: grid;
grid-template-columns: minmax(auto, 50%) auto;
grid-row-gap: var(--spacing-m);
grid-column-gap: var(--spacing-m);
}
.relationship-field-name {
text-transform: uppercase;
color: var(--spectrum-global-color-gray-600);
font-size: var(--font-size-xs);
}
.relationship-field-value {
overflow: hidden;
display: -webkit-box;
-webkit-box-orient: vertical;
-webkit-line-clamp: 3;
line-clamp: 3;
}
</style>

View File

@ -4,13 +4,15 @@
import ColumnsSettingContent from "./ColumnsSettingContent.svelte"
import { FieldPermissions } from "../../../constants"
const { columns, datasource } = getContext("grid")
const { tableColumns, datasource } = getContext("grid")
let open = false
let anchor
$: anyRestricted = $columns.filter(col => !col.visible || col.readonly).length
$: text = anyRestricted ? `Columns: (${anyRestricted} restricted)` : "Columns"
$: anyRestricted = $tableColumns.filter(
col => !col.visible || col.readonly
).length
$: text = anyRestricted ? `Columns (${anyRestricted} restricted)` : "Columns"
$: permissions =
$datasource.type === "viewV2"
? [
@ -28,12 +30,12 @@
size="M"
on:click={() => (open = !open)}
selected={open || anyRestricted}
disabled={!$columns.length}
disabled={!$tableColumns.length}
>
{text}
</ActionButton>
</div>
<Popover bind:open {anchor} align="left">
<ColumnsSettingContent columns={$columns} {permissions} />
<ColumnsSettingContent columns={$tableColumns} {permissions} />
</Popover>

View File

@ -122,8 +122,10 @@
label: name,
schema: {
type: column.type,
subtype: column.subtype,
visible: column.visible,
readonly: column.readonly,
icon: column.icon,
},
}
})

View File

@ -1,7 +1,7 @@
<script>
import { getContext } from "svelte"
import { ActionButton, Popover, Select } from "@budibase/bbui"
import { canBeSortColumn } from "@budibase/shared-core"
import { canBeSortColumn } from "@budibase/frontend-core"
const { sort, columns } = getContext("grid")
@ -13,8 +13,9 @@
label: col.label || col.name,
value: col.name,
type: col.schema?.type,
related: col.related,
}))
.filter(col => canBeSortColumn(col.type))
.filter(col => canBeSortColumn(col))
$: orderOptions = getOrderOptions($sort.column, columnOptions)
const getOrderOptions = (column, columnOptions) => {

View File

@ -35,5 +35,9 @@ const TypeComponentMap = {
[FieldType.BB_REFERENCE_SINGLE]: BBReferenceSingleCell,
}
export const getCellRenderer = column => {
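// Prefer an explicit cell render type (set for related columns), falling back to the column's own type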
return TypeComponentMap[column?.schema?.type] || TextCell
return (
TypeComponentMap[column?.schema?.cellRenderType] ||
TypeComponentMap[column?.schema?.type] ||
TextCell
)
}

View File

@ -19,6 +19,10 @@ export const getCellID = (rowId, fieldName) => {
}
export const getColumnIcon = column => {
if (column.schema.icon) {
return column.schema.icon
}
if (column.schema.autocolumn) {
return "MagicWand"
}

View File

@ -42,6 +42,11 @@ export const deriveStores = context => {
return map
})
// Derived list of columns which are a direct part of the table
const tableColumns = derived(columns, $columns => {
return $columns.filter(col => !col.related)
})
// Derived list of columns which have not been explicitly hidden
const visibleColumns = derived(columns, $columns => {
return $columns.filter(col => col.visible)
@ -64,6 +69,7 @@ export const deriveStores = context => {
})
return {
tableColumns,
displayColumn,
columnLookupMap,
visibleColumns,
@ -73,16 +79,24 @@ export const deriveStores = context => {
}
export const createActions = context => {
const { columns, datasource, schema } = context
const { columns, datasource } = context
// Updates the width of all columns
const changeAllColumnWidths = async width => {
const $schema = get(schema)
let mutations = {}
Object.keys($schema).forEach(field => {
mutations[field] = { width }
const $columns = get(columns)
$columns.forEach(column => {
const { related } = column
const mutation = { width }
if (!related) {
datasource.actions.addSchemaMutation(column.name, mutation)
} else {
datasource.actions.addSubSchemaMutation(
related.subField,
related.field,
mutation
)
}
})
datasource.actions.addSchemaMutations(mutations)
await datasource.actions.saveSchemaMutations()
}
@ -136,7 +150,7 @@ export const initialise = context => {
.map(field => {
const fieldSchema = $enrichedSchema[field]
const oldColumn = $columns?.find(col => col.name === field)
let column = {
const column = {
name: field,
label: fieldSchema.displayName || field,
schema: fieldSchema,
@ -145,6 +159,7 @@ export const initialise = context => {
readonly: fieldSchema.readonly,
order: fieldSchema.order ?? oldColumn?.order,
conditions: fieldSchema.conditions,
related: fieldSchema.related,
}
// Override a few properties for primary display
if (field === primaryDisplay) {

View File

@ -1,6 +1,6 @@
import { derived, get } from "svelte/store"
import { getDatasourceDefinition, getDatasourceSchema } from "../../../fetch"
import { memo } from "../../../utils"
import { enrichSchemaWithRelColumns, memo } from "../../../utils"
export const createStores = () => {
const definition = memo(null)
@ -53,10 +53,13 @@ export const deriveStores = context => {
if (!$schema) {
return null
}
let enrichedSchema = {}
Object.keys($schema).forEach(field => {
const schemaWithRelatedColumns = enrichSchemaWithRelColumns($schema)
const enrichedSchema = {}
Object.keys(schemaWithRelatedColumns).forEach(field => {
enrichedSchema[field] = {
...$schema[field],
...schemaWithRelatedColumns[field],
...$schemaOverrides?.[field],
...$schemaMutations[field],
}
@ -202,24 +205,6 @@ export const createActions = context => {
})
}
// Adds schema mutations for multiple fields at once
const addSchemaMutations = mutations => {
const fields = Object.keys(mutations || {})
if (!fields.length) {
return
}
schemaMutations.update($schemaMutations => {
let newSchemaMutations = { ...$schemaMutations }
fields.forEach(field => {
newSchemaMutations[field] = {
...newSchemaMutations[field],
...mutations[field],
}
})
return newSchemaMutations
})
}
// Saves schema changes to the server, if possible
const saveSchemaMutations = async () => {
// If we can't save schema changes then we just want to keep this in memory
@ -309,7 +294,6 @@ export const createActions = context => {
changePrimaryDisplay,
addSchemaMutation,
addSubSchemaMutation,
addSchemaMutations,
saveSchemaMutations,
resetSchemaMutations,
},

View File

@ -120,25 +120,29 @@ export const initialise = context => {
// When sorting changes, ensure view definition is kept up to date
unsubscribers.push(
sort.subscribe(async $sort => {
// Ensure we're updating the correct view
const $view = get(definition)
if ($view?.id !== $datasource.id) {
return
}
// Skip if nothing actually changed
if (
$sort?.column === $view.sort?.field &&
$sort?.order === $view.sort?.order
) {
return
}
// If we can mutate schema then update the view definition
if (get(config).canSaveSchema) {
// Ensure we're updating the correct view
const $view = get(definition)
if ($view?.id !== $datasource.id) {
return
}
if (
$sort?.column !== $view.sort?.field ||
$sort?.order !== $view.sort?.order
) {
await datasource.actions.saveDefinition({
...$view,
sort: {
field: $sort.column,
order: $sort.order || "ascending",
},
})
}
await datasource.actions.saveDefinition({
...$view,
sort: {
field: $sort.column,
order: $sort.order || "ascending",
},
})
}
// Also update the fetch to ensure the new sort is respected.

View File

@ -214,11 +214,20 @@ export const createActions = context => {
})
// Extract new orders as schema mutations
let mutations = {}
get(columns).forEach((column, idx) => {
mutations[column.name] = { order: idx }
const { related } = column
const mutation = { order: idx }
if (!related) {
datasource.actions.addSchemaMutation(column.name, mutation)
} else {
datasource.actions.addSubSchemaMutation(
related.subField,
related.field,
mutation
)
}
})
datasource.actions.addSchemaMutations(mutations)
await datasource.actions.saveSchemaMutations()
}

View File

@ -38,6 +38,7 @@ export const createActions = context => {
initialWidth: column.width,
initialMouseX: x,
column: column.name,
related: column.related,
})
// Add mouse event listeners to handle resizing
@ -50,7 +51,7 @@ export const createActions = context => {
// Handler for moving the mouse to resize columns
const onResizeMouseMove = e => {
const { initialMouseX, initialWidth, width, column } = get(resize)
const { initialMouseX, initialWidth, width, column, related } = get(resize)
const { x } = parseEventLocation(e)
const dx = x - initialMouseX
const newWidth = Math.round(Math.max(MinColumnWidth, initialWidth + dx))
@ -61,7 +62,13 @@ export const createActions = context => {
}
// Update column state
datasource.actions.addSchemaMutation(column, { width })
if (!related) {
datasource.actions.addSchemaMutation(column, { width })
} else {
datasource.actions.addSubSchemaMutation(related.subField, related.field, {
width,
})
}
// Update state
resize.update(state => ({

View File

@ -6,6 +6,7 @@ import { tick } from "svelte"
import { Helpers } from "@budibase/bbui"
import { sleep } from "../../../utils/utils"
import { FieldType } from "@budibase/types"
import { getRelatedTableValues } from "../../../utils"
export const createStores = () => {
const rows = writable([])
@ -42,15 +43,26 @@ export const createStores = () => {
}
export const deriveStores = context => {
const { rows } = context
const { rows, enrichedSchema } = context
// Enrich rows with an index property and any pending changes
const enrichedRows = derived(rows, $rows => {
return $rows.map((row, idx) => ({
...row,
__idx: idx,
}))
})
const enrichedRows = derived(
[rows, enrichedSchema],
([$rows, $enrichedSchema]) => {
const customColumns = Object.values($enrichedSchema || {}).filter(
f => f.related
)
return $rows.map((row, idx) => ({
...row,
__idx: idx,
...customColumns.reduce((map, column) => {
const fromField = $enrichedSchema[column.related.field]
map[column.name] = getRelatedTableValues(row, column, fromField)
return map
}, {}),
}))
}
)
// Generate a lookup map to quickly find a row by ID
const rowLookupMap = derived(enrichedRows, $enrichedRows => {

View File

@ -10,3 +10,5 @@ export { createWebsocket } from "./websocket"
export * from "./download"
export * from "./theme"
export * from "./settings"
export * from "./relatedColumns"
export * from "./table"

View File

@ -0,0 +1,107 @@
import { FieldType, RelationshipType } from "@budibase/types"
import { Helpers } from "@budibase/bbui"
const columnTypeManyTypeOverrides = {
[FieldType.DATETIME]: FieldType.STRING,
[FieldType.BOOLEAN]: FieldType.STRING,
[FieldType.SIGNATURE_SINGLE]: FieldType.ATTACHMENTS,
}
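// Per-type parsers that normalise the list of values collected from a related table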
const columnTypeManyParser = {
[FieldType.DATETIME]: (value, field) => {
function parseDate(value) {
const { timeOnly, dateOnly, ignoreTimezones } = field || {}
const enableTime = !dateOnly
const parsedValue = Helpers.parseDate(value, {
timeOnly,
enableTime,
ignoreTimezones,
})
const parsed = Helpers.getDateDisplayValue(parsedValue, {
enableTime,
timeOnly,
})
return parsed
}
return value?.map(v => parseDate(v))
},
[FieldType.BOOLEAN]: value => value?.map(v => !!v),
[FieldType.BB_REFERENCE_SINGLE]: value => [
...new Map(value.map(i => [i._id, i])).values(),
],
[FieldType.BB_REFERENCE]: value => [
...new Map(value.map(i => [i._id, i])).values(),
],
[FieldType.ARRAY]: value => Array.from(new Set(value)),
}
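// Adds a virtual "<relationship>.<column>" schema entry for each visible column of a visible relationship field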
export function enrichSchemaWithRelColumns(schema) {
if (!schema) {
return
}
const result = Object.keys(schema).reduce((result, fieldName) => {
const field = schema[fieldName]
result[fieldName] = field
if (field.visible !== false && field.columns) {
const fromSingle =
field?.relationshipType === RelationshipType.ONE_TO_MANY
for (const relColumn of Object.keys(field.columns)) {
const relField = field.columns[relColumn]
if (!relField.visible) {
continue
}
const name = `${field.name}.${relColumn}`
result[name] = {
...relField,
name,
related: { field: fieldName, subField: relColumn },
cellRenderType:
(!fromSingle && columnTypeManyTypeOverrides[relField.type]) ||
relField.type,
}
}
}
return result
}, {})
return result
}
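// Resolves the value(s) shown for a related column on a row, joining primitive types into a comma separated string for many-relationships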
export function getRelatedTableValues(row, field, fromField) {
const fromSingle =
fromField?.relationshipType === RelationshipType.ONE_TO_MANY
let result = ""
if (fromSingle) {
result = row[field.related.field]?.[0]?.[field.related.subField]
} else {
const parser = columnTypeManyParser[field.type] || (value => value)
result = parser(
row[field.related.field]
?.flatMap(r => r[field.related.subField])
?.filter(i => i !== undefined && i !== null),
field
)
if (
[
FieldType.STRING,
FieldType.NUMBER,
FieldType.BIGINT,
FieldType.BOOLEAN,
FieldType.DATETIME,
FieldType.LONGFORM,
FieldType.BARCODEQR,
].includes(field.type)
) {
result = result?.join(", ")
}
}
return result
}

View File

@ -0,0 +1,27 @@
import * as sharedCore from "@budibase/shared-core"
export function canBeDisplayColumn(column) {
if (!sharedCore.canBeDisplayColumn(column.type)) {
return false
}
if (column.related) {
// If it's a related column (only available in the frontend), don't allow using it as display column
return false
}
return true
}
export function canBeSortColumn(column) {
if (!sharedCore.canBeSortColumn(column.type)) {
return false
}
if (column.related) {
// If it's a related column (only available in the frontend), don't allow using it as sort column
return false
}
return true
}

@ -1 +1 @@
Subproject commit e2fe0f9cc856b4ee1a97df96d623b2d87d4e8733
Subproject commit fc4c7f4925139af078480217965c3d6338dc0a7f

View File

@ -1,9 +1,6 @@
import { permissions, roles, context } from "@budibase/backend-core"
import {
UserCtx,
Database,
Role,
PermissionLevel,
GetResourcePermsResponse,
ResourcePermissionInfo,
GetDependantResourcesResponse,
@ -11,108 +8,17 @@ import {
AddPermissionRequest,
RemovePermissionRequest,
RemovePermissionResponse,
FetchResourcePermissionInfoResponse,
} from "@budibase/types"
import { getRoleParams } from "../../db/utils"
import {
CURRENTLY_SUPPORTED_LEVELS,
getBasePermissions,
} from "../../utilities/security"
import { removeFromArray } from "../../utilities"
import sdk from "../../sdk"
const enum PermissionUpdateType {
REMOVE = "remove",
ADD = "add",
}
import { PermissionUpdateType } from "../../sdk/app/permissions"
const SUPPORTED_LEVELS = CURRENTLY_SUPPORTED_LEVELS
// utility function to stop this repetition - permissions always stored under roles
async function getAllDBRoles(db: Database) {
const body = await db.allDocs<Role>(
getRoleParams(null, {
include_docs: true,
})
)
return body.rows.map(row => row.doc!)
}
async function updatePermissionOnRole(
{
roleId,
resourceId,
level,
}: { roleId: string; resourceId: string; level: PermissionLevel },
updateType: PermissionUpdateType
) {
const db = context.getAppDB()
const remove = updateType === PermissionUpdateType.REMOVE
const isABuiltin = roles.isBuiltin(roleId)
const dbRoleId = roles.getDBRoleID(roleId)
const dbRoles = await getAllDBRoles(db)
const docUpdates: Role[] = []
// the permission is for a built in, make sure it exists
if (isABuiltin && !dbRoles.some(role => role._id === dbRoleId)) {
const builtin = roles.getBuiltinRoles()[roleId]
builtin._id = roles.getDBRoleID(builtin._id!)
dbRoles.push(builtin)
}
// now try to find any roles which need updated, e.g. removing the
// resource from another role and then adding to the new role
for (let role of dbRoles) {
let updated = false
const rolePermissions: Record<string, PermissionLevel[]> = role.permissions
? role.permissions
: {}
// make sure its an array, also handle migrating
if (
!rolePermissions[resourceId] ||
!Array.isArray(rolePermissions[resourceId])
) {
rolePermissions[resourceId] =
typeof rolePermissions[resourceId] === "string"
? [rolePermissions[resourceId] as unknown as PermissionLevel]
: []
}
// handle the removal/updating the role which has this permission first
// the updating (role._id !== dbRoleId) is required because a resource/level can
// only be permitted in a single role (this reduces hierarchy confusion and simplifies
// the general UI for this, rather than needing to show everywhere it is used)
if (
(role._id !== dbRoleId || remove) &&
rolePermissions[resourceId].indexOf(level) !== -1
) {
removeFromArray(rolePermissions[resourceId], level)
updated = true
}
// handle the adding, we're on the correct role, add it to this
if (!remove && role._id === dbRoleId) {
const set = new Set(rolePermissions[resourceId])
rolePermissions[resourceId] = [...set.add(level)]
updated = true
}
// handle the update, add it to bulk docs to perform at end
if (updated) {
role.permissions = rolePermissions
docUpdates.push(role)
}
}
const response = await db.bulkDocs(docUpdates)
return response.map(resp => {
const version = docUpdates.find(role => role._id === resp.id)?.version
const _id = roles.getExternalRoleID(resp.id, version)
return {
_id,
rev: resp.rev,
error: resp.error,
reason: resp.reason,
}
})
}
export function fetchBuiltin(ctx: UserCtx) {
ctx.body = Object.values(permissions.getBuiltinPermissions())
}
@ -122,10 +28,12 @@ export function fetchLevels(ctx: UserCtx) {
ctx.body = SUPPORTED_LEVELS
}
export async function fetch(ctx: UserCtx) {
export async function fetch(
ctx: UserCtx<void, FetchResourcePermissionInfoResponse>
) {
const db = context.getAppDB()
const dbRoles: Role[] = await getAllDBRoles(db)
let permissions: any = {}
const dbRoles = await sdk.permissions.getAllDBRoles(db)
let permissions: Record<string, Record<string, string>> = {}
// create an object with structure role ID -> resource ID -> level
for (let role of dbRoles) {
if (!role.permissions) {
@ -137,13 +45,13 @@ export async function fetch(ctx: UserCtx) {
}
for (let [resource, levelArr] of Object.entries(role.permissions)) {
const levels: string[] = Array.isArray(levelArr) ? levelArr : [levelArr]
const perms: Record<string, string> = {}
const perms: Record<string, string> = permissions[resource] || {}
levels.forEach(level => (perms[level] = roleId!))
permissions[resource] = perms
}
}
// apply the base permissions
const finalPermissions: Record<string, Record<string, string>> = {}
const finalPermissions: FetchResourcePermissionInfoResponse = {}
for (let [resource, permission] of Object.entries(permissions)) {
const basePerms = getBasePermissions(resource)
finalPermissions[resource] = Object.assign(basePerms, permission)
@ -186,12 +94,17 @@ export async function getDependantResources(
export async function addPermission(ctx: UserCtx<void, AddPermissionResponse>) {
const params: AddPermissionRequest = ctx.params
ctx.body = await updatePermissionOnRole(params, PermissionUpdateType.ADD)
await sdk.permissions.updatePermissionOnRole(params, PermissionUpdateType.ADD)
ctx.status = 200
}
export async function removePermission(
ctx: UserCtx<void, RemovePermissionResponse>
) {
const params: RemovePermissionRequest = ctx.params
ctx.body = await updatePermissionOnRole(params, PermissionUpdateType.REMOVE)
await sdk.permissions.updatePermissionOnRole(
params,
PermissionUpdateType.REMOVE
)
ctx.status = 200
}

View File

@ -1,8 +1,10 @@
import dayjs from "dayjs"
import {
Aggregation,
AutoFieldSubType,
AutoReason,
Datasource,
DatasourcePlusQueryResponse,
FieldSchema,
FieldType,
FilterType,
@ -19,6 +21,7 @@ import {
SortJson,
SortType,
Table,
ViewV2,
} from "@budibase/types"
import {
breakExternalTableId,
@ -46,7 +49,7 @@ import { db as dbCore } from "@budibase/backend-core"
import sdk from "../../../sdk"
import env from "../../../environment"
import { makeExternalQuery } from "../../../integrations/base/query"
import { dataFilters } from "@budibase/shared-core"
import { dataFilters, helpers } from "@budibase/shared-core"
export interface ManyRelationship {
tableId?: string
@ -159,17 +162,41 @@ function isEditableColumn(column: FieldSchema) {
export class ExternalRequest<T extends Operation> {
private readonly operation: T
private readonly tableId: string
private datasource?: Datasource
private tables: { [key: string]: Table } = {}
private readonly source: Table | ViewV2
private datasource: Datasource
constructor(operation: T, tableId: string, datasource?: Datasource) {
this.operation = operation
this.tableId = tableId
this.datasource = datasource
if (datasource && datasource.entities) {
this.tables = datasource.entities
public static async for<T extends Operation>(
operation: T,
source: Table | ViewV2,
opts: { datasource?: Datasource } = {}
) {
if (!opts.datasource) {
if (sdk.views.isView(source)) {
const table = await sdk.views.getTable(source.id)
opts.datasource = await sdk.datasources.get(table.sourceId!)
} else {
opts.datasource = await sdk.datasources.get(source.sourceId!)
}
}
return new ExternalRequest(operation, source, opts.datasource)
}
private get tables(): { [key: string]: Table } {
if (!this.datasource.entities) {
throw new Error("Datasource does not have entities")
}
return this.datasource.entities
}
private constructor(
operation: T,
source: Table | ViewV2,
datasource: Datasource
) {
this.operation = operation
this.source = source
this.datasource = datasource
}
private prepareFilters(
@ -243,18 +270,13 @@ export class ExternalRequest<T extends Operation> {
}
}
private async removeManyToManyRelationships(
rowId: string,
table: Table,
colName: string
) {
private async removeManyToManyRelationships(rowId: string, table: Table) {
const tableId = table._id!
const filters = this.prepareFilters(rowId, {}, table)
// safety check: if there are no filters on deletion, bad things happen
if (Object.keys(filters).length !== 0) {
return getDatasourceAndQuery({
endpoint: getEndpoint(tableId, Operation.DELETE),
body: { [colName]: null },
filters,
meta: {
table,
@ -265,13 +287,18 @@ export class ExternalRequest<T extends Operation> {
}
}
private async removeOneToManyRelationships(rowId: string, table: Table) {
private async removeOneToManyRelationships(
rowId: string,
table: Table,
colName: string
) {
const tableId = table._id!
const filters = this.prepareFilters(rowId, {}, table)
// safety check: if there are no filters on deletion, bad things happen
if (Object.keys(filters).length !== 0) {
return getDatasourceAndQuery({
endpoint: getEndpoint(tableId, Operation.UPDATE),
body: { [colName]: null },
filters,
meta: {
table,
@ -290,20 +317,6 @@ export class ExternalRequest<T extends Operation> {
return this.tables[tableName]
}
// seeds the object with table and datasource information
async retrieveMetadata(
datasourceId: string
): Promise<{ tables: Record<string, Table>; datasource: Datasource }> {
if (!this.datasource) {
this.datasource = await sdk.datasources.get(datasourceId)
if (!this.datasource || !this.datasource.entities) {
throw "No tables found, fetch tables before query."
}
this.tables = this.datasource.entities
}
return { tables: this.tables, datasource: this.datasource }
}
async getRow(table: Table, rowId: string): Promise<Row> {
const response = await getDatasourceAndQuery({
endpoint: getEndpoint(table._id!, Operation.READ),
@ -545,8 +558,9 @@ export class ExternalRequest<T extends Operation> {
return matchesPrimaryLink
}
const matchesSecondayLink = row[linkSecondary] === body?.[linkSecondary]
return matchesPrimaryLink && matchesSecondayLink
const matchesSecondaryLink =
row[linkSecondary] === body?.[linkSecondary]
return matchesPrimaryLink && matchesSecondaryLink
}
const existingRelationship = rows.find((row: { [key: string]: any }) =>
@ -583,8 +597,8 @@ export class ExternalRequest<T extends Operation> {
for (let row of rows) {
const rowId = generateIdForRow(row, table)
const promise: Promise<any> = isMany
? this.removeManyToManyRelationships(rowId, table, colName)
: this.removeOneToManyRelationships(rowId, table)
? this.removeManyToManyRelationships(rowId, table)
: this.removeOneToManyRelationships(rowId, table, colName)
if (promise) {
promises.push(promise)
}
@ -607,36 +621,28 @@ export class ExternalRequest<T extends Operation> {
rows.map(row => {
const rowId = generateIdForRow(row, table)
return isMany
? this.removeManyToManyRelationships(
? this.removeManyToManyRelationships(rowId, table)
: this.removeOneToManyRelationships(
rowId,
table,
relationshipColumn.fieldName
)
: this.removeOneToManyRelationships(rowId, table)
})
)
}
}
async run(config: RunConfig): Promise<ExternalRequestReturnType<T>> {
const { operation, tableId } = this
if (!tableId) {
throw new Error("Unable to run without a table ID")
}
let { datasourceId, tableName } = breakExternalTableId(tableId)
let datasource = this.datasource
if (!datasource) {
const { datasource: ds } = await this.retrieveMetadata(datasourceId)
datasource = ds
}
const tables = this.tables
const table = tables[tableName]
let isSql = isSQL(datasource)
if (!table) {
throw new Error(
`Unable to process query, table "${tableName}" not defined.`
)
const { operation } = this
let table: Table
if (sdk.views.isView(this.source)) {
table = await sdk.views.getTable(this.source.id)
} else {
table = this.source
}
let isSql = isSQL(this.datasource)
// look for specific components of config which may not be considered acceptable
let { id, row, filters, sort, paginate, rows } = cleanupConfig(
config,
@ -665,6 +671,7 @@ export class ExternalRequest<T extends Operation> {
config.includeSqlRelationships === IncludeRelationship.INCLUDE
// clean up row on ingress using schema
const unprocessedRow = config.row
const processed = this.inputProcessing(row, table)
row = processed.row
let manyRelationships = processed.manyRelationships
@ -679,25 +686,39 @@ export class ExternalRequest<T extends Operation> {
}
}
}
if (
operation === Operation.DELETE &&
(filters == null || Object.keys(filters).length === 0)
) {
throw "Deletion must be filtered"
}
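// Calculation views expose their calculation fields as aggregations on the query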
let aggregations: Aggregation[] = []
if (sdk.views.isView(this.source)) {
const calculationFields = helpers.views.calculationFields(this.source)
for (const [key, field] of Object.entries(calculationFields)) {
aggregations.push({
...field,
name: key,
})
}
}
let json: QueryJson = {
endpoint: {
datasourceId: datasourceId!,
entityId: tableName,
datasourceId: this.datasource._id!,
entityId: table.name,
operation,
},
resource: {
// have to specify the fields to avoid column overlap (for SQL)
fields: isSql
? buildSqlFieldList(table, this.tables, {
? await buildSqlFieldList(this.source, this.tables, {
relationships: incRelationships,
})
: [],
aggregations,
},
filters,
sort,
@ -714,7 +735,7 @@ export class ExternalRequest<T extends Operation> {
},
meta: {
table,
tables: tables,
tables: this.tables,
},
}
@ -725,9 +746,20 @@ export class ExternalRequest<T extends Operation> {
// aliasing can be disabled fully if desired
const aliasing = new sdk.rows.AliasTables(Object.keys(this.tables))
let response = env.SQL_ALIASING_DISABLE
? await getDatasourceAndQuery(json)
: await aliasing.queryWithAliasing(json, makeExternalQuery)
let response: DatasourcePlusQueryResponse
    // there's a chance that nothing needs updating after input processing, so skip the query call
// we might still need to perform other operations like updating the foreign keys on other rows
if (
this.operation === Operation.UPDATE &&
Object.keys(row || {}).length === 0 &&
unprocessedRow
) {
response = [unprocessedRow]
} else {
response = env.SQL_ALIASING_DISABLE
? await getDatasourceAndQuery(json)
: await aliasing.queryWithAliasing(json, makeExternalQuery)
}
// if it's a counting operation there will be no more processing, just return the number
if (this.operation === Operation.COUNT) {
@ -745,7 +777,7 @@ export class ExternalRequest<T extends Operation> {
}
const output = await sqlOutputProcessing(
response,
table,
this.source,
this.tables,
relationships
)
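
Note on the hunk above: run() now resolves its table from a Table | ViewV2 source instead of a raw tableId, and any calculation fields on a view are forwarded to the query as aggregations. A minimal sketch of that resolution step, assuming the helpers already used in this diff (sdk.views.isView, sdk.views.getTable, helpers.views.calculationFields); the import paths and function name are illustrative:

import { Table, ViewV2 } from "@budibase/types"
import { helpers } from "@budibase/shared-core"
import sdk from "../../../sdk"

// Resolve the underlying table for a source, and turn view calculation
// fields into named aggregations for the query payload.
async function resolveSourceExample(source: Table | ViewV2) {
  const table = sdk.views.isView(source)
    ? await sdk.views.getTable(source.id)
    : source
  const aggregations = sdk.views.isView(source)
    ? Object.entries(helpers.views.calculationFields(source)).map(
        ([name, field]) => ({ ...field, name })
      )
    : []
  return { table, aggregations }
}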

View File

@ -17,6 +17,7 @@ import {
Row,
Table,
UserCtx,
ViewV2,
} from "@budibase/types"
import sdk from "../../../sdk"
import * as utils from "./utils"
@ -26,42 +27,50 @@ import {
} from "../../../utilities/rowProcessor"
import { cloneDeep } from "lodash"
import { generateIdForRow } from "./utils"
import { helpers } from "@budibase/shared-core"
import { HTTPError } from "@budibase/backend-core"
export async function handleRequest<T extends Operation>(
operation: T,
tableId: string,
source: Table | ViewV2,
opts?: RunConfig
): Promise<ExternalRequestReturnType<T>> {
return new ExternalRequest<T>(operation, tableId, opts?.datasource).run(
opts || {}
)
return (
await ExternalRequest.for<T>(operation, source, {
datasource: opts?.datasource,
})
).run(opts || {})
}
export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
const { tableId, viewId } = utils.getSourceId(ctx)
const source = await utils.getSource(ctx)
if (sdk.views.isView(source) && helpers.views.isCalculationView(source)) {
ctx.throw(400, "Cannot update rows through a calculation view")
}
const table = await utils.getTableFromSource(source)
const { _id, ...rowData } = ctx.request.body
const table = await sdk.tables.getTable(tableId)
const { row: dataToUpdate } = await inputProcessing(
const dataToUpdate = await inputProcessing(
ctx.user?._id,
cloneDeep(table),
cloneDeep(source),
rowData
)
const validateResult = await sdk.rows.utils.validate({
row: dataToUpdate,
tableId,
source,
})
if (!validateResult.valid) {
throw { validation: validateResult.errors }
}
const beforeRow = await sdk.rows.external.getRow(tableId, _id, {
const beforeRow = await sdk.rows.external.getRow(table._id!, _id, {
relationships: true,
})
const response = await handleRequest(Operation.UPDATE, tableId, {
const response = await handleRequest(Operation.UPDATE, source, {
id: breakRowIdField(_id),
row: dataToUpdate,
})
@ -69,17 +78,16 @@ export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
// The id might have been changed, so the refetching would fail. Recalculating the id just in case
const updatedId =
generateIdForRow({ ...beforeRow, ...dataToUpdate }, table) || _id
const row = await sdk.rows.external.getRow(tableId, updatedId, {
const row = await sdk.rows.external.getRow(table._id!, updatedId, {
relationships: true,
})
const [enrichedRow, oldRow] = await Promise.all([
outputProcessing(table, row, {
outputProcessing(source, row, {
squash: true,
preserveLinks: true,
fromViewId: viewId,
}),
outputProcessing(table, beforeRow, {
outputProcessing(source, beforeRow, {
squash: true,
preserveLinks: true,
}),
@ -94,9 +102,14 @@ export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
}
export async function destroy(ctx: UserCtx) {
const { tableId } = utils.getSourceId(ctx)
const source = await utils.getSource(ctx)
if (sdk.views.isView(source) && helpers.views.isCalculationView(source)) {
throw new HTTPError("Cannot delete rows through a calculation view", 400)
}
const _id = ctx.request.body._id
const { row } = await handleRequest(Operation.DELETE, tableId, {
const { row } = await handleRequest(Operation.DELETE, source, {
id: breakRowIdField(_id),
includeSqlRelationships: IncludeRelationship.EXCLUDE,
})
@ -105,11 +118,11 @@ export async function destroy(ctx: UserCtx) {
export async function bulkDestroy(ctx: UserCtx) {
const { rows } = ctx.request.body
const { tableId } = utils.getSourceId(ctx)
const source = await utils.getSource(ctx)
let promises: Promise<{ row: Row; table: Table }>[] = []
for (let row of rows) {
promises.push(
handleRequest(Operation.DELETE, tableId, {
handleRequest(Operation.DELETE, source, {
id: breakRowIdField(row._id),
includeSqlRelationships: IncludeRelationship.EXCLUDE,
})
@ -124,6 +137,7 @@ export async function bulkDestroy(ctx: UserCtx) {
export async function fetchEnrichedRow(ctx: UserCtx) {
const id = ctx.params.rowId
const source = await utils.getSource(ctx)
const { tableId } = utils.getSourceId(ctx)
const { datasourceId, tableName } = breakExternalTableId(tableId)
const datasource: Datasource = await sdk.datasources.get(datasourceId)
@ -131,7 +145,7 @@ export async function fetchEnrichedRow(ctx: UserCtx) {
ctx.throw(400, "Datasource has not been configured for plus API.")
}
const tables = datasource.entities
const response = await handleRequest(Operation.READ, tableId, {
const response = await handleRequest(Operation.READ, source, {
id,
datasource,
includeSqlRelationships: IncludeRelationship.INCLUDE,
@ -155,7 +169,7 @@ export async function fetchEnrichedRow(ctx: UserCtx) {
// don't support composite keys right now
const linkedIds = links.map((link: Row) => breakRowIdField(link._id!)[0])
const primaryLink = linkedTable.primary?.[0] as string
const relatedRows = await handleRequest(Operation.READ, linkedTableId!, {
const relatedRows = await handleRequest(Operation.READ, linkedTable, {
tables,
filters: {
oneOf: {

View File

@ -207,7 +207,7 @@ export async function destroy(ctx: UserCtx<DeleteRowRequest>) {
}
export async function search(ctx: Ctx<SearchRowRequest, SearchRowResponse>) {
const { tableId } = utils.getSourceId(ctx)
const { tableId, viewId } = utils.getSourceId(ctx)
await context.ensureSnippetContext(true)
@ -222,6 +222,7 @@ export async function search(ctx: Ctx<SearchRowRequest, SearchRowResponse>) {
...ctx.request.body,
query: enrichedQuery,
tableId,
viewId,
}
ctx.status = 200
@ -229,14 +230,15 @@ export async function search(ctx: Ctx<SearchRowRequest, SearchRowResponse>) {
}
export async function validate(ctx: Ctx<Row, ValidateResponse>) {
const { tableId } = utils.getSourceId(ctx)
const source = await utils.getSource(ctx)
const table = await utils.getTableFromSource(source)
// external tables are hard to validate currently
if (isExternalTableID(tableId)) {
if (isExternalTableID(table._id!)) {
ctx.body = { valid: true, errors: {} }
} else {
ctx.body = await sdk.rows.utils.validate({
row: ctx.request.body,
tableId,
source,
})
}
}

View File

@ -8,7 +8,7 @@ import {
} from "../../../utilities/rowProcessor"
import * as utils from "./utils"
import { cloneDeep } from "lodash/fp"
import { context } from "@budibase/backend-core"
import { context, HTTPError } from "@budibase/backend-core"
import { finaliseRow, updateRelatedFormula } from "./staticFormula"
import {
FieldType,
@ -16,23 +16,32 @@ import {
PatchRowRequest,
PatchRowResponse,
Row,
Table,
UserCtx,
} from "@budibase/types"
import sdk from "../../../sdk"
import { getLinkedTableIDs } from "../../../db/linkedRows/linkUtils"
import { flatten } from "lodash"
import { findRow } from "../../../sdk/app/rows/internal"
import { helpers } from "@budibase/shared-core"
export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
const { tableId, viewId } = utils.getSourceId(ctx)
const { tableId } = utils.getSourceId(ctx)
const source = await utils.getSource(ctx)
if (sdk.views.isView(source) && helpers.views.isCalculationView(source)) {
ctx.throw(400, "Cannot update rows through a calculation view")
}
const table = sdk.views.isView(source)
? await sdk.views.getTable(source.id)
: source
const inputs = ctx.request.body
const isUserTable = tableId === InternalTables.USER_METADATA
let oldRow
const dbTable = await sdk.tables.getTable(tableId)
try {
oldRow = await outputProcessing(
dbTable,
await utils.findRow(tableId, inputs._id!)
)
oldRow = await outputProcessing(source, await findRow(tableId, inputs._id!))
} catch (err) {
if (isUserTable) {
// don't include the rev, it'll be the global rev
@ -48,22 +57,15 @@ export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
// need to build up full patch fields before coerce
let combinedRow: any = cloneDeep(oldRow)
for (let key of Object.keys(inputs)) {
if (!dbTable.schema[key]) continue
if (!table.schema[key]) continue
combinedRow[key] = inputs[key]
}
// need to copy the table so it can be differenced on way out
const tableClone = cloneDeep(dbTable)
// this returns the table and row incase they have been updated
let { table, row } = await inputProcessing(
ctx.user?._id,
tableClone,
combinedRow
)
let row = await inputProcessing(ctx.user?._id, source, combinedRow)
const validateResult = await sdk.rows.utils.validate({
row,
table,
source,
})
if (!validateResult.valid) {
@ -87,10 +89,8 @@ export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
return { row: ctx.body as Row, table, oldRow }
}
const result = await finaliseRow(table, row, {
oldTable: dbTable,
const result = await finaliseRow(source, row, {
updateFormula: true,
fromViewId: viewId,
})
return { ...result, oldRow }
@ -98,15 +98,26 @@ export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
export async function destroy(ctx: UserCtx) {
const db = context.getAppDB()
const { tableId } = utils.getSourceId(ctx)
const source = await utils.getSource(ctx)
if (sdk.views.isView(source) && helpers.views.isCalculationView(source)) {
throw new HTTPError("Cannot delete rows through a calculation view", 400)
}
let table: Table
if (sdk.views.isView(source)) {
table = await sdk.views.getTable(source.id)
} else {
table = source
}
const { _id } = ctx.request.body
let row = await db.get<Row>(_id)
let _rev = ctx.request.body._rev || row._rev
if (row.tableId !== tableId) {
if (row.tableId !== table._id) {
throw "Supplied tableId doesn't match the row's tableId"
}
const table = await sdk.tables.getTable(tableId)
// update the row to include full relationships before deleting them
row = await outputProcessing(table, row, {
squash: false,
@ -116,7 +127,7 @@ export async function destroy(ctx: UserCtx) {
await linkRows.updateLinks({
eventType: linkRows.EventType.ROW_DELETE,
row,
tableId,
tableId: table._id!,
})
// remove any attachments that were on the row from object storage
await AttachmentCleanup.rowDelete(table, [row])
@ -124,7 +135,7 @@ export async function destroy(ctx: UserCtx) {
await updateRelatedFormula(table, row)
let response
if (tableId === InternalTables.USER_METADATA) {
if (table._id === InternalTables.USER_METADATA) {
ctx.params = {
id: _id,
}
@ -186,7 +197,7 @@ export async function fetchEnrichedRow(ctx: UserCtx) {
sdk.tables.getTable(tableId),
linkRows.getLinkDocuments({ tableId, rowId, fieldName }),
])
let row = await utils.findRow(tableId, rowId)
let row = await findRow(tableId, rowId)
row = await outputProcessing(table, row)
const linkVals = links as LinkDocumentValue[]

View File

@ -4,10 +4,11 @@ import {
processFormulas,
} from "../../../utilities/rowProcessor"
import { context } from "@budibase/backend-core"
import { Table, Row, FormulaType, FieldType } from "@budibase/types"
import { Table, Row, FormulaType, FieldType, ViewV2 } from "@budibase/types"
import * as linkRows from "../../../db/linkedRows"
import isEqual from "lodash/isEqual"
import { cloneDeep } from "lodash/fp"
import sdk from "../../../sdk"
/**
* This function runs through a list of enriched rows, looks at the rows which
@ -121,33 +122,26 @@ export async function updateAllFormulasInTable(table: Table) {
* expects the row to be totally enriched/contain all relationships.
*/
export async function finaliseRow(
table: Table,
source: Table | ViewV2,
row: Row,
{
oldTable,
updateFormula,
fromViewId,
}: { oldTable?: Table; updateFormula: boolean; fromViewId?: string } = {
updateFormula: true,
}
opts?: { updateFormula: boolean }
) {
const db = context.getAppDB()
const { updateFormula = true } = opts || {}
const table = sdk.views.isView(source)
? await sdk.views.getTable(source.id)
: source
row.type = "row"
// process the row before return, to include relationships
let enrichedRow = (await outputProcessing(table, cloneDeep(row), {
let enrichedRow = await outputProcessing(source, cloneDeep(row), {
squash: false,
})) as Row
})
// use enriched row to generate formulas for saving, specifically only use as context
row = await processFormulas(table, row, {
dynamic: false,
contextRows: [enrichedRow],
})
// don't worry about rev, tables handle rev/lastID updates
// if another row has been written since processing this will
// handle the auto ID clash
if (oldTable && !isEqual(oldTable, table)) {
await db.put(table)
}
const response = await db.put(row)
// for response, calculate the formulas for the enriched row
enrichedRow._rev = response.rev
@ -158,8 +152,6 @@ export async function finaliseRow(
if (updateFormula) {
await updateRelatedFormula(table, enrichedRow)
}
const squashed = await linkRows.squashLinks(table, enrichedRow, {
fromViewId,
})
const squashed = await linkRows.squashLinks(source, enrichedRow)
return { row: enrichedRow, squashed, table }
}
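
For clarity on the hunk above: finaliseRow no longer takes oldTable or fromViewId; it accepts a Table | ViewV2 source plus an optional opts object, with updateFormula defaulting to true. A tiny self-contained sketch equivalent to the `opts || {}` destructuring in the hunk (the helper name is illustrative):

// updateFormula falls back to true unless a caller explicitly passes false
function withUpdateFormulaDefault(opts: { updateFormula?: boolean } = {}) {
  const { updateFormula = true } = opts
  return updateFormula
}

// withUpdateFormulaDefault()                         -> true
// withUpdateFormulaDefault({ updateFormula: false }) -> false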

View File

@ -1,11 +1,19 @@
// need to handle table name + field or just field, depending on if relationships used
import { FieldSchema, FieldType, Row, Table, JsonTypes } from "@budibase/types"
import {
FieldSchema,
FieldType,
Row,
Table,
JsonTypes,
ViewV2,
} from "@budibase/types"
import {
helpers,
PROTECTED_EXTERNAL_COLUMNS,
PROTECTED_INTERNAL_COLUMNS,
} from "@budibase/shared-core"
import { generateRowIdField } from "../../../../integrations/utils"
import sdk from "../../../../sdk"
function extractFieldValue({
row,
@ -78,20 +86,30 @@ function fixJsonTypes(row: Row, table: Table) {
return row
}
export function basicProcessing({
export async function basicProcessing({
row,
table,
source,
tables,
isLinked,
sqs,
}: {
row: Row
table: Table
source: Table | ViewV2
tables: Table[]
isLinked: boolean
sqs?: boolean
}): Row {
}): Promise<Row> {
let table: Table
let isCalculationView = false
if (sdk.views.isView(source)) {
table = await sdk.views.getTable(source.id)
isCalculationView = helpers.views.isCalculationView(source)
} else {
table = source
}
const thisRow: Row = {}
// filter the row down to what is actually the row (not joined)
for (let fieldName of Object.keys(table.schema)) {
let value = extractFieldValue({
@ -108,13 +126,20 @@ export function basicProcessing({
thisRow[fieldName] = value
}
}
if (sdk.views.isView(source)) {
for (const key of Object.keys(helpers.views.calculationFields(source))) {
thisRow[key] = row[key]
}
}
let columns: string[] = Object.keys(table.schema)
if (!sqs) {
if (!sqs && !isCalculationView) {
thisRow._id = generateIdForRow(row, table, isLinked)
thisRow.tableId = table._id
thisRow._rev = "rev"
columns = columns.concat(PROTECTED_EXTERNAL_COLUMNS)
} else {
} else if (!isCalculationView) {
columns = columns.concat(PROTECTED_EXTERNAL_COLUMNS)
for (let internalColumn of [...PROTECTED_INTERNAL_COLUMNS, ...columns]) {
thisRow[internalColumn] = extractFieldValue({
@ -149,28 +174,30 @@ export function basicProcessing({
thisRow[col] = array
// make sure all of them have an _id
const sortField = relatedTable.primaryDisplay || relatedTable.primary![0]!
thisRow[col] = (thisRow[col] as Row[])
.map(relatedRow =>
basicProcessing({
row: relatedRow,
table: relatedTable,
tables,
isLinked: false,
sqs,
})
thisRow[col] = (
await Promise.all(
(thisRow[col] as Row[]).map(relatedRow =>
basicProcessing({
row: relatedRow,
source: relatedTable,
tables,
isLinked: false,
sqs,
})
)
)
.sort((a, b) => {
const aField = a?.[sortField],
bField = b?.[sortField]
if (!aField) {
return 1
} else if (!bField) {
return -1
}
return aField.localeCompare
? aField.localeCompare(bField)
: aField - bField
})
).sort((a, b) => {
const aField = a?.[sortField],
bField = b?.[sortField]
if (!aField) {
return 1
} else if (!bField) {
return -1
}
return aField.localeCompare
? aField.localeCompare(bField)
: aField - bField
})
}
}
return fixJsonTypes(thisRow, table)
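
The related-row handling above sorts linked rows by the related table's primary display column, falling back to its first primary key. A self-contained sketch of that comparator, mirroring the logic in the hunk (the function name is illustrative):

// Strings compare with localeCompare, numbers fall back to subtraction,
// and rows missing the sort field are pushed to the end.
function sortRelatedRowsExample(rows: Record<string, any>[], sortField: string) {
  return [...rows].sort((a, b) => {
    const aField = a?.[sortField],
      bField = b?.[sortField]
    if (!aField) {
      return 1
    } else if (!bField) {
      return -1
    }
    return aField.localeCompare ? aField.localeCompare(bField) : aField - bField
  })
}

// sortRelatedRowsExample([{ name: "Beta" }, { name: "Alpha" }, {}], "name")
// -> [{ name: "Alpha" }, { name: "Beta" }, {}]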

View File

@ -7,10 +7,14 @@ import {
ManyToManyRelationshipFieldMetadata,
RelationshipFieldMetadata,
RelationshipsJson,
Row,
Table,
ViewV2,
} from "@budibase/types"
import { breakExternalTableId } from "../../../../integrations/utils"
import { generateJunctionTableID } from "../../../../db/utils"
import sdk from "../../../../sdk"
import { helpers } from "@budibase/shared-core"
type TableMap = Record<string, Table>
@ -108,37 +112,49 @@ export function buildInternalRelationships(
* Creating the specific list of fields that we desire, and excluding the ones that are no use to us
* is more performant and has the added benefit of protecting against this scenario.
*/
export function buildSqlFieldList(
table: Table,
export async function buildSqlFieldList(
source: Table | ViewV2,
tables: TableMap,
opts?: { relationships: boolean }
) {
const { relationships } = opts || {}
function extractRealFields(table: Table, existing: string[] = []) {
return Object.entries(table.schema)
.filter(
([columnName, column]) =>
column.type !== FieldType.LINK &&
column.type !== FieldType.FORMULA &&
!existing.find((field: string) => field === columnName)
!existing.find(
(field: string) => field === `${table.name}.${columnName}`
)
)
.map(column => `${table.name}.${column[0]}`)
.map(([columnName]) => `${table.name}.${columnName}`)
}
let fields = extractRealFields(table)
let fields: string[] = []
if (sdk.views.isView(source)) {
fields = Object.keys(helpers.views.basicFields(source))
} else {
fields = extractRealFields(source)
}
let table: Table
if (sdk.views.isView(source)) {
table = await sdk.views.getTable(source.id)
} else {
table = source
}
for (let field of Object.values(table.schema)) {
if (
field.type !== FieldType.LINK ||
!opts?.relationships ||
!field.tableId
) {
if (field.type !== FieldType.LINK || !relationships || !field.tableId) {
continue
}
const { tableName: linkTableName } = breakExternalTableId(field.tableId)
const linkTable = tables[linkTableName]
if (linkTable) {
const linkedFields = extractRealFields(linkTable, fields)
fields = fields.concat(linkedFields)
const { tableName } = breakExternalTableId(field.tableId)
if (tables[tableName]) {
fields = fields.concat(extractRealFields(tables[tableName], fields))
}
}
return fields
}
@ -149,3 +165,7 @@ export function isKnexEmptyReadResponse(resp: DatasourcePlusQueryResponse) {
(DSPlusOperation.READ in resp[0] && resp[0].read === true)
)
}
export function isKnexRows(resp: DatasourcePlusQueryResponse): resp is Row[] {
return !isKnexEmptyReadResponse(resp)
}
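
buildSqlFieldList above emits table columns as "<table name>.<column>" for both the source table and any linked tables, which avoids column-name collisions in SQL joins. A self-contained sketch of that qualification step with simplified types (the names are illustrative):

type MiniTable = { name: string; schema: Record<string, { type: string }> }

// Emit "<table>.<column>" for plain columns, skipping links/formulas and
// anything already present in the existing list.
function extractRealFieldsExample(table: MiniTable, existing: string[] = []) {
  return Object.entries(table.schema)
    .filter(
      ([columnName, column]) =>
        column.type !== "link" &&
        column.type !== "formula" &&
        !existing.includes(`${table.name}.${columnName}`)
    )
    .map(([columnName]) => `${table.name}.${columnName}`)
}

// extractRealFieldsExample({ name: "people", schema: { name: { type: "string" }, jobs: { type: "link" } } })
// -> ["people.name"]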

View File

@ -1,6 +1,6 @@
import * as utils from "../../../../db/utils"
import { context } from "@budibase/backend-core"
import { docIds } from "@budibase/backend-core"
import {
Ctx,
DatasourcePlusQueryResponse,
@ -8,17 +8,18 @@ import {
RelationshipsJson,
Row,
Table,
ViewV2,
} from "@budibase/types"
import {
processDates,
processFormulas,
} from "../../../../utilities/rowProcessor"
import { isKnexEmptyReadResponse } from "./sqlUtils"
import { isKnexRows } from "./sqlUtils"
import { basicProcessing, generateIdForRow, getInternalRowId } from "./basic"
import sdk from "../../../../sdk"
import { processStringSync } from "@budibase/string-templates"
import validateJs from "validate.js"
import { getFullUser } from "../../../../utilities/users"
import { helpers } from "@budibase/shared-core"
validateJs.extend(validateJs.validators.datetime, {
parse: function (value: string) {
@ -58,26 +59,11 @@ export async function processRelationshipFields(
return row
}
export async function findRow(tableId: string, rowId: string) {
const db = context.getAppDB()
let row: Row
// TODO remove special user case in future
if (tableId === utils.InternalTables.USER_METADATA) {
row = await getFullUser(rowId)
} else {
row = await db.get(rowId)
}
if (row.tableId !== tableId) {
throw "Supplied tableId does not match the rows tableId"
}
return row
}
export function getSourceId(ctx: Ctx): { tableId: string; viewId?: string } {
// top priority, use the URL first
if (ctx.params?.sourceId) {
const { sourceId } = ctx.params
if (utils.isViewID(sourceId)) {
if (docIds.isViewId(sourceId)) {
return {
tableId: utils.extractViewInfoFromID(sourceId).tableId,
viewId: sourceId,
@ -96,22 +82,22 @@ export function getSourceId(ctx: Ctx): { tableId: string; viewId?: string } {
throw new Error("Unable to find table ID in request")
}
export async function validate(
opts: { row: Row } & ({ tableId: string } | { table: Table })
) {
let fetchedTable: Table
if ("tableId" in opts) {
fetchedTable = await sdk.tables.getTable(opts.tableId)
} else {
fetchedTable = opts.table
export async function getSource(ctx: Ctx): Promise<Table | ViewV2> {
const { tableId, viewId } = getSourceId(ctx)
if (viewId) {
return sdk.views.get(viewId)
}
return sdk.rows.utils.validate({
...opts,
table: fetchedTable,
})
return sdk.tables.getTable(tableId)
}
function fixBooleanFields({ row, table }: { row: Row; table: Table }) {
export async function getTableFromSource(source: Table | ViewV2) {
if (sdk.views.isView(source)) {
return await sdk.views.getTable(source.id)
}
return source
}
function fixBooleanFields(row: Row, table: Table) {
for (let col of Object.values(table.schema)) {
if (col.type === FieldType.BOOLEAN) {
if (row[col.name] === 1) {
@ -126,49 +112,45 @@ function fixBooleanFields({ row, table }: { row: Row; table: Table }) {
export async function sqlOutputProcessing(
rows: DatasourcePlusQueryResponse,
table: Table,
source: Table | ViewV2,
tables: Record<string, Table>,
relationships: RelationshipsJson[],
opts?: { sqs?: boolean }
): Promise<Row[]> {
if (isKnexEmptyReadResponse(rows)) {
if (!isKnexRows(rows)) {
return []
}
let finalRows: { [key: string]: Row } = {}
for (let row of rows as Row[]) {
let rowId = row._id
let table: Table
let isCalculationView = false
if (sdk.views.isView(source)) {
table = await sdk.views.getTable(source.id)
isCalculationView = helpers.views.isCalculationView(source)
} else {
table = source
}
let processedRows: Row[] = []
for (let row of rows) {
if (opts?.sqs) {
rowId = getInternalRowId(row, table)
row._id = rowId
} else if (!rowId) {
rowId = generateIdForRow(row, table)
row._id = rowId
row._id = getInternalRowId(row, table)
} else if (row._id == null && !isCalculationView) {
row._id = generateIdForRow(row, table)
}
const thisRow = basicProcessing({
row = await basicProcessing({
row,
table,
source,
tables: Object.values(tables),
isLinked: false,
sqs: opts?.sqs,
})
if (thisRow._id == null) {
throw new Error("Unable to generate row ID for SQL rows")
}
finalRows[thisRow._id] = fixBooleanFields({ row: thisRow, table })
row = fixBooleanFields(row, table)
row = await processRelationshipFields(table, tables, row, relationships)
processedRows.push(row)
}
// make sure all related rows are correct
let finalRowArray = []
for (let row of Object.values(finalRows)) {
finalRowArray.push(
await processRelationshipFields(table, tables, row, relationships)
)
}
// process some additional types
finalRowArray = processDates(table, finalRowArray)
return finalRowArray
return processDates(table, processedRows)
}
export function isUserMetadataTable(tableId: string) {

View File

@ -5,14 +5,9 @@ import {
SearchViewRowRequest,
RequiredKeys,
RowSearchParams,
SearchFilterKey,
LogicalOperator,
} from "@budibase/types"
import { dataFilters } from "@budibase/shared-core"
import sdk from "../../../sdk"
import { db, context, features } from "@budibase/backend-core"
import { enrichSearchContext } from "./utils"
import { isExternalTableID } from "../../../integrations/utils"
import { context } from "@budibase/backend-core"
export async function searchView(
ctx: UserCtx<SearchViewRowRequest, SearchRowResponse>
@ -32,55 +27,15 @@ export async function searchView(
.map(([key]) => key)
const { body } = ctx.request
// Enrich saved query with ephemeral query params.
// We prevent searching on any fields that are saved as part of the query, as
// that could let users find rows they should not be allowed to access.
let query = dataFilters.buildQuery(view.query || [])
if (body.query) {
// Delete extraneous search params that cannot be overridden
delete body.query.onEmptyFilter
if (
!isExternalTableID(view.tableId) &&
!(await features.flags.isEnabled("SQS"))
) {
// Extract existing fields
const existingFields =
view.query
?.filter(filter => filter.field)
.map(filter => db.removeKeyNumbering(filter.field)) || []
// Carry over filters for unused fields
Object.keys(body.query).forEach(key => {
const operator = key as Exclude<SearchFilterKey, LogicalOperator>
Object.keys(body.query[operator] || {}).forEach(field => {
if (!existingFields.includes(db.removeKeyNumbering(field))) {
query[operator]![field] = body.query[operator]![field]
}
})
})
} else {
query = {
$and: {
conditions: [query, body.query],
},
}
}
}
await context.ensureSnippetContext(true)
const enrichedQuery = await enrichSearchContext(query, {
user: sdk.users.getUserContextBindings(ctx.user),
})
const searchOptions: RequiredKeys<SearchViewRowRequest> &
RequiredKeys<
Pick<RowSearchParams, "tableId" | "viewId" | "query" | "fields">
> = {
tableId: view.tableId,
viewId: view.id,
query: enrichedQuery,
query: body.query,
fields: viewFields,
...getSortOptions(body, view),
limit: body.limit,
@ -89,11 +44,12 @@ export async function searchView(
countRows: body.countRows,
}
const result = await sdk.rows.search(searchOptions)
const result = await sdk.rows.search(searchOptions, {
user: sdk.users.getUserContextBindings(ctx.user),
})
result.rows.forEach(r => (r._viewId = view.id))
ctx.body = result
}
function getSortOptions(request: SearchViewRowRequest, view: ViewV2) {
if (request.sort) {
return {

View File

@ -1,6 +1,7 @@
import {
CreateRowActionRequest,
Ctx,
RowActionPermissions,
RowActionResponse,
RowActionsResponse,
UpdateRowActionRequest,
@ -18,25 +19,27 @@ async function getTable(ctx: Ctx) {
export async function find(ctx: Ctx<void, RowActionsResponse>) {
const table = await getTable(ctx)
const tableId = table._id!
if (!(await sdk.rowActions.docExists(table._id!))) {
const rowActions = await sdk.rowActions.getAll(tableId)
if (!rowActions) {
ctx.body = {
actions: {},
}
return
}
const { actions } = await sdk.rowActions.getAll(table._id!)
const { actions } = rowActions
const result: RowActionsResponse = {
actions: Object.entries(actions).reduce<Record<string, RowActionResponse>>(
(acc, [key, action]) => ({
...acc,
[key]: {
id: key,
tableId: table._id!,
tableId,
name: action.name,
automationId: action.automationId,
allowedViews: flattenAllowedViews(action.permissions.views),
allowedSources: flattenAllowedSources(tableId, action.permissions),
},
}),
{}
@ -49,17 +52,18 @@ export async function create(
ctx: Ctx<CreateRowActionRequest, RowActionResponse>
) {
const table = await getTable(ctx)
const tableId = table._id!
const createdAction = await sdk.rowActions.create(table._id!, {
const createdAction = await sdk.rowActions.create(tableId, {
name: ctx.request.body.name,
})
ctx.body = {
tableId: table._id!,
tableId,
id: createdAction.id,
name: createdAction.name,
automationId: createdAction.automationId,
allowedViews: undefined,
allowedSources: flattenAllowedSources(tableId, createdAction.permissions),
}
ctx.status = 201
}
@ -68,18 +72,19 @@ export async function update(
ctx: Ctx<UpdateRowActionRequest, RowActionResponse>
) {
const table = await getTable(ctx)
const tableId = table._id!
const { actionId } = ctx.params
const action = await sdk.rowActions.update(table._id!, actionId, {
const action = await sdk.rowActions.update(tableId, actionId, {
name: ctx.request.body.name,
})
ctx.body = {
tableId: table._id!,
tableId,
id: action.id,
name: action.name,
automationId: action.automationId,
allowedViews: undefined,
allowedSources: flattenAllowedSources(tableId, action.permissions),
}
}
@ -91,52 +96,89 @@ export async function remove(ctx: Ctx<void, void>) {
ctx.status = 204
}
export async function setTablePermission(ctx: Ctx<void, RowActionResponse>) {
const table = await getTable(ctx)
const tableId = table._id!
const { actionId } = ctx.params
const action = await sdk.rowActions.setTablePermission(tableId, actionId)
ctx.body = {
tableId,
id: action.id,
name: action.name,
automationId: action.automationId,
allowedSources: flattenAllowedSources(tableId, action.permissions),
}
}
export async function unsetTablePermission(ctx: Ctx<void, RowActionResponse>) {
const table = await getTable(ctx)
const tableId = table._id!
const { actionId } = ctx.params
const action = await sdk.rowActions.unsetTablePermission(tableId, actionId)
ctx.body = {
tableId,
id: action.id,
name: action.name,
automationId: action.automationId,
allowedSources: flattenAllowedSources(tableId, action.permissions),
}
}
export async function setViewPermission(ctx: Ctx<void, RowActionResponse>) {
const table = await getTable(ctx)
const tableId = table._id!
const { actionId, viewId } = ctx.params
const action = await sdk.rowActions.setViewPermission(
table._id!,
tableId,
actionId,
viewId
)
ctx.body = {
tableId: table._id!,
tableId,
id: action.id,
name: action.name,
automationId: action.automationId,
allowedViews: flattenAllowedViews(action.permissions.views),
allowedSources: flattenAllowedSources(tableId, action.permissions),
}
}
export async function unsetViewPermission(ctx: Ctx<void, RowActionResponse>) {
const table = await getTable(ctx)
const tableId = table._id!
const { actionId, viewId } = ctx.params
const action = await sdk.rowActions.unsetViewPermission(
table._id!,
tableId,
actionId,
viewId
)
ctx.body = {
tableId: table._id!,
tableId,
id: action.id,
name: action.name,
automationId: action.automationId,
allowedViews: flattenAllowedViews(action.permissions.views),
allowedSources: flattenAllowedSources(tableId, action.permissions),
}
}
function flattenAllowedViews(
permissions: Record<string, { runAllowed: boolean }>
function flattenAllowedSources(
tableId: string,
permissions: RowActionPermissions
) {
const allowedPermissions = Object.entries(permissions || {})
.filter(([_, p]) => p.runAllowed)
.map(([viewId]) => viewId)
if (!allowedPermissions.length) {
return undefined
const allowedPermissions = []
if (permissions.table.runAllowed) {
allowedPermissions.push(tableId)
}
allowedPermissions.push(
...Object.keys(permissions.views || {}).filter(
viewId => permissions.views[viewId].runAllowed
)
)
return allowedPermissions
}
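
flattenAllowedSources above replaces the old view-only flattening: the table id is included when the table-level permission allows running, followed by any views whose permission does. A self-contained sketch with simplified types (the names and ids are illustrative):

type RunPermission = { runAllowed: boolean }
type PermissionsShape = { table: RunPermission; views: Record<string, RunPermission> }

function flattenAllowedSourcesExample(tableId: string, permissions: PermissionsShape) {
  const allowed: string[] = []
  // the owning table is an allowed source when its own permission is set
  if (permissions.table.runAllowed) {
    allowed.push(tableId)
  }
  // then any views explicitly allowed to run the action
  allowed.push(
    ...Object.keys(permissions.views || {}).filter(
      viewId => permissions.views[viewId].runAllowed
    )
  )
  return allowed
}

// flattenAllowedSourcesExample("ta_users", {
//   table: { runAllowed: true },
//   views: { view_a: { runAllowed: false }, view_b: { runAllowed: true } },
// })
// -> ["ta_users", "view_b"]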

View File

@ -1,11 +1,18 @@
import { Ctx } from "@budibase/types"
import { IsolatedVM } from "../../jsRunner/vm"
import { iifeWrapper } from "@budibase/string-templates"
import { iifeWrapper, UserScriptError } from "@budibase/string-templates"
export async function execute(ctx: Ctx) {
const { script, context } = ctx.request.body
const vm = new IsolatedVM()
ctx.body = vm.withContext(context, () => vm.execute(iifeWrapper(script)))
try {
ctx.body = vm.withContext(context, () => vm.execute(iifeWrapper(script)))
} catch (err: any) {
if (err.code === UserScriptError.code) {
throw err.userScriptError
}
throw err
}
}
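
The try/catch above unwraps errors tagged with UserScriptError.code so the caller sees the user script's own failure rather than the VM wrapper. A minimal sketch of that pattern, reusing the UserScriptError export imported in this hunk (the standalone helper name is illustrative):

import { UserScriptError } from "@budibase/string-templates"

// Rethrow the original user-script failure when present, otherwise the raw error.
function unwrapUserScriptError(err: any): never {
  if (err.code === UserScriptError.code) {
    throw err.userScriptError
  }
  throw err
}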
export async function save(ctx: Ctx) {

View File

@ -31,7 +31,7 @@ function getDatasourceId(table: Table) {
return breakExternalTableId(table._id).datasourceId
}
export async function save(
export async function updateTable(
ctx: UserCtx<SaveTableRequest, SaveTableResponse>,
renaming?: RenameColumn
) {
@ -113,11 +113,10 @@ export async function bulkImport(
const processed = await inputProcessing(ctx.user?._id, table, row, {
noAutoRelationships: true,
})
parsedRows.push(processed.row)
table = processed.table
parsedRows.push(processed)
}
await handleRequest(Operation.BULK_UPSERT, table._id!, {
await handleRequest(Operation.BULK_UPSERT, table, {
rows: parsedRows,
})
await events.rows.imported(table, parsedRows.length)

View File

@ -33,7 +33,7 @@ import {
import sdk from "../../../sdk"
import { jsonFromCsvString } from "../../../utilities/csv"
import { builderSocket } from "../../../websockets"
import { cloneDeep, isEqual } from "lodash"
import { cloneDeep } from "lodash"
import {
helpers,
PROTECTED_EXTERNAL_COLUMNS,
@ -71,19 +71,20 @@ export async function fetch(ctx: UserCtx<void, FetchTablesResponse>) {
const datasources = await sdk.datasources.getExternalDatasources()
const external = datasources.flatMap(datasource => {
const external: Table[] = []
for (const datasource of datasources) {
let entities = datasource.entities
if (entities) {
return Object.values(entities).map<Table>((entity: Table) => ({
...entity,
sourceType: TableSourceType.EXTERNAL,
sourceId: datasource._id!,
sql: isSQL(datasource),
}))
} else {
return []
for (const entity of Object.values(entities)) {
external.push({
...(await processTable(entity)),
sourceType: TableSourceType.EXTERNAL,
sourceId: datasource._id!,
sql: isSQL(datasource),
})
}
}
})
}
const result: FetchTablesResponse = []
for (const table of [...internal, ...external]) {
@ -102,18 +103,22 @@ export async function find(ctx: UserCtx<void, TableResponse>) {
export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
const appId = ctx.appId
const table = ctx.request.body
const isImport = table.rows
const { rows, ...table } = ctx.request.body
const isImport = rows
const renaming = ctx.request.body._rename
const isCreate = !table._id
checkDefaultFields(table)
const api = pickApi({ table })
let savedTable = await api.save(ctx, renaming)
if (!table._id) {
let savedTable: Table
if (isCreate) {
savedTable = await sdk.tables.create(table, rows, ctx.user._id)
savedTable = await sdk.tables.enrichViewSchemas(savedTable)
await events.table.created(savedTable)
} else {
const api = pickApi({ table })
savedTable = await api.updateTable(ctx, renaming)
await events.table.updated(savedTable)
}
if (renaming) {
@ -135,6 +140,7 @@ export async function save(ctx: UserCtx<SaveTableRequest, SaveTableResponse>) {
export async function destroy(ctx: UserCtx) {
const appId = ctx.appId
const tableId = ctx.params.tableId
await sdk.rowActions.deleteAll(tableId)
const deletedTable = await pickApi({ tableId }).destroy(ctx)
await events.table.deleted(deletedTable)
ctx.eventEmitter &&
@ -149,12 +155,7 @@ export async function bulkImport(
ctx: UserCtx<BulkImportRequest, BulkImportResponse>
) {
const tableId = ctx.params.tableId
let tableBefore = await sdk.tables.getTable(tableId)
let tableAfter = await pickApi({ tableId }).bulkImport(ctx)
if (!isEqual(tableBefore, tableAfter)) {
await sdk.tables.saveTable(tableAfter)
}
await pickApi({ tableId }).bulkImport(ctx)
// right now we don't trigger anything for bulk import because it
// can only be done in the builder, but in the future we may need to

View File

@ -3,7 +3,6 @@ import { handleDataImport } from "./utils"
import {
BulkImportRequest,
BulkImportResponse,
FieldType,
RenameColumn,
SaveTableRequest,
SaveTableResponse,
@ -13,7 +12,7 @@ import {
} from "@budibase/types"
import sdk from "../../../sdk"
export async function save(
export async function updateTable(
ctx: UserCtx<SaveTableRequest, SaveTableResponse>,
renaming?: RenameColumn
) {
@ -26,19 +25,16 @@ export async function save(
sourceType: rest.sourceType || TableSourceType.INTERNAL,
}
const isImport = !!rows
if (!tableToSave.views) {
tableToSave.views = {}
}
try {
const { table } = await sdk.tables.internal.save(tableToSave, {
user: ctx.user,
userId: ctx.user._id,
rowsToImport: rows,
tableId: ctx.request.body._id,
renaming,
isImport,
})
return table
@ -70,22 +66,10 @@ export async function bulkImport(
) {
const table = await sdk.tables.getTable(ctx.params.tableId)
const { rows, identifierFields } = ctx.request.body
await handleDataImport(
{
...table,
schema: {
_id: {
name: "_id",
type: FieldType.STRING,
},
...table.schema,
},
},
{
importRows: rows,
identifierFields,
user: ctx.user,
}
)
await handleDataImport(table, {
importRows: rows,
identifierFields,
userId: ctx.user._id,
})
return table
}

View File

@ -41,7 +41,7 @@ describe("utils", () => {
const data = [{ name: "Alice" }, { name: "Bob" }, { name: "Claire" }]
const result = await importToRows(data, table, config.user)
const result = await importToRows(data, table, config.user?._id)
expect(result).toEqual([
expect.objectContaining({
autoId: 1,

View File

@ -18,7 +18,6 @@ import { quotas } from "@budibase/pro"
import { events, context, features } from "@budibase/backend-core"
import {
AutoFieldSubType,
ContextUser,
Datasource,
Row,
SourceName,
@ -122,7 +121,7 @@ export function makeSureTableUpToDate(table: Table, tableToSave: Table) {
export async function importToRows(
data: Row[],
table: Table,
user?: ContextUser,
userId?: string,
opts?: { keepCouchId: boolean }
) {
const originalTable = table
@ -136,11 +135,10 @@ export async function importToRows(
// We use a reference to table here and update it after input processing,
// so that we can auto increment auto IDs in imported data properly
const processed = await inputProcessing(user?._id, table, row, {
const processed = await inputProcessing(userId, table, row, {
noAutoRelationships: true,
})
row = processed.row
table = processed.table
row = processed
// However here we must reference the original table, as we want to mutate
// the real schema of the table passed in, not the clone used for
@ -168,11 +166,10 @@ export async function importToRows(
export async function handleDataImport(
table: Table,
opts?: { identifierFields?: string[]; user?: ContextUser; importRows?: Row[] }
opts?: { identifierFields?: string[]; userId?: string; importRows?: Row[] }
) {
const schema = table.schema
const identifierFields = opts?.identifierFields || []
const user = opts?.user
const importRows = opts?.importRows
if (!importRows || !isRows(importRows) || !isSchema(schema)) {
@ -182,7 +179,7 @@ export async function handleDataImport(
const db = context.getAppDB()
const data = parse(importRows, table)
const finalData = await importToRows(data, table, user, {
const finalData = await importToRows(data, table, opts?.userId, {
keepCouchId: identifierFields.includes("_id"),
})
@ -283,22 +280,22 @@ export function checkStaticTables(table: Table) {
class TableSaveFunctions {
db: Database
user?: ContextUser
userId?: string
oldTable?: Table
importRows?: Row[]
rows: Row[]
constructor({
user,
userId,
oldTable,
importRows,
}: {
user?: ContextUser
userId?: string
oldTable?: Table
importRows?: Row[]
}) {
this.db = context.getAppDB()
this.user = user
this.userId = userId
this.oldTable = oldTable
this.importRows = importRows
// any rows that need updated
@ -330,7 +327,7 @@ class TableSaveFunctions {
table = await handleSearchIndexes(table)
table = await handleDataImport(table, {
importRows: this.importRows,
user: this.user,
userId: this.userId,
})
if (await features.flags.isEnabled("SQS")) {
await sdk.tables.sqs.addTable(table)

View File

@ -7,10 +7,75 @@ import {
ViewResponse,
ViewResponseEnriched,
ViewV2,
ViewFieldMetadata,
BasicViewFieldMetadata,
ViewCalculationFieldMetadata,
RelationSchemaField,
ViewFieldMetadata,
CalculationType,
} from "@budibase/types"
import { builderSocket, gridSocket } from "../../../websockets"
import { helpers } from "@budibase/shared-core"
function stripUnknownFields(
field: ViewFieldMetadata
): RequiredKeys<ViewFieldMetadata> {
if (helpers.views.isCalculationField(field)) {
if (field.calculationType === CalculationType.COUNT) {
if ("distinct" in field && field.distinct) {
return {
order: field.order,
width: field.width,
visible: field.visible,
readonly: field.readonly,
icon: field.icon,
distinct: field.distinct,
calculationType: field.calculationType,
field: field.field,
columns: field.columns,
}
} else {
return {
order: field.order,
width: field.width,
visible: field.visible,
readonly: field.readonly,
icon: field.icon,
calculationType: field.calculationType,
columns: field.columns,
}
}
}
const strippedField: RequiredKeys<ViewCalculationFieldMetadata> = {
order: field.order,
width: field.width,
visible: field.visible,
readonly: field.readonly,
icon: field.icon,
calculationType: field.calculationType,
field: field.field,
columns: field.columns,
}
return strippedField
} else {
const strippedField: RequiredKeys<BasicViewFieldMetadata> = {
order: field.order,
width: field.width,
visible: field.visible,
readonly: field.readonly,
icon: field.icon,
columns: field.columns,
}
return strippedField
}
}
function stripUndefinedFields(obj: Record<string, any>): void {
Object.keys(obj)
.filter(key => obj[key] === undefined)
.forEach(key => {
delete obj[key]
})
}
async function parseSchema(view: CreateViewRequest) {
if (!view.schema) {
@ -22,6 +87,7 @@ async function parseSchema(view: CreateViewRequest) {
let fieldRelatedSchema:
| Record<string, RequiredKeys<RelationSchemaField>>
| undefined
if (schemaValue.columns) {
fieldRelatedSchema = Object.entries(schemaValue.columns).reduce<
NonNullable<typeof fieldRelatedSchema>
@ -35,25 +101,12 @@ async function parseSchema(view: CreateViewRequest) {
}
return acc
}, {})
schemaValue.columns = fieldRelatedSchema
}
const fieldSchema: RequiredKeys<
ViewFieldMetadata & {
columns: typeof fieldRelatedSchema
}
> = {
order: schemaValue.order,
width: schemaValue.width,
visible: schemaValue.visible,
readonly: schemaValue.readonly,
icon: schemaValue.icon,
columns: fieldRelatedSchema,
}
Object.entries(fieldSchema)
.filter(([, val]) => val === undefined)
.forEach(([key]) => {
delete fieldSchema[key as keyof ViewFieldMetadata]
})
const fieldSchema = stripUnknownFields(schemaValue)
stripUndefinedFields(fieldSchema)
p[fieldName] = fieldSchema
return p
}, {} as Record<string, RequiredKeys<ViewFieldMetadata>>)
@ -74,8 +127,10 @@ export async function create(ctx: Ctx<CreateViewRequest, ViewResponse>) {
const parsedView: Omit<RequiredKeys<ViewV2>, "id" | "version"> = {
name: view.name,
type: view.type,
tableId: view.tableId,
query: view.query,
queryUI: view.queryUI,
sort: view.sort,
schema,
primaryDisplay: view.primaryDisplay,
@ -108,9 +163,11 @@ export async function update(ctx: Ctx<UpdateViewRequest, ViewResponse>) {
const parsedView: RequiredKeys<ViewV2> = {
id: view.id,
name: view.name,
type: view.type,
version: view.version,
tableId: view.tableId,
query: view.query,
queryUI: view.queryUI,
sort: view.sort,
schema,
primaryDisplay: view.primaryDisplay,

View File

@ -33,6 +33,7 @@ import rowActionRoutes from "./rowAction"
export { default as staticRoutes } from "./static"
export { default as publicRoutes } from "./public"
const aiRoutes = pro.ai
const appBackupRoutes = pro.appBackups
const environmentVariableRoutes = pro.environmentVariables
@ -67,6 +68,7 @@ export const mainRoutes: Router[] = [
debugRoutes,
environmentVariableRoutes,
rowActionRoutes,
aiRoutes,
// these need to be handled last as they still use /api/:tableId
// this could be breaking as koa may recognise other routes as this
tableRoutes,

View File

@ -51,6 +51,16 @@ router
authorized(BUILDER),
rowActionController.remove
)
.post(
"/api/tables/:tableId/actions/:actionId/permissions",
authorized(BUILDER),
rowActionController.setTablePermission
)
.delete(
"/api/tables/:tableId/actions/:actionId/permissions",
authorized(BUILDER),
rowActionController.unsetTablePermission
)
.post(
"/api/tables/:tableId/actions/:actionId/permissions/:viewId",
authorized(BUILDER),

View File

@ -14,12 +14,7 @@ jest.mock("../../../utilities/redis", () => ({
import { checkBuilderEndpoint } from "./utilities/TestFunctions"
import * as setup from "./utilities"
import { AppStatus } from "../../../db/utils"
import {
events,
utils,
context,
withEnv as withCoreEnv,
} from "@budibase/backend-core"
import { events, utils, context, features } from "@budibase/backend-core"
import env from "../../../environment"
import { type App } from "@budibase/types"
import tk from "timekeeper"
@ -358,9 +353,13 @@ describe("/applications", () => {
.delete(`/api/global/roles/${prodAppId}`)
.reply(200, {})
await withCoreEnv({ TENANT_FEATURE_FLAGS: "*:SQS" }, async () => {
await config.api.application.delete(app.appId)
})
await features.testutils.withFeatureFlags(
"*",
{ SQS: true },
async () => {
await config.api.application.delete(app.appId)
}
)
})
})

View File

@ -1,5 +1,5 @@
import { roles } from "@budibase/backend-core"
import { Document, PermissionLevel, Row, Table, ViewV2 } from "@budibase/types"
import { Document, PermissionLevel, Row } from "@budibase/types"
import * as setup from "./utilities"
import { generator, mocks } from "@budibase/backend-core/tests"
@ -9,13 +9,11 @@ const { BUILTIN_ROLE_IDS } = roles
const HIGHER_ROLE_ID = BUILTIN_ROLE_IDS.BASIC
const STD_ROLE_ID = BUILTIN_ROLE_IDS.PUBLIC
const DEFAULT_TABLE_ROLE_ID = BUILTIN_ROLE_IDS.ADMIN
describe("/permission", () => {
let request = setup.getRequest()
let config = setup.getConfig()
let table: Table & { _id: string }
let perms: Document[]
let row: Row
let view: ViewV2
afterAll(setup.afterAll)
@ -25,18 +23,6 @@ describe("/permission", () => {
beforeEach(async () => {
mocks.licenses.useCloudFree()
table = (await config.createTable()) as typeof table
row = await config.createRow()
view = await config.api.viewV2.create({
tableId: table._id!,
name: generator.guid(),
})
perms = await config.api.permission.add({
roleId: STD_ROLE_ID,
resourceId: table._id,
level: PermissionLevel.READ,
})
})
describe("levels", () => {
@ -54,122 +40,251 @@ describe("/permission", () => {
})
})
describe("add", () => {
it("should be able to add permission to a role for the table", async () => {
expect(perms.length).toEqual(1)
expect(perms[0]._id).toEqual(`${STD_ROLE_ID}`)
})
describe("table permissions", () => {
let tableId: string
it("should get the resource permissions", async () => {
const res = await request
.get(`/api/permission/${table._id}`)
.set(config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(200)
expect(res.body).toEqual({
permissions: {
read: { permissionType: "EXPLICIT", role: STD_ROLE_ID },
write: { permissionType: "BASE", role: HIGHER_ROLE_ID },
},
})
})
it("should get resource permissions with multiple roles", async () => {
perms = await config.api.permission.add({
roleId: HIGHER_ROLE_ID,
resourceId: table._id,
level: PermissionLevel.WRITE,
})
const res = await config.api.permission.get(table._id)
expect(res).toEqual({
permissions: {
read: { permissionType: "EXPLICIT", role: STD_ROLE_ID },
write: { permissionType: "EXPLICIT", role: HIGHER_ROLE_ID },
},
})
const allRes = await request
.get(`/api/permission`)
.set(config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(200)
expect(allRes.body[table._id]["read"]).toEqual(STD_ROLE_ID)
expect(allRes.body[table._id]["write"]).toEqual(HIGHER_ROLE_ID)
})
})
describe("remove", () => {
it("should be able to remove the permission", async () => {
const res = await config.api.permission.revoke({
roleId: STD_ROLE_ID,
resourceId: table._id,
level: PermissionLevel.READ,
})
expect(res[0]._id).toEqual(STD_ROLE_ID)
const permsRes = await config.api.permission.get(table._id)
expect(permsRes.permissions[STD_ROLE_ID]).toBeUndefined()
})
})
describe("check public user allowed", () => {
it("should be able to read the row", async () => {
// replicate changes before checking permissions
await config.publish()
const res = await request
.get(`/api/${table._id}/rows`)
.set(config.publicHeaders())
.expect("Content-Type", /json/)
.expect(200)
expect(res.body[0]._id).toEqual(row._id)
})
it("should be able to access the view data when the table is set to public and with no view permissions overrides", async () => {
// replicate changes before checking permissions
await config.publish()
const res = await config.api.viewV2.publicSearch(view.id)
expect(res.rows[0]._id).toEqual(row._id)
})
it("should not be able to access the view data when the table is not public and there are no view permissions overrides", async () => {
await config.api.permission.revoke({
roleId: STD_ROLE_ID,
resourceId: table._id,
level: PermissionLevel.READ,
})
// replicate changes before checking permissions
await config.publish()
await config.api.viewV2.publicSearch(view.id, undefined, { status: 401 })
})
it("should use the view permissions", async () => {
beforeEach(async () => {
const table = await config.createTable()
tableId = table._id!
await config.api.permission.add({
roleId: STD_ROLE_ID,
resourceId: view.id,
resourceId: tableId,
level: PermissionLevel.READ,
})
await config.api.permission.revoke({
roleId: STD_ROLE_ID,
resourceId: table._id,
level: PermissionLevel.READ,
})
// replicate changes before checking permissions
await config.publish()
const res = await config.api.viewV2.publicSearch(view.id)
expect(res.rows[0]._id).toEqual(row._id)
})
it("shouldn't allow writing from a public user", async () => {
const res = await request
.post(`/api/${table._id}/rows`)
.send(basicRow(table._id))
.set(config.publicHeaders())
.expect("Content-Type", /json/)
.expect(401)
expect(res.status).toEqual(401)
    it("tables should default to admin permissions", async () => {
const table = await config.createTable()
const { permissions } = await config.api.permission.get(table._id!)
expect(permissions).toEqual({
read: {
permissionType: "EXPLICIT",
role: DEFAULT_TABLE_ROLE_ID,
},
write: {
permissionType: "EXPLICIT",
role: DEFAULT_TABLE_ROLE_ID,
},
})
})
describe("add", () => {
it("should be able to add permission to a role for the table", async () => {
const res = await request
.get(`/api/permission/${tableId}`)
.set(config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(200)
expect(res.body).toEqual({
permissions: {
read: { permissionType: "EXPLICIT", role: STD_ROLE_ID },
write: { permissionType: "EXPLICIT", role: DEFAULT_TABLE_ROLE_ID },
},
})
})
it("should get resource permissions with multiple roles", async () => {
await config.api.permission.add({
roleId: HIGHER_ROLE_ID,
resourceId: tableId,
level: PermissionLevel.WRITE,
})
const res = await config.api.permission.get(tableId)
expect(res).toEqual({
permissions: {
read: { permissionType: "EXPLICIT", role: STD_ROLE_ID },
write: { permissionType: "EXPLICIT", role: HIGHER_ROLE_ID },
},
})
const allRes = await request
.get(`/api/permission`)
.set(config.defaultHeaders())
.expect("Content-Type", /json/)
.expect(200)
expect(allRes.body[tableId]["read"]).toEqual(STD_ROLE_ID)
expect(allRes.body[tableId]["write"]).toEqual(HIGHER_ROLE_ID)
})
})
describe("remove", () => {
it("should be able to remove the permission", async () => {
await config.api.permission.revoke({
roleId: STD_ROLE_ID,
resourceId: tableId,
level: PermissionLevel.READ,
})
const permsRes = await config.api.permission.get(tableId)
expect(permsRes.permissions[STD_ROLE_ID]).toBeUndefined()
})
})
describe("check public user allowed", () => {
let viewId: string
let row: Row
beforeEach(async () => {
const view = await config.api.viewV2.create({
tableId,
name: generator.guid(),
})
viewId = view.id
row = await config.createRow()
})
it("should be able to read the row", async () => {
// replicate changes before checking permissions
await config.publish()
const res = await request
.get(`/api/${tableId}/rows`)
.set(config.publicHeaders())
.expect("Content-Type", /json/)
.expect(200)
expect(res.body[0]._id).toEqual(row._id)
})
it("should be able to access the view data when the table is set to public and with no view permissions overrides", async () => {
// Make view inherit table permissions. Needed for backwards compatibility with existing views.
await config.api.permission.revoke({
roleId: STD_ROLE_ID,
resourceId: viewId,
level: PermissionLevel.READ,
})
// replicate changes before checking permissions
await config.publish()
const res = await config.api.viewV2.publicSearch(viewId)
expect(res.rows[0]._id).toEqual(row._id)
})
it("should not be able to access the view data when the table is not public and there are no view permissions overrides", async () => {
await config.api.permission.revoke({
roleId: STD_ROLE_ID,
resourceId: tableId,
level: PermissionLevel.READ,
})
// Make view inherit table permissions. Needed for backwards compatibility with existing views.
await config.api.permission.revoke({
roleId: STD_ROLE_ID,
resourceId: viewId,
level: PermissionLevel.READ,
})
// replicate changes before checking permissions
await config.publish()
await config.api.viewV2.publicSearch(viewId, undefined, {
status: 401,
})
})
it("should use the view permissions", async () => {
await config.api.permission.add({
roleId: STD_ROLE_ID,
resourceId: viewId,
level: PermissionLevel.READ,
})
await config.api.permission.revoke({
roleId: STD_ROLE_ID,
resourceId: tableId,
level: PermissionLevel.READ,
})
// replicate changes before checking permissions
await config.publish()
const res = await config.api.viewV2.publicSearch(viewId)
expect(res.rows[0]._id).toEqual(row._id)
})
it("shouldn't allow writing from a public user", async () => {
const res = await request
.post(`/api/${tableId}/rows`)
.send(basicRow(tableId))
.set(config.publicHeaders())
.expect("Content-Type", /json/)
.expect(401)
expect(res.status).toEqual(401)
})
})
})
describe("view permissions", () => {
let tableId: string
let viewId: string
beforeEach(async () => {
const table = await config.createTable()
tableId = table._id!
const view = await config.api.viewV2.create({
tableId,
name: generator.guid(),
})
viewId = view.id
})
    it("default permissions inherit and persist the table default value", async () => {
const { permissions } = await config.api.permission.get(viewId)
expect(permissions).toEqual({
read: {
permissionType: "EXPLICIT",
role: DEFAULT_TABLE_ROLE_ID,
inheritablePermission: DEFAULT_TABLE_ROLE_ID,
},
write: {
permissionType: "EXPLICIT",
role: DEFAULT_TABLE_ROLE_ID,
inheritablePermission: DEFAULT_TABLE_ROLE_ID,
},
})
})
it("does not update view permissions once persisted, even if table permissions change", async () => {
await config.api.permission.add({
roleId: STD_ROLE_ID,
resourceId: tableId,
level: PermissionLevel.READ,
})
const { permissions } = await config.api.permission.get(viewId)
expect(permissions).toEqual({
read: {
permissionType: "EXPLICIT",
role: DEFAULT_TABLE_ROLE_ID,
inheritablePermission: STD_ROLE_ID,
},
write: {
permissionType: "EXPLICIT",
role: DEFAULT_TABLE_ROLE_ID,
inheritablePermission: DEFAULT_TABLE_ROLE_ID,
},
})
})
    it("can set explicit view permissions that override the inherited defaults", async () => {
await config.api.permission.add({
roleId: HIGHER_ROLE_ID,
resourceId: viewId,
level: PermissionLevel.WRITE,
})
const { permissions } = await config.api.permission.get(viewId)
expect(permissions).toEqual({
read: {
permissionType: "EXPLICIT",
role: DEFAULT_TABLE_ROLE_ID,
inheritablePermission: DEFAULT_TABLE_ROLE_ID,
},
write: {
permissionType: "EXPLICIT",
role: HIGHER_ROLE_ID,
inheritablePermission: DEFAULT_TABLE_ROLE_ID,
},
})
})
})

View File

@ -28,6 +28,7 @@ describe.each(
const config = setup.getConfig()
const isOracle = dbName === DatabaseName.ORACLE
const isMsSQL = dbName === DatabaseName.SQL_SERVER
const isPostgres = dbName === DatabaseName.POSTGRES
let rawDatasource: Datasource
let datasource: Datasource
@ -47,6 +48,9 @@ describe.each(
transformer: "return data",
readable: true,
}
if (query.fields?.sql && typeof query.fields.sql !== "string") {
throw new Error("Unable to create with knex structure in 'sql' field")
}
return await config.api.query.save(
{ ...defaultQuery, ...query },
expectations
@ -207,6 +211,31 @@ describe.each(
expect(prodQuery.parameters).toBeUndefined()
expect(prodQuery.schema).toBeDefined()
})
isPostgres &&
it("should be able to handle a JSON aggregate with newlines", async () => {
const jsonStatement = `COALESCE(json_build_object('name', name),'{"name":{}}'::json)`
const query = await createQuery({
fields: {
sql: client("test_table")
.select([
"*",
client.raw(
`${jsonStatement} as json,\n${jsonStatement} as json2`
),
])
.toString(),
},
})
const res = await config.api.query.execute(
query._id!,
{},
{
status: 200,
}
)
expect(res).toBeDefined()
})
})
})

View File

@ -161,7 +161,7 @@ describe("/roles", () => {
it("should not fetch higher level accessible roles when a custom role header is provided", async () => {
await createRole({
name: `CUSTOM_ROLE`,
name: `custom_role_1`,
inherits: roles.BUILTIN_ROLE_IDS.BASIC,
permissionId: permissions.BuiltinPermissionID.READ_ONLY,
version: "name",
@ -170,11 +170,11 @@ describe("/roles", () => {
.get("/api/roles/accessible")
.set({
...config.defaultHeaders(),
"x-budibase-role": "CUSTOM_ROLE",
"x-budibase-role": "custom_role_1",
})
.expect(200)
expect(res.body.length).toBe(3)
expect(res.body[0]).toBe("CUSTOM_ROLE")
expect(res.body[0]).toBe("custom_role_1")
expect(res.body[1]).toBe("BASIC")
expect(res.body[2]).toBe("PUBLIC")
})

Some files were not shown because too many files have changed in this diff.