Merge branch 'master' of github.com:Budibase/budibase into feature/role-multi-inheritance

mike12345567 2024-10-15 13:34:39 +01:00
commit ca72dffb1f
22 changed files with 349 additions and 88 deletions


@@ -2,13 +2,11 @@ name: deploy-featurebranch
 on:
   pull_request:
-    types: [
-      labeled,
-      # default types below (https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request)
-      opened,
-      synchronize,
-      reopened,
-    ]
+    types:
+      - labeled
+      - opened
+      - synchronize
+      - reopened
 jobs:
   release:
@@ -22,31 +20,21 @@ jobs:
         contains(github.event.pull_request.labels.*.name, 'feature-branch-enterprise')
       )
     runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v4
-      - name: Set PAYLOAD_LICENSE_TYPE
-        id: set_license_type
-        run: |
-          if [[ "${{ contains(github.event.pull_request.labels.*.name, 'feature-branch') }}" == "true" ]]; then
-            echo "PAYLOAD_LICENSE_TYPE=free" >> $GITHUB_ENV
-          elif [[ "${{ contains(github.event.pull_request.labels.*.name, 'feature-branch-pro') }}" == "true" ]]; then
-            echo "PAYLOAD_LICENSE_TYPE=pro" >> $GITHUB_ENV
-          elif [[ "${{ contains(github.event.pull_request.labels.*.name, 'feature-branch-team') }}" == "true" ]]; then
-            echo "PAYLOAD_LICENSE_TYPE=team" >> $GITHUB_ENV
-          elif [[ "${{ contains(github.event.pull_request.labels.*.name, 'feature-branch-business') }}" == "true" ]]; then
-            echo "PAYLOAD_LICENSE_TYPE=business" >> $GITHUB_ENV
-          elif [[ "${{ contains(github.event.pull_request.labels.*.name, 'feature-branch-enterprise') }}" == "true" ]]; then
-            echo "PAYLOAD_LICENSE_TYPE=enterprise" >> $GITHUB_ENV
-          else
-            echo "PAYLOAD_LICENSE_TYPE=free" >> $GITHUB_ENV
-          fi
-      - uses: passeidireto/trigger-external-workflow-action@main
     env:
       PAYLOAD_BRANCH: ${{ github.head_ref }}
       PAYLOAD_PR_NUMBER: ${{ github.event.pull_request.number }}
-      PAYLOAD_LICENSE_TYPE: ${{ env.PAYLOAD_LICENSE_TYPE }}
+      PAYLOAD_LICENSE_TYPE: |
+        ${{
+          contains(github.event.pull_request.labels.*.name, 'feature-branch') && 'free' ||
+          contains(github.event.pull_request.labels.*.name, 'feature-branch-pro') && 'pro' ||
+          contains(github.event.pull_request.labels.*.name, 'feature-branch-team') && 'team' ||
+          contains(github.event.pull_request.labels.*.name, 'feature-branch-business') && 'business' ||
+          contains(github.event.pull_request.labels.*.name, 'feature-branch-enterprise') && 'enterprise' || 'free'
+        }}
+    steps:
+      - uses: actions/checkout@v4
+      - uses: passeidireto/trigger-external-workflow-action@main
         with:
           repository: budibase/budibase-deploys
           event: featurebranch-qa-deploy
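
Note: the inlined PAYLOAD_LICENSE_TYPE expression relies on the short-circuiting `&&`/`||` operators in GitHub Actions expressions, so the first matching label wins and anything unmatched falls back to "free". A rough TypeScript sketch of the same selection logic, illustrative only (the label names are the ones used in the workflow above, nothing here is Budibase code):

// Illustrative sketch only: mirrors the workflow expression, not Budibase code.
function licenseTypeFor(labels: string[]): string {
  return (
    (labels.includes("feature-branch") && "free") ||
    (labels.includes("feature-branch-pro") && "pro") ||
    (labels.includes("feature-branch-team") && "team") ||
    (labels.includes("feature-branch-business") && "business") ||
    (labels.includes("feature-branch-enterprise") && "enterprise") ||
    "free"
  )
}

// e.g. licenseTypeFor(["feature-branch-team"]) === "team"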


@@ -42,14 +42,12 @@ spec:
           {{ else }}
           value: http://{{ .Release.Name }}-svc-couchdb:{{ .Values.services.couchdb.port }}
           {{ end }}
-        {{ if .Values.globals.sqs.enabled }}
         - name: COUCH_DB_SQL_URL
           {{ if .Values.globals.sqs.url }}
-          value: {{ .Values.globals.sqs.url }}
+          value: {{ .Values.globals.sqs.url | quote }}
           {{ else }}
           value: http://{{ .Release.Name }}-svc-couchdb:{{ .Values.globals.sqs.port }}
           {{ end }}
-        {{ end }}
         {{ if .Values.services.couchdb.enabled }}
         - name: COUCH_DB_USER
           valueFrom:


@@ -43,6 +43,12 @@ spec:
           {{ else }}
           value: http://{{ .Release.Name }}-svc-couchdb:{{ .Values.services.couchdb.port }}
           {{ end }}
+        - name: COUCH_DB_SQL_URL
+          {{ if .Values.globals.sqs.url }}
+          value: {{ .Values.globals.sqs.url | quote }}
+          {{ else }}
+          value: http://{{ .Release.Name }}-svc-couchdb:{{ .Values.globals.sqs.port }}
+          {{ end }}
         {{ if .Values.services.couchdb.enabled }}
         - name: COUCH_DB_USER
           valueFrom:


@@ -56,14 +56,12 @@ spec:
           {{ else }}
           value: http://{{ .Release.Name }}-svc-couchdb:{{ .Values.services.couchdb.port }}
           {{ end }}
-        {{ if .Values.globals.sqs.enabled }}
        - name: COUCH_DB_SQL_URL
           {{ if .Values.globals.sqs.url }}
-          value: {{ .Values.globals.sqs.url }}
+          value: {{ .Values.globals.sqs.url | quote }}
           {{ else }}
           value: http://{{ .Release.Name }}-svc-couchdb:{{ .Values.globals.sqs.port }}
           {{ end }}
-        {{ end }}
         - name: API_ENCRYPTION_KEY
           valueFrom:
             secretKeyRef:


@@ -139,9 +139,6 @@ globals:
     password: ""
   sqs:
-    # -- Whether to use the CouchDB "structured query service" or not. This is disabled by
-    # default for now, but will become the default in a future release.
-    enabled: false
     # @ignore
     url: ""
     # @ignore


@@ -5,7 +5,7 @@ version: "3"
 services:
   app-service:
     restart: unless-stopped
-    image: budibase.docker.scarf.sh/budibase/apps
+    image: budibase/apps
     container_name: bbapps
     environment:
       SELF_HOSTED: 1
@@ -35,7 +35,7 @@ services:
   worker-service:
     restart: unless-stopped
-    image: budibase.docker.scarf.sh/budibase/worker
+    image: budibase/worker
     container_name: bbworker
     environment:
       SELF_HOSTED: 1
@@ -97,7 +97,7 @@ services:
   couchdb-service:
     restart: unless-stopped
-    image: budibase/couchdb
+    image: budibase/couchdb:v3.3.3-sqs-v2.1.1
     environment:
       - COUCHDB_PASSWORD=${COUCH_DB_PASSWORD}
       - COUCHDB_USER=${COUCH_DB_USER}


@@ -69,6 +69,9 @@ WORKDIR /minio
 COPY scripts/install-minio.sh ./install.sh
 RUN chmod +x install.sh && ./install.sh
+# setup redis
+COPY hosting/single/redis.conf /etc/redis/redis.conf
 # setup runner file
 WORKDIR /
 COPY hosting/single/runner.sh .


@@ -0,0 +1,7 @@
+dir "DATA_DIR/redis"
+appendonly yes
+appendfsync everysec
+auto-aof-rewrite-percentage 100
+auto-aof-rewrite-min-size 64mb


@@ -75,13 +75,17 @@ fi
 for LINE in $(cat ${DATA_DIR}/.env); do export $LINE; done
 ln -s ${DATA_DIR}/.env /app/.env
 ln -s ${DATA_DIR}/.env /worker/.env
 # make these directories in runner, incase of mount
 mkdir -p ${DATA_DIR}/minio
+mkdir -p ${DATA_DIR}/redis
 chown -R couchdb:couchdb ${DATA_DIR}/couch
+sed -i "s#DATA_DIR#${DATA_DIR}#g" /etc/redis/redis.conf
 if [[ -n "${REDIS_PASSWORD}" ]]; then
-  redis-server --requirepass $REDIS_PASSWORD > /dev/stdout 2>&1 &
+  redis-server /etc/redis/redis.conf --requirepass $REDIS_PASSWORD > /dev/stdout 2>&1 &
 else
-  redis-server > /dev/stdout 2>&1 &
+  redis-server /etc/redis/redis.conf > /dev/stdout 2>&1 &
 fi
 /bbcouch-runner.sh &


@@ -269,7 +269,7 @@ export class FlagSet<V extends Flag<any>, T extends { [key: string]: V }> {
 export const flags = new FlagSet({
   DEFAULT_VALUES: Flag.boolean(env.isDev()),
   AUTOMATION_BRANCHING: Flag.boolean(env.isDev()),
-  SQS: Flag.boolean(env.isDev()),
+  SQS: Flag.boolean(true),
   [FeatureFlag.AI_CUSTOM_CONFIGS]: Flag.boolean(env.isDev()),
   [FeatureFlag.ENRICHED_RELATIONSHIPS]: Flag.boolean(env.isDev()),
   [FeatureFlag.TABLES_DEFAULT_ADMIN]: Flag.boolean(env.isDev()),


@@ -10,6 +10,7 @@ const schema = {
   TEST_BOOLEAN: Flag.boolean(false),
   TEST_STRING: Flag.string("default value"),
   TEST_NUMBER: Flag.number(0),
+  TEST_BOOLEAN_DEFAULT_TRUE: Flag.boolean(true),
 }
 const flags = new FlagSet(schema)
@@ -123,6 +124,11 @@ describe("feature flags", () => {
       },
       expected: flags.defaults(),
     },
+    {
+      it: "should be possible to override a default true flag to false",
+      environmentFlags: "default:!TEST_BOOLEAN_DEFAULT_TRUE",
+      expected: { TEST_BOOLEAN_DEFAULT_TRUE: false },
+    },
   ])(
     "$it",
     async ({
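
The `default:!TEST_BOOLEAN_DEFAULT_TRUE` string above follows a `tenant:FLAG` pattern where a leading `!` forces a flag off for that tenant. A minimal sketch of one way to read such a string, assuming entries could be comma-separated (this is not the actual FlagSet parser, and the environment variable that carries the string is not shown here):

// Minimal sketch, not Budibase's FlagSet parser: reads "tenant:FLAG" pairs,
// with a leading "!" on the flag name meaning "force this flag off".
function parseFlagOverrides(spec: string): Record<string, Record<string, boolean>> {
  const overrides: Record<string, Record<string, boolean>> = {}
  for (const entry of spec.split(",").filter(Boolean)) {
    const [tenant, flag] = entry.split(":")
    if (!tenant || !flag) continue
    const enabled = !flag.startsWith("!")
    const name = enabled ? flag : flag.slice(1)
    overrides[tenant] = { ...overrides[tenant], [name]: enabled }
  }
  return overrides
}

// parseFlagOverrides("default:!TEST_BOOLEAN_DEFAULT_TRUE")
//   => { default: { TEST_BOOLEAN_DEFAULT_TRUE: false } }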


@@ -521,8 +521,11 @@ class InternalBuilder {
       const [filterTableName, ...otherProperties] = key.split(".")
       const property = otherProperties.join(".")
       const alias = getTableAlias(filterTableName)
-      return fn(q, alias ? `${alias}.${property}` : property, value)
+      return q.andWhere(subquery =>
+        fn(subquery, alias ? `${alias}.${property}` : property, value)
+      )
     }
     for (const key in structure) {
       const value = structure[key]
       const updatedKey = dbCore.removeKeyNumbering(key)
@@ -552,6 +555,9 @@
           value
         )
       } else if (shouldProcessRelationship) {
+        if (allOr) {
+          query = query.or
+        }
         query = builder.addRelationshipForFilter(query, updatedKey, q => {
           return handleRelationship(q, updatedKey, value)
         })
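
Wrapping the relationship filter in `q.andWhere(subquery => ...)` matters because knex renders callback-style where clauses inside their own parentheses, which keeps a relationship sub-filter from bleeding into neighbouring AND/OR terms (and is what the changed SQL expectations further down check for). A small standalone knex sketch of that grouping behaviour, with assumed table and column names and a Postgres dialect; it only demonstrates knex, not Budibase's InternalBuilder:

// Standalone sketch with assumed names; demonstrates knex's grouping of
// callback-based where clauses, not Budibase's InternalBuilder itself.
import knex from "knex"

const client = knex({ client: "pg" })

const sql = client("tasks as b")
  .select("*")
  .where("b.completed", false)
  .andWhere(q =>
    q.whereRaw(`COALESCE("b"."taskname" = ?, FALSE)`, ["processing"])
  )
  .toString()

// sql ends with: ... and (COALESCE("b"."taskname" = 'processing', FALSE))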


@@ -23,6 +23,7 @@
     sourceType: DB_TYPE_EXTERNAL,
     schema: {
       id: {
+        name: "id",
         autocolumn: true,
         type: "number",
       },


@@ -2,36 +2,31 @@
   export let isMigrationDone
   export let onMigrationDone
   export let timeoutSeconds = 60 // 1 minute
-  export let minTimeSeconds = 3
-  const loadTime = Date.now()
-  const intervalMs = 1000
   let timedOut = false
-  let secondsWaited = 0
   async function checkMigrationsFinished() {
-    setTimeout(async () => {
+    let totalWaitMs = 0
+    // eslint-disable-next-line no-constant-condition
+    while (true) {
+      const waitForMs = 5000 + Math.random() * 5000
+      await new Promise(resolve => setTimeout(resolve, waitForMs))
+      totalWaitMs += waitForMs
       const isMigrated = await isMigrationDone()
-      const timeoutMs = timeoutSeconds * 1000
-      if (!isMigrated || secondsWaited <= minTimeSeconds) {
-        if (loadTime + timeoutMs > Date.now()) {
-          secondsWaited += 1
-          return checkMigrationsFinished()
-        }
-        return migrationTimeout()
-      }
+      if (isMigrated) {
         onMigrationDone()
-    }, intervalMs)
+        return
+      }
+      if (totalWaitMs > timeoutSeconds * 1000) {
+        timedOut = true
+        return
+      }
+    }
   }
   checkMigrationsFinished()
-  function migrationTimeout() {
-    timedOut = true
-  }
 </script>

 <div class="loading" class:timeout={timedOut}>


@@ -71,19 +71,20 @@ export async function fetch(ctx: UserCtx<void, FetchTablesResponse>) {
   const datasources = await sdk.datasources.getExternalDatasources()
-  const external = datasources.flatMap(datasource => {
+  const external: Table[] = []
+  for (const datasource of datasources) {
     let entities = datasource.entities
     if (entities) {
-      return Object.values(entities).map<Table>((entity: Table) => ({
-        ...entity,
+      for (const entity of Object.values(entities)) {
+        external.push({
+          ...(await processTable(entity)),
           sourceType: TableSourceType.EXTERNAL,
           sourceId: datasource._id!,
           sql: isSQL(datasource),
-      }))
-    } else {
-      return []
-    }
-  })
+        })
+      }
+    }
+  }
   const result: FetchTablesResponse = []
   for (const table of [...internal, ...external]) {


@@ -789,6 +789,39 @@ describe.each([
       })
     })
+    describe("multi-user column", () => {
+      beforeAll(async () => {
+        table = await config.api.table.save(
+          saveTableRequest({
+            schema: {
+              users: {
+                name: "users",
+                type: FieldType.BB_REFERENCE,
+                subtype: BBReferenceFieldSubType.USER,
+                default: ["{{ [Current User]._id }}"],
+              },
+            },
+          })
+        )
+      })
+      it("creates a new row with a default value successfully", async () => {
+        const row = await config.api.row.save(table._id!, {})
+        expect(row.users).toHaveLength(1)
+        expect(row.users[0]._id).toEqual(config.getUser()._id)
+      })
+      it("does not use default value if value specified", async () => {
+        const id = `us_${utils.newid()}`
+        await config.createUser({ _id: id })
+        const row = await config.api.row.save(table._id!, {
+          users: [id],
+        })
+        expect(row.users).toHaveLength(1)
+        expect(row.users[0]._id).toEqual(id)
+      })
+    })
     describe("bindings", () => {
       describe("string column", () => {
         beforeAll(async () => {


@@ -23,6 +23,7 @@ import {
   EmptyFilterOption,
   FieldType,
   JsonFieldSubType,
+  LogicalOperator,
   RelationshipType,
   Row,
   RowSearchParams,
@@ -2329,6 +2330,211 @@ describe.each([
 equal: { ["name"]: "baz" },
 }).toContainExactly([{ name: "baz", productCat: undefined }])
 })
describe("logical filters", () => {
const logicalOperators = [LogicalOperator.AND, LogicalOperator.OR]
describe("$and", () => {
it("should allow single conditions", async () => {
await expectQuery({
$and: {
conditions: [
{
equal: { ["productCat.name"]: "foo" },
},
],
},
}).toContainExactly([
{ name: "foo", productCat: [{ _id: productCatRows[0]._id }] },
])
})
it("should allow exclusive conditions", async () => {
await expectQuery({
$and: {
conditions: [
{
equal: { ["productCat.name"]: "foo" },
notEqual: { ["productCat.name"]: "foo" },
},
],
},
}).toContainExactly([])
})
it.each([logicalOperators])(
"should allow nested ands with single conditions (with %s as root)",
async rootOperator => {
await expectQuery({
[rootOperator]: {
conditions: [
{
$and: {
conditions: [
{
equal: { ["productCat.name"]: "foo" },
},
],
},
},
],
},
}).toContainExactly([
{ name: "foo", productCat: [{ _id: productCatRows[0]._id }] },
])
}
)
it.each([logicalOperators])(
"should allow nested ands with exclusive conditions (with %s as root)",
async rootOperator => {
await expectQuery({
[rootOperator]: {
conditions: [
{
$and: {
conditions: [
{
equal: { ["productCat.name"]: "foo" },
notEqual: { ["productCat.name"]: "foo" },
},
],
},
},
],
},
}).toContainExactly([])
}
)
it.each([logicalOperators])(
"should allow nested ands with multiple conditions (with %s as root)",
async rootOperator => {
await expectQuery({
[rootOperator]: {
conditions: [
{
$and: {
conditions: [
{
equal: { ["productCat.name"]: "foo" },
},
],
},
notEqual: { ["productCat.name"]: "foo" },
},
],
},
}).toContainExactly([])
}
)
})
describe("$ors", () => {
it("should allow single conditions", async () => {
await expectQuery({
$or: {
conditions: [
{
equal: { ["productCat.name"]: "foo" },
},
],
},
}).toContainExactly([
{ name: "foo", productCat: [{ _id: productCatRows[0]._id }] },
])
})
it("should allow exclusive conditions", async () => {
await expectQuery({
$or: {
conditions: [
{
equal: { ["productCat.name"]: "foo" },
notEqual: { ["productCat.name"]: "foo" },
},
],
},
}).toContainExactly([
{ name: "foo", productCat: [{ _id: productCatRows[0]._id }] },
{ name: "bar", productCat: [{ _id: productCatRows[1]._id }] },
// { name: "baz", productCat: undefined }, // TODO
])
})
it.each([logicalOperators])(
"should allow nested ors with single conditions (with %s as root)",
async rootOperator => {
await expectQuery({
[rootOperator]: {
conditions: [
{
$or: {
conditions: [
{
equal: { ["productCat.name"]: "foo" },
},
],
},
},
],
},
}).toContainExactly([
{ name: "foo", productCat: [{ _id: productCatRows[0]._id }] },
])
}
)
it.each([logicalOperators])(
"should allow nested ors with exclusive conditions (with %s as root)",
async rootOperator => {
await expectQuery({
[rootOperator]: {
conditions: [
{
$or: {
conditions: [
{
equal: { ["productCat.name"]: "foo" },
notEqual: { ["productCat.name"]: "foo" },
},
],
},
},
],
},
}).toContainExactly([
{ name: "foo", productCat: [{ _id: productCatRows[0]._id }] },
{ name: "bar", productCat: [{ _id: productCatRows[1]._id }] },
// { name: "baz", productCat: undefined }, // TODO
])
}
)
it("should allow nested ors with multiple conditions", async () => {
await expectQuery({
$or: {
conditions: [
{
$or: {
conditions: [
{
equal: { ["productCat.name"]: "foo" },
},
],
},
notEqual: { ["productCat.name"]: "foo" },
},
],
},
}).toContainExactly([
{ name: "foo", productCat: [{ _id: productCatRows[0]._id }] },
{ name: "bar", productCat: [{ _id: productCatRows[1]._id }] },
// { name: "baz", productCat: undefined }, // TODO
])
})
})
})
})
isSql && isSql &&


@@ -79,7 +79,7 @@ describe("Captures of real examples", () => {
       sql: expect.stringContaining(
         multiline(
           `where exists (select 1 from "tasks" as "b" inner join "products_tasks" as "c" on "b"."taskid" = "c"."taskid" where "c"."productid" = "a"."productid"
-          and COALESCE("b"."taskname" = $1, FALSE)`
+          and (COALESCE("b"."taskname" = $1, FALSE))`
         )
       ),
     })
@@ -144,7 +144,7 @@
       ],
       sql: expect.stringContaining(
         multiline(
-          `where exists (select 1 from "persons" as "c" where "c"."personid" = "a"."executorid" and "c"."year" between $1 and $2)`
+          `where exists (select 1 from "persons" as "c" where "c"."personid" = "a"."executorid" and ("c"."year" between $1 and $2))`
         )
       ),
     })


@@ -20,7 +20,13 @@ export async function processTable(table: Table): Promise<Table> {
   if (!table) {
     return table
   }
+  table = { ...table }
   if (table._id && isExternalTableID(table._id)) {
+    // Old created external tables via Budibase might have a missing field name breaking some UI such as filters
+    if (table.schema["id"] && !table.schema["id"].name) {
+      table.schema["id"].name = "id"
+    }
     return {
       ...table,
       type: "table",


@@ -33,7 +33,7 @@ import {
   PROTECTED_EXTERNAL_COLUMNS,
   PROTECTED_INTERNAL_COLUMNS,
 } from "@budibase/shared-core"
-import { processString } from "@budibase/string-templates"
+import { processStringSync } from "@budibase/string-templates"
 import {
   getTableFromSource,
   isUserMetadataTable,
@@ -134,10 +134,15 @@ async function processDefaultValues(table: Table, row: Row) {
   for (const [key, schema] of Object.entries(table.schema)) {
     if ("default" in schema && schema.default != null && row[key] == null) {
-      const processed =
-        typeof schema.default === "string"
-          ? await processString(schema.default, ctx)
-          : schema.default
+      let processed: string | string[]
+      if (Array.isArray(schema.default)) {
+        processed = schema.default.map(val => processStringSync(val, ctx))
+      } else if (typeof schema.default === "string") {
+        processed = processStringSync(schema.default, ctx)
+      } else {
+        processed = schema.default
+      }
       try {
         row[key] = coerce(processed, schema.type)
       } catch (err: any) {
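
For reference, processStringSync is the synchronous templating helper from @budibase/string-templates that the change above switches to, so array defaults can be templated entry by entry. A small usage sketch; the context object here is a stand-in for illustration, not the real row-processing ctx:

// Usage sketch only; the context shape is assumed, not Budibase's real ctx.
import { processStringSync } from "@budibase/string-templates"

const ctx = { user: { _id: "us_123" } }
const defaults = ["{{ user._id }}", "static value"]
const processed = defaults.map(val => processStringSync(val, ctx))
// processed => ["us_123", "static value"]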


@@ -66,7 +66,7 @@ const allowDefaultColumnByType: Record<FieldType, boolean> = {
   [FieldType.ATTACHMENT_SINGLE]: false,
   [FieldType.SIGNATURE_SINGLE]: false,
   [FieldType.LINK]: false,
-  [FieldType.BB_REFERENCE]: false,
+  [FieldType.BB_REFERENCE]: true,
   [FieldType.BB_REFERENCE_SINGLE]: true,
 }


@@ -121,6 +121,7 @@ export interface BBReferenceFieldMetadata
   type: FieldType.BB_REFERENCE
   subtype: BBReferenceFieldSubType
   relationshipType?: RelationshipType
+  default?: string[]
 }
 export interface BBReferenceSingleFieldMetadata
   extends Omit<BaseFieldSchema, "subtype"> {
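
To illustrate the new field, a hypothetical multi-user column definition using `default`; the import path and the `name` property are assumptions inferred from the surrounding diffs, not verified against the package exports:

// Hypothetical column definition; types and imports assumed from the diff above.
import {
  BBReferenceFieldSubType,
  FieldType,
  type BBReferenceFieldMetadata,
} from "@budibase/types"

const assignees: BBReferenceFieldMetadata = {
  name: "assignees",
  type: FieldType.BB_REFERENCE,
  subtype: BBReferenceFieldSubType.USER,
  // bindings are allowed because defaults are run through processStringSync
  default: ["{{ [Current User]._id }}"],
}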