diff --git a/.eslintrc.json b/.eslintrc.json
index 3de9d13046..ae9512152f 100644
--- a/.eslintrc.json
+++ b/.eslintrc.json
@@ -44,7 +44,8 @@
"no-undef": "off",
"no-prototype-builtins": "off",
"local-rules/no-budibase-imports": "error",
- "local-rules/no-test-com": "error"
+ "local-rules/no-test-com": "error",
+ "local-rules/email-domain-example-com": "error"
}
},
{
diff --git a/eslint-local-rules/index.js b/eslint-local-rules/index.js
index 71bb5068da..177b0a129c 100644
--- a/eslint-local-rules/index.js
+++ b/eslint-local-rules/index.js
@@ -51,4 +51,41 @@ module.exports = {
}
},
},
+ "email-domain-example-com": {
+ meta: {
+ type: "problem",
+ docs: {
+ description:
+ "enforce using the example.com domain for generator.email calls",
+ category: "Possible Errors",
+ recommended: false,
+ },
+ fixable: "code",
+ schema: [],
+ },
+ create: function (context) {
+ return {
+ CallExpression(node) {
+ if (
+ node.callee.type === "MemberExpression" &&
+ node.callee.object.name === "generator" &&
+ node.callee.property.name === "email" &&
+ node.arguments.length === 0
+ ) {
+ context.report({
+ node,
+ message:
+ "Prefer using generator.email with the domain \"{ domain: 'example.com' }\".",
+ fix: function (fixer) {
+ return fixer.replaceText(
+ node,
+ 'generator.email({ domain: "example.com" })'
+ )
+ },
+ })
+ }
+ },
+ }
+ },
+ },
}
diff --git a/i18n/README.kr.md b/i18n/README.kr.md
new file mode 100644
index 0000000000..09fc83569b
--- /dev/null
+++ b/i18n/README.kr.md
@@ -0,0 +1,221 @@
+
+
+
+
+
+
+ Budibase
+
+
+ 자체 인프라에서 몇 분 만에 맞춤형 비즈니스 도구를 구축하세요.
+
+
+ Budibase는 개발자와 IT 전문가가 몇 분 만에 맞춤형 애플리케이션을 구축하고 자동화할 수 있는 오픈 소스 로우코드 플랫폼입니다.
+
+
+
+ 🤖 🎨 🚀
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 소개
+ ·
+ 문서
+ ·
+ 기능 요청
+ ·
+ 버그 보고
+ ·
+ 지원: 토론
+
+
+
+## ✨ 특징
+
+### "실제" 소프트웨어를 구축할 수 있습니다.
+Budibase를 사용하면 고성능 단일 페이지 애플리케이션을 구축할 수 있습니다. 또한 반응형 디자인으로 제작하여 사용자에게 멋진 경험을 제공할 수 있습니다.
+
+
+### 오픈 소스 및 확장성
+Budibase는 오픈소스이며, GPL v3 라이선스에 따라 공개되어 있습니다. 이는 Budibase가 항상 당신 곁에 있다는 안도감을 줄 것입니다. 그리고 우리는 개발자 친화적인 환경을 제공하고 있기 때문에, 당신은 원하는 만큼 소스 코드를 포크하여 수정하거나 Budibase에 직접 기여할 수 있습니다.
+
+
+### 기존 데이터 또는 처음부터 시작
+Budibase를 사용하면 다음과 같은 여러 소스에서 데이터를 가져올 수 있습니다: MongoDB, CouchDB, PostgreSQL, MySQL, Airtable, S3, DynamoDB 또는 REST API.
+
+또는 원하는 경우 외부 도구 없이도 Budibase를 사용하여 처음부터 시작하여 자체 애플리케이션을 구축할 수 있습니다. [데이터 소스 제안](https://github.com/Budibase/budibase/discussions?discussions_q=category%3AIdeas).
+
+
+
+
+
+
+### 강력한 내장 구성 요소로 애플리케이션을 설계하고 구축할 수 있습니다.
+
+Budibase에는 아름답게 디자인된 강력한 컴포넌트들이 제공되며, 이를 사용하여 UI를 쉽게 구축할 수 있습니다. 또한, CSS를 통한 스타일링 옵션도 풍부하게 제공되어 보다 창의적인 표현도 가능합니다.
+ [새로운 컴포넌트 요청](https://github.com/Budibase/budibase/discussions?discussions_q=category%3AIdeas).
+
+
+
+
+
+
+### 프로세스를 자동화하고, 다른 도구와 연동하고, 웹훅으로 연결하세요!
+워크플로우와 수동 프로세스를 자동화하여 시간을 절약하세요. 웹훅 이벤트 연결부터 이메일 자동화까지, Budibase에 수행할 작업을 지시하기만 하면 자동으로 처리됩니다. [새로운 자동화 만들기](https://github.com/Budibase/automations)또는[새로운 자동화를 요청할 수 있습니다](https://github.com/Budibase/budibase/discussions?discussions_q=category%3AIdeas).
+
+
+
+
+
+
+### 선호하는 도구
+Budibase는 사용자의 선호도에 따라 애플리케이션을 구축할 수 있는 다양한 도구를 통합하고 있습니다.
+
+
+
+
+
+
+### 관리자의 천국
+Budibase는 어떤 규모의 프로젝트에도 유연하게 대응할 수 있으며, Budibase를 사용하면 개인 또는 조직의 서버에서 자체 호스팅하고 사용자, 온보딩, SMTP, 앱, 그룹, 테마 등을 한꺼번에 관리할 수 있습니다. 또한, 사용자나 그룹에 앱 포털을 제공하고 그룹 관리자에게 사용자 관리를 맡길 수도 있습니다.
+- 프로모션 비디오: https://youtu.be/xoljVpty_Kw
+
+
+
+## 🏁 시작
+
+Docker, Kubernetes 또는 Digital Ocean을 사용하여 자체 인프라에서 Budibase를 호스팅하거나, 걱정 없이 빠르게 애플리케이션을 구축하려는 경우 클라우드에서 Budibase를 사용할 수 있습니다.
+
+### [Budibase 셀프 호스팅으로 시작하기](https://docs.budibase.com/docs/hosting-methods)
+
+- [Docker - single ARM compatible image](https://docs.budibase.com/docs/docker)
+- [Docker Compose](https://docs.budibase.com/docs/docker-compose)
+- [Kubernetes](https://docs.budibase.com/docs/kubernetes-k8s)
+- [Digital Ocean](https://docs.budibase.com/docs/digitalocean)
+- [Portainer](https://docs.budibase.com/docs/portainer)
+
+
+### [클라우드에서 Budibase 시작하기](https://budibase.com)
+
+
+
+## 🎓 Budibase 알아보기
+
+문서: [Budibase 문서](https://docs.budibase.com/docs).
+
+
+
+
+
+## 💬 커뮤니티
+
+질문하고, 다른 사람을 돕고, 다른 Budibase 사용자와 즐거운 대화를 나눌 수 있는 Budibase 커뮤니티에 여러분을 초대합니다.
+[깃허브 토론](https://github.com/Budibase/budibase/discussions)
+
+
+
+## ❗ 행동강령
+
+Budibase 는 모든 계층의 사람들을 환영하고 상호 존중하는 환경을 제공하는 데 특별한 주의를 기울이고 있습니다. 저희는 커뮤니티에도 같은 기대를 가지고 있습니다.
+[**행동 강령**](https://github.com/Budibase/budibase/blob/HEAD/.github/CODE_OF_CONDUCT.md).
+
+
+
+
+
+## 🙌 Budibase에 기여하기
+
+버그 신고부터 코드의 버그 수정에 이르기까지 모든 기여를 감사하고 환영합니다. 새로운 기능을 구현하거나 API를 변경할 계획이 있다면 [여기에 새 메시지](https://github.com/Budibase/budibase/issues),
+이렇게 하면 여러분의 노력이 헛되지 않도록 보장할 수 있습니다.
+
+여기에는 다음을 위해 Budibase 환경을 설정하는 방법에 대한 지침이 나와 있습니다. [여기를 클릭하세요](https://github.com/Budibase/budibase/tree/HEAD/docs/CONTRIBUTING.md).
+
+### 어디서부터 시작해야 할지 혼란스러우신가요?
+이곳은 기여를 시작하기에 최적의 장소입니다! [First time issues project](https://github.com/Budibase/budibase/projects/22).
+
+### 리포지토리 구성
+
+Budibase는 Lerna에서 관리하는 단일 리포지토리입니다. Lerna는 변경 사항이 있을 때마다 이를 동기화하여 Budibase 패키지를 빌드하고 게시합니다. 크게 보면 이러한 패키지가 Budibase를 구성하는 패키지입니다:
+
+- [packages/builder](https://github.com/Budibase/budibase/tree/HEAD/packages/builder) - budibase builder 클라이언트 측의 svelte 애플리케이션 코드가 포함되어 있습니다.
+
+- [packages/client](https://github.com/Budibase/budibase/tree/HEAD/packages/client) - budibase builder 클라이언트 측의 svelte 애플리케이션 코드가 포함되어 있습니다.
+
+- [packages/server](https://github.com/Budibase/budibase/tree/HEAD/packages/server) - Budibase의 서버 부분입니다. 이 Koa 애플리케이션은 빌더에게 Budibase 애플리케이션을 생성하는 데 필요한 것을 제공하는 역할을 합니다. 또한 데이터베이스 및 파일 저장소와 상호 작용할 수 있는 API를 제공합니다.
+
+자세한 내용은 다음 문서를 참조하세요. [CONTRIBUTING.md](https://github.com/Budibase/budibase/blob/HEAD/docs/CONTRIBUTING.md)
+
+
+
+
+## 📝 라이선스
+
+Budibase는 오픈 소스이며, 라이선스는 다음과 같습니다 [GPL v3](https://www.gnu.org/licenses/gpl-3.0.en.html). 클라이언트 및 컴포넌트 라이브러리는 다음과 같이 라이선스가 부여됩니다. [MPL](https://directory.fsf.org/wiki/License:MPL-2.0) - 이렇게 하면 빌드한 애플리케이션에 원하는 대로 라이선스를 부여할 수 있습니다.
+
+
+
+## ⭐ 스타 수의 역사
+
+[![Stargazers over time](https://starchart.cc/Budibase/budibase.svg)](https://starchart.cc/Budibase/budibase)
+
+빌더 업데이트 중 문제가 발생하는 경우 [여기](https://github.com/Budibase/budibase/blob/HEAD/docs/CONTRIBUTING.md#troubleshooting) 를 참고하여 환경을 정리해 주세요.
+
+
+
+## Contributors ✨
+
+훌륭한 여러분께 감사할 따름입니다. ([emoji key](https://allcontributors.org/docs/en/emoji-key)):
+
+
+
+
+
+
+
+
+
+
+
+이 프로젝트는 다음 사양을 따릅니다. [all-contributors](https://github.com/all-contributors/all-contributors).
+모든 종류의 기여를 환영합니다!
diff --git a/lerna.json b/lerna.json
index 54e106cd5a..a50794e91e 100644
--- a/lerna.json
+++ b/lerna.json
@@ -1,5 +1,5 @@
{
- "version": "2.20.10",
+ "version": "2.21.4",
"npmClient": "yarn",
"packages": [
"packages/*",
diff --git a/packages/account-portal b/packages/account-portal
index de6d44c372..0c050591c2 160000
--- a/packages/account-portal
+++ b/packages/account-portal
@@ -1 +1 @@
-Subproject commit de6d44c372a7f48ca0ce8c6c0c19311d4bc21646
+Subproject commit 0c050591c21d3b67dc0c9225d60cc9e2324c8dac
diff --git a/packages/backend-core/package.json b/packages/backend-core/package.json
index 3f8c34f823..fe56780982 100644
--- a/packages/backend-core/package.json
+++ b/packages/backend-core/package.json
@@ -67,7 +67,7 @@
"@types/lodash": "4.14.200",
"@types/node-fetch": "2.6.4",
"@types/pouchdb": "6.4.0",
- "@types/redlock": "4.0.3",
+ "@types/redlock": "4.0.7",
"@types/semver": "7.3.7",
"@types/tar-fs": "2.0.1",
"@types/uuid": "8.3.4",
@@ -78,6 +78,7 @@
"jest-serial-runner": "1.2.1",
"pino-pretty": "10.0.0",
"pouchdb-adapter-memory": "7.2.2",
+ "testcontainers": "^10.7.2",
"timekeeper": "2.2.0",
"typescript": "5.2.2"
},
diff --git a/packages/backend-core/src/cache/base/index.ts b/packages/backend-core/src/cache/base/index.ts
index 264984c6a5..74da4fe0d2 100644
--- a/packages/backend-core/src/cache/base/index.ts
+++ b/packages/backend-core/src/cache/base/index.ts
@@ -23,6 +23,18 @@ export default class BaseCache {
return client.keys(pattern)
}
+ async exists(key: string, opts = { useTenancy: true }) {
+ key = opts.useTenancy ? generateTenantKey(key) : key
+ const client = await this.getClient()
+ return client.exists(key)
+ }
+
+ async scan(key: string, opts = { useTenancy: true }) {
+ key = opts.useTenancy ? generateTenantKey(key) : key
+ const client = await this.getClient()
+ return client.scan(key)
+ }
+
/**
* Read only from the cache.
*/
@@ -32,6 +44,15 @@ export default class BaseCache {
return client.get(key)
}
+ /**
+ * Read only from the cache.
+ */
+ async bulkGet(keys: string[], opts = { useTenancy: true }) {
+ keys = opts.useTenancy ? keys.map(key => generateTenantKey(key)) : keys
+ const client = await this.getClient()
+ return client.bulkGet(keys)
+ }
+
/**
* Write to the cache.
*/
@@ -46,6 +67,25 @@ export default class BaseCache {
await client.store(key, value, ttl)
}
+ /**
+ * Bulk write to the cache.
+ */
+ async bulkStore(
+ data: Record,
+ ttl: number | null = null,
+ opts = { useTenancy: true }
+ ) {
+ if (opts.useTenancy) {
+ data = Object.entries(data).reduce((acc, [key, value]) => {
+ acc[generateTenantKey(key)] = value
+ return acc
+ }, {} as Record)
+ }
+
+ const client = await this.getClient()
+ await client.bulkStore(data, ttl)
+ }
+
/**
* Remove from cache.
*/
@@ -55,15 +95,24 @@ export default class BaseCache {
return client.delete(key)
}
+ /**
+ * Remove from cache.
+ */
+ async bulkDelete(keys: string[], opts = { useTenancy: true }) {
+ keys = opts.useTenancy ? keys.map(key => generateTenantKey(key)) : keys
+ const client = await this.getClient()
+ return client.bulkDelete(keys)
+ }
+
/**
* Read from the cache. Write to the cache if not exists.
*/
- async withCache(
+ async withCache(
key: string,
- ttl: number,
- fetchFn: any,
+ ttl: number | null = null,
+ fetchFn: () => Promise | T,
opts = { useTenancy: true }
- ) {
+ ): Promise {
const cachedValue = await this.get(key, opts)
if (cachedValue) {
return cachedValue
@@ -89,4 +138,13 @@ export default class BaseCache {
throw err
}
}
+
+ /**
+ * Delete the entry if the provided value matches the stored one.
+ */
+ async deleteIfValue(key: string, value: any, opts = { useTenancy: true }) {
+ key = opts.useTenancy ? generateTenantKey(key) : key
+ const client = await this.getClient()
+ await client.deleteIfValue(key, value)
+ }
}
diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts
new file mode 100644
index 0000000000..51018b2317
--- /dev/null
+++ b/packages/backend-core/src/cache/docWritethrough.ts
@@ -0,0 +1,86 @@
+import { AnyDocument, Database } from "@budibase/types"
+
+import { JobQueue, createQueue } from "../queue"
+import * as dbUtils from "../db"
+
+interface ProcessDocMessage {
+ dbName: string
+ docId: string
+ data: Record
+}
+
+const PERSIST_MAX_ATTEMPTS = 100
+
+export const docWritethroughProcessorQueue = createQueue(
+ JobQueue.DOC_WRITETHROUGH_QUEUE,
+ {
+ jobOptions: {
+ attempts: PERSIST_MAX_ATTEMPTS,
+ },
+ }
+)
+
+class DocWritethroughProcessor {
+ init() {
+ docWritethroughProcessorQueue.process(async message => {
+ try {
+ await this.persistToDb(message.data)
+ } catch (err: any) {
+ if (err.status === 409) {
+ // If we get a 409, it means that another job updated it meanwhile. We want to retry it to persist it again.
+ throw new Error(
+ `Conflict persisting message ${message.id}. Attempt ${message.attemptsMade}`
+ )
+ }
+
+ throw err
+ }
+ })
+ return this
+ }
+
+ private async persistToDb({
+ dbName,
+ docId,
+ data,
+ }: {
+ dbName: string
+ docId: string
+ data: Record
+ }) {
+ const db = dbUtils.getDB(dbName)
+ let doc: AnyDocument | undefined
+ try {
+ doc = await db.get(docId)
+ } catch {
+ doc = { _id: docId }
+ }
+
+ doc = { ...doc, ...data }
+ await db.put(doc)
+ }
+}
+
+export const processor = new DocWritethroughProcessor().init()
+
+export class DocWritethrough {
+ private db: Database
+ private _docId: string
+
+ constructor(db: Database, docId: string) {
+ this.db = db
+ this._docId = docId
+ }
+
+ get docId() {
+ return this._docId
+ }
+
+ async patch(data: Record) {
+ await docWritethroughProcessorQueue.add({
+ dbName: this.db.name,
+ docId: this.docId,
+ data,
+ })
+ }
+}
diff --git a/packages/backend-core/src/cache/generic.ts b/packages/backend-core/src/cache/generic.ts
index 3ac323a8d4..2d6d8b9472 100644
--- a/packages/backend-core/src/cache/generic.ts
+++ b/packages/backend-core/src/cache/generic.ts
@@ -26,7 +26,8 @@ export const store = (...args: Parameters) =>
GENERIC.store(...args)
export const destroy = (...args: Parameters) =>
GENERIC.delete(...args)
-export const withCache = (...args: Parameters) =>
- GENERIC.withCache(...args)
+export const withCache = (
+ ...args: Parameters>
+) => GENERIC.withCache(...args)
export const bustCache = (...args: Parameters) =>
GENERIC.bustCache(...args)
diff --git a/packages/backend-core/src/cache/index.ts b/packages/backend-core/src/cache/index.ts
index 4fa986e4e2..3b25108634 100644
--- a/packages/backend-core/src/cache/index.ts
+++ b/packages/backend-core/src/cache/index.ts
@@ -5,3 +5,4 @@ export * as writethrough from "./writethrough"
export * as invite from "./invite"
export * as passwordReset from "./passwordReset"
export * from "./generic"
+export * as docWritethrough from "./docWritethrough"
diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts
new file mode 100644
index 0000000000..d90c83afd3
--- /dev/null
+++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts
@@ -0,0 +1,288 @@
+import tk from "timekeeper"
+
+import _ from "lodash"
+import { DBTestConfiguration, generator, structures } from "../../../tests"
+import { getDB } from "../../db"
+
+import {
+ DocWritethrough,
+ docWritethroughProcessorQueue,
+} from "../docWritethrough"
+
+import InMemoryQueue from "../../queue/inMemoryQueue"
+
+const initialTime = Date.now()
+
+async function waitForQueueCompletion() {
+ const queue: InMemoryQueue = docWritethroughProcessorQueue as never
+ await queue.waitForCompletion()
+}
+
+describe("docWritethrough", () => {
+ const config = new DBTestConfiguration()
+
+ const db = getDB(structures.db.id())
+ let documentId: string
+ let docWritethrough: DocWritethrough
+
+ describe("patch", () => {
+ function generatePatchObject(fieldCount: number) {
+ const keys = generator.unique(() => generator.word(), fieldCount)
+ return keys.reduce((acc, c) => {
+ acc[c] = generator.word()
+ return acc
+ }, {} as Record)
+ }
+
+ beforeEach(async () => {
+ jest.clearAllMocks()
+ documentId = structures.uuid()
+ docWritethrough = new DocWritethrough(db, documentId)
+ })
+
+ it("patching will not persist until the messages are persisted", async () => {
+ await config.doInTenant(async () => {
+ await docWritethrough.patch(generatePatchObject(2))
+ await docWritethrough.patch(generatePatchObject(2))
+
+ expect(await db.exists(documentId)).toBe(false)
+ })
+ })
+
+ it("patching will persist when the messages are persisted", async () => {
+ await config.doInTenant(async () => {
+ const patch1 = generatePatchObject(2)
+ const patch2 = generatePatchObject(2)
+ await docWritethrough.patch(patch1)
+ await docWritethrough.patch(patch2)
+
+ await waitForQueueCompletion()
+
+ // This will not be persisted
+ const patch3 = generatePatchObject(3)
+ await docWritethrough.patch(patch3)
+
+ expect(await db.get(documentId)).toEqual({
+ _id: documentId,
+ ...patch1,
+ ...patch2,
+ _rev: expect.stringMatching(/2-.+/),
+ createdAt: new Date(initialTime).toISOString(),
+ updatedAt: new Date(initialTime).toISOString(),
+ })
+ })
+ })
+
+ it("patching will persist keeping the previous data", async () => {
+ await config.doInTenant(async () => {
+ const patch1 = generatePatchObject(2)
+ const patch2 = generatePatchObject(2)
+ await docWritethrough.patch(patch1)
+ await docWritethrough.patch(patch2)
+
+ await waitForQueueCompletion()
+
+ const patch3 = generatePatchObject(3)
+ await docWritethrough.patch(patch3)
+
+ await waitForQueueCompletion()
+
+ expect(await db.get(documentId)).toEqual(
+ expect.objectContaining({
+ _id: documentId,
+ ...patch1,
+ ...patch2,
+ ...patch3,
+ })
+ )
+ })
+ })
+
+ it("date audit fields are set correctly when persisting", async () => {
+ await config.doInTenant(async () => {
+ const patch1 = generatePatchObject(2)
+ const patch2 = generatePatchObject(2)
+ await docWritethrough.patch(patch1)
+ const date1 = new Date()
+ await waitForQueueCompletion()
+ await docWritethrough.patch(patch2)
+
+ tk.travel(Date.now() + 100)
+ const date2 = new Date()
+ await waitForQueueCompletion()
+
+ expect(date1).not.toEqual(date2)
+ expect(await db.get(documentId)).toEqual(
+ expect.objectContaining({
+ createdAt: date1.toISOString(),
+ updatedAt: date2.toISOString(),
+ })
+ )
+ })
+ })
+
+ it("concurrent patches will override keys", async () => {
+ await config.doInTenant(async () => {
+ const patch1 = generatePatchObject(2)
+ await docWritethrough.patch(patch1)
+ await waitForQueueCompletion()
+ const patch2 = generatePatchObject(1)
+ await docWritethrough.patch(patch2)
+
+ const keyToOverride = _.sample(Object.keys(patch1))!
+ expect(await db.get(documentId)).toEqual(
+ expect.objectContaining({
+ [keyToOverride]: patch1[keyToOverride],
+ })
+ )
+
+ await waitForQueueCompletion()
+
+ const patch3 = {
+ ...generatePatchObject(3),
+ [keyToOverride]: generator.word(),
+ }
+ await docWritethrough.patch(patch3)
+ await waitForQueueCompletion()
+
+ expect(await db.get(documentId)).toEqual(
+ expect.objectContaining({
+ ...patch1,
+ ...patch2,
+ ...patch3,
+ })
+ )
+ })
+ })
+
+ it("concurrent patches to different docWritethrough will not pollute each other", async () => {
+ await config.doInTenant(async () => {
+ const secondDocWritethrough = new DocWritethrough(
+ db,
+ structures.db.id()
+ )
+
+ const doc1Patch = generatePatchObject(2)
+ await docWritethrough.patch(doc1Patch)
+ const doc2Patch = generatePatchObject(1)
+ await secondDocWritethrough.patch(doc2Patch)
+
+ await waitForQueueCompletion()
+
+ const doc1Patch2 = generatePatchObject(3)
+ await docWritethrough.patch(doc1Patch2)
+ const doc2Patch2 = generatePatchObject(3)
+ await secondDocWritethrough.patch(doc2Patch2)
+ await waitForQueueCompletion()
+
+ expect(await db.get(docWritethrough.docId)).toEqual(
+ expect.objectContaining({
+ ...doc1Patch,
+ ...doc1Patch2,
+ })
+ )
+
+ expect(await db.get(secondDocWritethrough.docId)).toEqual(
+ expect.objectContaining({
+ ...doc2Patch,
+ ...doc2Patch2,
+ })
+ )
+ })
+ })
+
+ it("cached values are persisted only once", async () => {
+ await config.doInTenant(async () => {
+ const initialPatch = generatePatchObject(5)
+
+ await docWritethrough.patch(initialPatch)
+ await waitForQueueCompletion()
+
+ expect(await db.get(documentId)).toEqual(
+ expect.objectContaining(initialPatch)
+ )
+
+ await db.remove(await db.get(documentId))
+
+ await waitForQueueCompletion()
+ const extraPatch = generatePatchObject(5)
+ await docWritethrough.patch(extraPatch)
+ await waitForQueueCompletion()
+
+ expect(await db.get(documentId)).toEqual(
+ expect.objectContaining(extraPatch)
+ )
+ expect(await db.get(documentId)).not.toEqual(
+ expect.objectContaining(initialPatch)
+ )
+ })
+ })
+
+ it("concurrent calls will not cause conflicts", async () => {
+ async function parallelPatch(count: number) {
+ const patches = Array.from({ length: count }).map(() =>
+ generatePatchObject(1)
+ )
+ await Promise.all(patches.map(p => docWritethrough.patch(p)))
+
+ return patches.reduce((acc, c) => {
+ acc = { ...acc, ...c }
+ return acc
+ }, {})
+ }
+ const queueMessageSpy = jest.spyOn(docWritethroughProcessorQueue, "add")
+
+ await config.doInTenant(async () => {
+ let patches = await parallelPatch(5)
+ expect(queueMessageSpy).toBeCalledTimes(5)
+
+ await waitForQueueCompletion()
+ expect(await db.get(documentId)).toEqual(
+ expect.objectContaining(patches)
+ )
+
+ patches = { ...patches, ...(await parallelPatch(40)) }
+ expect(queueMessageSpy).toBeCalledTimes(45)
+
+ await waitForQueueCompletion()
+ expect(await db.get(documentId)).toEqual(
+ expect.objectContaining(patches)
+ )
+
+ patches = { ...patches, ...(await parallelPatch(10)) }
+ expect(queueMessageSpy).toBeCalledTimes(55)
+
+ await waitForQueueCompletion()
+ expect(await db.get(documentId)).toEqual(
+ expect.objectContaining(patches)
+ )
+ })
+ })
+
+ // This is not yet supported
+ it.skip("patches will execute in order", async () => {
+ let incrementalValue = 0
+ const keyToOverride = generator.word()
+ async function incrementalPatches(count: number) {
+ for (let i = 0; i < count; i++) {
+ await docWritethrough.patch({ [keyToOverride]: incrementalValue++ })
+ }
+ }
+
+ await config.doInTenant(async () => {
+ await incrementalPatches(5)
+
+ await waitForQueueCompletion()
+ expect(await db.get(documentId)).toEqual(
+ expect.objectContaining({ [keyToOverride]: 5 })
+ )
+
+ await incrementalPatches(40)
+ await waitForQueueCompletion()
+ expect(await db.get(documentId)).toEqual(
+ expect.objectContaining({ [keyToOverride]: 45 })
+ )
+ })
+ })
+ })
+})
diff --git a/packages/backend-core/src/cache/user.ts b/packages/backend-core/src/cache/user.ts
index 313b9a4d4a..ecfa20f99e 100644
--- a/packages/backend-core/src/cache/user.ts
+++ b/packages/backend-core/src/cache/user.ts
@@ -6,7 +6,7 @@ import env from "../environment"
import * as accounts from "../accounts"
import { UserDB } from "../users"
import { sdk } from "@budibase/shared-core"
-import { User } from "@budibase/types"
+import { User, UserMetadata } from "@budibase/types"
const EXPIRY_SECONDS = 3600
@@ -15,7 +15,7 @@ const EXPIRY_SECONDS = 3600
*/
async function populateFromDB(userId: string, tenantId: string) {
const db = tenancy.getTenantDB(tenantId)
- const user = await db.get(userId)
+ const user = await db.get(userId)
user.budibaseAccess = true
if (!env.SELF_HOSTED && !env.DISABLE_ACCOUNT_PORTAL) {
const account = await accounts.getAccount(user.email)
diff --git a/packages/backend-core/src/constants/db.ts b/packages/backend-core/src/constants/db.ts
index ac00483021..f4caac502e 100644
--- a/packages/backend-core/src/constants/db.ts
+++ b/packages/backend-core/src/constants/db.ts
@@ -57,6 +57,9 @@ export const StaticDatabases = {
AUDIT_LOGS: {
name: "audit-logs",
},
+ SCIM_LOGS: {
+ name: "scim-logs",
+ },
}
export const APP_PREFIX = prefixed(DocumentType.APP)
diff --git a/packages/backend-core/src/context/mainContext.ts b/packages/backend-core/src/context/mainContext.ts
index 36fd5dcb48..ae86695168 100644
--- a/packages/backend-core/src/context/mainContext.ts
+++ b/packages/backend-core/src/context/mainContext.ts
@@ -35,6 +35,17 @@ export function getAuditLogDBName(tenantId?: string) {
}
}
+export function getScimDBName(tenantId?: string) {
+ if (!tenantId) {
+ tenantId = getTenantId()
+ }
+ if (tenantId === DEFAULT_TENANT_ID) {
+ return StaticDatabases.SCIM_LOGS.name
+ } else {
+ return `${tenantId}${SEPARATOR}${StaticDatabases.SCIM_LOGS.name}`
+ }
+}
+
export function baseGlobalDBName(tenantId: string | undefined | null) {
if (!tenantId || tenantId === DEFAULT_TENANT_ID) {
return StaticDatabases.GLOBAL.name
diff --git a/packages/backend-core/src/db/Replication.ts b/packages/backend-core/src/db/Replication.ts
index f91a37ce8f..9c960d76dd 100644
--- a/packages/backend-core/src/db/Replication.ts
+++ b/packages/backend-core/src/db/Replication.ts
@@ -1,66 +1,57 @@
+import PouchDB from "pouchdb"
import { getPouchDB, closePouchDB } from "./couch"
import { DocumentType } from "../constants"
class Replication {
- source: any
- target: any
- replication: any
+ source: PouchDB.Database
+ target: PouchDB.Database
- /**
- *
- * @param source - the DB you want to replicate or rollback to
- * @param target - the DB you want to replicate to, or rollback from
- */
- constructor({ source, target }: any) {
+ constructor({ source, target }: { source: string; target: string }) {
this.source = getPouchDB(source)
this.target = getPouchDB(target)
}
- close() {
- return Promise.all([closePouchDB(this.source), closePouchDB(this.target)])
+ async close() {
+ await Promise.all([closePouchDB(this.source), closePouchDB(this.target)])
}
- promisify(operation: any, opts = {}) {
- return new Promise(resolve => {
- operation(this.target, opts)
- .on("denied", function (err: any) {
+ replicate(opts: PouchDB.Replication.ReplicateOptions = {}) {
+ return new Promise>(resolve => {
+ this.source.replicate
+ .to(this.target, opts)
+ .on("denied", function (err) {
// a document failed to replicate (e.g. due to permissions)
throw new Error(`Denied: Document failed to replicate ${err}`)
})
- .on("complete", function (info: any) {
+ .on("complete", function (info) {
return resolve(info)
})
- .on("error", function (err: any) {
+ .on("error", function (err) {
throw new Error(`Replication Error: ${err}`)
})
})
}
- /**
- * Two way replication operation, intended to be promise based.
- * @param opts - PouchDB replication options
- */
- sync(opts = {}) {
- this.replication = this.promisify(this.source.sync, opts)
- return this.replication
- }
+ appReplicateOpts(
+ opts: PouchDB.Replication.ReplicateOptions = {}
+ ): PouchDB.Replication.ReplicateOptions {
+ if (typeof opts.filter === "string") {
+ return opts
+ }
- /**
- * One way replication operation, intended to be promise based.
- * @param opts - PouchDB replication options
- */
- replicate(opts = {}) {
- this.replication = this.promisify(this.source.replicate.to, opts)
- return this.replication
- }
+ const filter = opts.filter
+ delete opts.filter
- appReplicateOpts() {
return {
- filter: (doc: any) => {
+ ...opts,
+ filter: (doc: any, params: any) => {
if (doc._id && doc._id.startsWith(DocumentType.AUTOMATION_LOG)) {
return false
}
- return doc._id !== DocumentType.APP_METADATA
+ if (doc._id === DocumentType.APP_METADATA) {
+ return false
+ }
+ return filter ? filter(doc, params) : true
},
}
}
@@ -75,10 +66,6 @@ class Replication {
// take the opportunity to remove deleted tombstones
await this.replicate()
}
-
- cancel() {
- this.replication.cancel()
- }
}
export default Replication
diff --git a/packages/backend-core/src/db/couch/DatabaseImpl.ts b/packages/backend-core/src/db/couch/DatabaseImpl.ts
index 0e2b4173b0..416313f520 100644
--- a/packages/backend-core/src/db/couch/DatabaseImpl.ts
+++ b/packages/backend-core/src/db/couch/DatabaseImpl.ts
@@ -11,6 +11,7 @@ import {
Document,
isDocument,
RowResponse,
+ RowValue,
} from "@budibase/types"
import { getCouchInfo } from "./connections"
import { directCouchUrlCall } from "./utils"
@@ -69,7 +70,15 @@ export class DatabaseImpl implements Database {
DatabaseImpl.nano = buildNano(couchInfo)
}
- async exists() {
+ exists(docId?: string) {
+ if (docId === undefined) {
+ return this.dbExists()
+ }
+
+ return this.docExists(docId)
+ }
+
+ private async dbExists() {
const response = await directCouchUrlCall({
url: `${this.couchInfo.url}/${this.name}`,
method: "HEAD",
@@ -78,6 +87,15 @@ export class DatabaseImpl implements Database {
return response.status === 200
}
+ private async docExists(id: string): Promise {
+ try {
+ await this.performCall(db => () => db.head(id))
+ return true
+ } catch {
+ return false
+ }
+ }
+
private nano() {
return this.instanceNano || DatabaseImpl.nano
}
@@ -221,7 +239,7 @@ export class DatabaseImpl implements Database {
})
}
- async allDocs(
+ async allDocs(
params: DatabaseQueryOpts
): Promise> {
return this.performCall(db => {
diff --git a/packages/backend-core/src/db/instrumentation.ts b/packages/backend-core/src/db/instrumentation.ts
index aa2ac424ae..795f30d7cd 100644
--- a/packages/backend-core/src/db/instrumentation.ts
+++ b/packages/backend-core/src/db/instrumentation.ts
@@ -1,5 +1,4 @@
import {
- DocumentScope,
DocumentDestroyResponse,
DocumentInsertResponse,
DocumentBulkResponse,
@@ -13,6 +12,7 @@ import {
DatabasePutOpts,
DatabaseQueryOpts,
Document,
+ RowValue,
} from "@budibase/types"
import tracer from "dd-trace"
import { Writable } from "stream"
@@ -24,9 +24,12 @@ export class DDInstrumentedDatabase implements Database {
return this.db.name
}
- exists(): Promise {
+ exists(docId?: string): Promise {
return tracer.trace("db.exists", span => {
- span?.addTags({ db_name: this.name })
+ span?.addTags({ db_name: this.name, doc_id: docId })
+ if (docId) {
+ return this.db.exists(docId)
+ }
return this.db.exists()
})
}
@@ -79,7 +82,7 @@ export class DDInstrumentedDatabase implements Database {
})
}
- allDocs(
+ allDocs(
params: DatabaseQueryOpts
): Promise> {
return tracer.trace("db.allDocs", span => {
diff --git a/packages/backend-core/src/db/tests/DatabaseImpl.spec.ts b/packages/backend-core/src/db/tests/DatabaseImpl.spec.ts
new file mode 100644
index 0000000000..586f13f417
--- /dev/null
+++ b/packages/backend-core/src/db/tests/DatabaseImpl.spec.ts
@@ -0,0 +1,55 @@
+import _ from "lodash"
+import { AnyDocument } from "@budibase/types"
+import { generator } from "../../../tests"
+import { DatabaseImpl } from "../couch"
+import { newid } from "../../utils"
+
+describe("DatabaseImpl", () => {
+ const database = new DatabaseImpl(generator.word())
+ const documents: AnyDocument[] = []
+
+ beforeAll(async () => {
+ const docsToCreate = Array.from({ length: 10 }).map(() => ({
+ _id: newid(),
+ }))
+ const createdDocs = await database.bulkDocs(docsToCreate)
+
+ documents.push(...createdDocs.map((x: any) => ({ _id: x.id, _rev: x.rev })))
+ })
+
+ describe("document exists", () => {
+ it("can check existing docs by id", async () => {
+ const existingDoc = _.sample(documents)
+ const result = await database.exists(existingDoc!._id!)
+
+ expect(result).toBe(true)
+ })
+
+ it("can check non existing docs by id", async () => {
+ const result = await database.exists(newid())
+
+ expect(result).toBe(false)
+ })
+
+ it("can check an existing doc by id multiple times", async () => {
+ const existingDoc = _.sample(documents)
+ const id = existingDoc!._id!
+
+ const results = []
+ results.push(await database.exists(id))
+ results.push(await database.exists(id))
+ results.push(await database.exists(id))
+
+ expect(results).toEqual([true, true, true])
+ })
+
+ it("returns false after the doc is deleted", async () => {
+ const existingDoc = _.sample(documents)
+ const id = existingDoc!._id!
+ expect(await database.exists(id)).toBe(true)
+
+ await database.remove(existingDoc!)
+ expect(await database.exists(id)).toBe(false)
+ })
+ })
+})
diff --git a/packages/backend-core/src/docIds/ids.ts b/packages/backend-core/src/docIds/ids.ts
index 02176109da..9627b2b94c 100644
--- a/packages/backend-core/src/docIds/ids.ts
+++ b/packages/backend-core/src/docIds/ids.ts
@@ -74,7 +74,7 @@ export function getGlobalIDFromUserMetadataID(id: string) {
* Generates a template ID.
* @param ownerId The owner/user of the template, this could be global or a workspace level.
*/
-export function generateTemplateID(ownerId: any) {
+export function generateTemplateID(ownerId: string) {
return `${DocumentType.TEMPLATE}${SEPARATOR}${ownerId}${SEPARATOR}${newid()}`
}
@@ -105,7 +105,7 @@ export function prefixRoleID(name: string) {
* Generates a new dev info document ID - this is scoped to a user.
* @returns The new dev info ID which info for dev (like api key) can be stored under.
*/
-export const generateDevInfoID = (userId: any) => {
+export const generateDevInfoID = (userId: string) => {
return `${DocumentType.DEV_INFO}${SEPARATOR}${userId}`
}
diff --git a/packages/backend-core/src/environment.ts b/packages/backend-core/src/environment.ts
index b3179cbeea..2da2a77d67 100644
--- a/packages/backend-core/src/environment.ts
+++ b/packages/backend-core/src/environment.ts
@@ -186,6 +186,7 @@ const environment = {
environment[key] = value
},
ROLLING_LOG_MAX_SIZE: process.env.ROLLING_LOG_MAX_SIZE || "10M",
+ DISABLE_SCIM_CALLS: process.env.DISABLE_SCIM_CALLS,
}
// clean up any environment variable edge cases
diff --git a/packages/backend-core/src/middleware/errorHandling.ts b/packages/backend-core/src/middleware/errorHandling.ts
index ebdd4107e9..2b8f7195ed 100644
--- a/packages/backend-core/src/middleware/errorHandling.ts
+++ b/packages/backend-core/src/middleware/errorHandling.ts
@@ -1,5 +1,6 @@
import { APIError } from "@budibase/types"
import * as errors from "../errors"
+import environment from "../environment"
export async function errorHandling(ctx: any, next: any) {
try {
@@ -14,15 +15,19 @@ export async function errorHandling(ctx: any, next: any) {
console.error(err)
}
- const error = errors.getPublicError(err)
- const body: APIError = {
+ let error: APIError = {
message: err.message,
status: status,
validationErrors: err.validation,
- error,
+ error: errors.getPublicError(err),
}
- ctx.body = body
+ if (environment.isTest() && ctx.headers["x-budibase-include-stacktrace"]) {
+ // @ts-ignore
+ error.stack = err.stack
+ }
+
+ ctx.body = error
}
}
diff --git a/packages/backend-core/src/objectStore/buckets/plugins.ts b/packages/backend-core/src/objectStore/buckets/plugins.ts
index 6f1b7116ae..02be9345ab 100644
--- a/packages/backend-core/src/objectStore/buckets/plugins.ts
+++ b/packages/backend-core/src/objectStore/buckets/plugins.ts
@@ -6,7 +6,7 @@ import { Plugin } from "@budibase/types"
// URLS
-export function enrichPluginURLs(plugins: Plugin[]) {
+export function enrichPluginURLs(plugins?: Plugin[]): Plugin[] {
if (!plugins || !plugins.length) {
return []
}
diff --git a/packages/backend-core/src/queue/constants.ts b/packages/backend-core/src/queue/constants.ts
index eb4f21aced..a095c6c769 100644
--- a/packages/backend-core/src/queue/constants.ts
+++ b/packages/backend-core/src/queue/constants.ts
@@ -4,4 +4,5 @@ export enum JobQueue {
AUDIT_LOG = "auditLogQueue",
SYSTEM_EVENT_QUEUE = "systemEventQueue",
APP_MIGRATION = "appMigration",
+ DOC_WRITETHROUGH_QUEUE = "docWritethroughQueue",
}
diff --git a/packages/backend-core/src/queue/inMemoryQueue.ts b/packages/backend-core/src/queue/inMemoryQueue.ts
index c05bbffbe9..afb5592562 100644
--- a/packages/backend-core/src/queue/inMemoryQueue.ts
+++ b/packages/backend-core/src/queue/inMemoryQueue.ts
@@ -1,5 +1,14 @@
import events from "events"
-import { timeout } from "../utils"
+import { newid, timeout } from "../utils"
+import { Queue, QueueOptions, JobOptions } from "./queue"
+
+interface JobMessage {
+ id: string
+ timestamp: number
+ queue: string
+ data: any
+ opts?: JobOptions
+}
/**
* Bull works with a Job wrapper around all messages that contains a lot more information about
@@ -10,12 +19,13 @@ import { timeout } from "../utils"
* @returns A new job which can now be put onto the queue, this is mostly an
* internal structure so that an in memory queue can be easily swapped for a Bull queue.
*/
-function newJob(queue: string, message: any) {
+function newJob(queue: string, message: any, opts?: JobOptions): JobMessage {
return {
+ id: newid(),
timestamp: Date.now(),
queue: queue,
data: message,
- opts: {},
+ opts,
}
}
@@ -24,26 +34,29 @@ function newJob(queue: string, message: any) {
* It is relatively simple, using an event emitter internally to register when messages are available
* to the consumers - in can support many inputs and many consumers.
*/
-class InMemoryQueue {
+class InMemoryQueue implements Partial<Queue> {
_name: string
- _opts?: any
- _messages: any[]
+ _opts?: QueueOptions
+ _messages: JobMessage[]
+  _queuedJobIds: Set<string>
_emitter: EventEmitter
_runCount: number
_addCount: number
+
/**
* The constructor the queue, exactly the same as that of Bulls.
* @param name The name of the queue which is being configured.
* @param opts This is not used by the in memory queue as there is no real use
* case when in memory, but is the same API as Bull
*/
- constructor(name: string, opts?: any) {
+ constructor(name: string, opts?: QueueOptions) {
this._name = name
this._opts = opts
this._messages = []
this._emitter = new events.EventEmitter()
this._runCount = 0
this._addCount = 0
+ this._queuedJobIds = new Set()
}
/**
@@ -55,22 +68,42 @@ class InMemoryQueue {
* note this is incredibly limited compared to Bull as in reality the Job would contain
* a lot more information about the queue and current status of Bull cluster.
*/
- process(func: any) {
+ async process(func: any) {
this._emitter.on("message", async () => {
if (this._messages.length <= 0) {
return
}
let msg = this._messages.shift()
+
let resp = func(msg)
+
+ async function retryFunc(fnc: any) {
+ try {
+ await fnc
+ } catch (e: any) {
+          await new Promise<void>(r => setTimeout(() => r(), 50))
+
+ await retryFunc(func(msg))
+ }
+ }
+
if (resp.then != null) {
- await resp
+ try {
+ await retryFunc(resp)
+ } catch (e: any) {
+ console.error(e)
+ }
}
this._runCount++
+ const jobId = msg?.opts?.jobId?.toString()
+ if (jobId && msg?.opts?.removeOnComplete) {
+ this._queuedJobIds.delete(jobId)
+ }
})
}
async isReady() {
- return true
+ return this as any
}
// simply puts a message to the queue and emits to the queue for processing
@@ -83,27 +116,45 @@ class InMemoryQueue {
* @param repeat serves no purpose for the import queue.
*/
// eslint-disable-next-line no-unused-vars
- add(msg: any, repeat: boolean) {
- if (typeof msg !== "object") {
+ async add(data: any, opts?: JobOptions) {
+ const jobId = opts?.jobId?.toString()
+ if (jobId && this._queuedJobIds.has(jobId)) {
+ console.log(`Ignoring already queued job ${jobId}`)
+ return
+ }
+
+ if (typeof data !== "object") {
throw "Queue only supports carrying JSON."
}
- this._messages.push(newJob(this._name, msg))
- this._addCount++
- this._emitter.emit("message")
+ if (jobId) {
+ this._queuedJobIds.add(jobId)
+ }
+
+ const pushMessage = () => {
+ this._messages.push(newJob(this._name, data, opts))
+ this._addCount++
+ this._emitter.emit("message")
+ }
+
+ const delay = opts?.delay
+ if (delay) {
+ setTimeout(pushMessage, delay)
+ } else {
+ pushMessage()
+ }
+ return {} as any
}
/**
* replicating the close function from bull, which waits for jobs to finish.
*/
- async close() {
- return []
- }
+ async close() {}
/**
* This removes a cron which has been implemented, this is part of Bull API.
* @param cronJobId The cron which is to be removed.
*/
- removeRepeatableByKey(cronJobId: string) {
+ async removeRepeatableByKey(cronJobId: string) {
// TODO: implement for testing
console.log(cronJobId)
}
@@ -111,12 +162,12 @@ class InMemoryQueue {
/**
* Implemented for tests
*/
- getRepeatableJobs() {
+ async getRepeatableJobs() {
return []
}
// eslint-disable-next-line no-unused-vars
- removeJobs(pattern: string) {
+ async removeJobs(pattern: string) {
// no-op
}
@@ -128,18 +179,22 @@ class InMemoryQueue {
}
async getJob() {
- return {}
+ return null
}
on() {
// do nothing
- return this
+ return this as any
}
async waitForCompletion() {
do {
await timeout(50)
- } while (this._addCount < this._runCount)
+ } while (this.hasRunningJobs())
+ }
+
+ hasRunningJobs() {
+ return this._addCount > this._runCount
}
}
diff --git a/packages/backend-core/src/queue/listeners.ts b/packages/backend-core/src/queue/listeners.ts
index 063a01bd2f..14dce5fe8d 100644
--- a/packages/backend-core/src/queue/listeners.ts
+++ b/packages/backend-core/src/queue/listeners.ts
@@ -88,6 +88,7 @@ enum QueueEventType {
AUDIT_LOG_EVENT = "audit-log-event",
SYSTEM_EVENT = "system-event",
APP_MIGRATION = "app-migration",
+ DOC_WRITETHROUGH = "doc-writethrough",
}
const EventTypeMap: { [key in JobQueue]: QueueEventType } = {
@@ -96,6 +97,7 @@ const EventTypeMap: { [key in JobQueue]: QueueEventType } = {
[JobQueue.AUDIT_LOG]: QueueEventType.AUDIT_LOG_EVENT,
[JobQueue.SYSTEM_EVENT_QUEUE]: QueueEventType.SYSTEM_EVENT,
[JobQueue.APP_MIGRATION]: QueueEventType.APP_MIGRATION,
+ [JobQueue.DOC_WRITETHROUGH_QUEUE]: QueueEventType.DOC_WRITETHROUGH,
}
function logging(queue: Queue, jobQueue: JobQueue) {
diff --git a/packages/backend-core/src/queue/queue.ts b/packages/backend-core/src/queue/queue.ts
index 0bcb25a35f..1838eed92f 100644
--- a/packages/backend-core/src/queue/queue.ts
+++ b/packages/backend-core/src/queue/queue.ts
@@ -7,6 +7,8 @@ import { addListeners, StalledFn } from "./listeners"
import { Duration } from "../utils"
import * as timers from "../timers"
+export { QueueOptions, Queue, JobOptions } from "bull"
+
// the queue lock is held for 5 minutes
const QUEUE_LOCK_MS = Duration.fromMinutes(5).toMs()
// queue lock is refreshed every 30 seconds
diff --git a/packages/backend-core/src/redis/init.ts b/packages/backend-core/src/redis/init.ts
index f3bcee3209..7920dfed2d 100644
--- a/packages/backend-core/src/redis/init.ts
+++ b/packages/backend-core/src/redis/init.ts
@@ -9,7 +9,8 @@ let userClient: Client,
lockClient: Client,
socketClient: Client,
inviteClient: Client,
- passwordResetClient: Client
+ passwordResetClient: Client,
+ docWritethroughClient: Client
export async function init() {
userClient = await new Client(utils.Databases.USER_CACHE).init()
@@ -24,6 +25,9 @@ export async function init() {
utils.Databases.SOCKET_IO,
utils.SelectableDatabase.SOCKET_IO
).init()
+ docWritethroughClient = await new Client(
+ utils.Databases.DOC_WRITE_THROUGH
+ ).init()
}
export async function shutdown() {
@@ -104,3 +108,10 @@ export async function getPasswordResetClient() {
}
return passwordResetClient
}
+
+export async function getDocWritethroughClient() {
+  if (!docWritethroughClient) {
+    await init()
+  }
+  return docWritethroughClient
+}
diff --git a/packages/backend-core/src/redis/redis.ts b/packages/backend-core/src/redis/redis.ts
index d15453ba62..79f75421d3 100644
--- a/packages/backend-core/src/redis/redis.ts
+++ b/packages/backend-core/src/redis/redis.ts
@@ -1,5 +1,5 @@
import env from "../environment"
-import Redis from "ioredis"
+import Redis, { Cluster } from "ioredis"
// mock-redis doesn't have any typing
let MockRedis: any | undefined
if (env.MOCK_REDIS) {
@@ -28,7 +28,7 @@ const DEFAULT_SELECT_DB = SelectableDatabase.DEFAULT
// for testing just generate the client once
let CLOSED = false
-let CLIENTS: { [key: number]: any } = {}
+const CLIENTS: Record<number, Redis> = {}
let CONNECTED = false
// mock redis always connected
@@ -36,7 +36,7 @@ if (env.MOCK_REDIS) {
CONNECTED = true
}
-function pickClient(selectDb: number): any {
+function pickClient(selectDb: number) {
return CLIENTS[selectDb]
}
@@ -201,12 +201,15 @@ class RedisWrapper {
key = `${db}${SEPARATOR}${key}`
let stream
if (CLUSTERED) {
- let node = this.getClient().nodes("master")
+ let node = (this.getClient() as never as Cluster).nodes("master")
stream = node[0].scanStream({ match: key + "*", count: 100 })
} else {
- stream = this.getClient().scanStream({ match: key + "*", count: 100 })
+ stream = (this.getClient() as Redis).scanStream({
+ match: key + "*",
+ count: 100,
+ })
}
- return promisifyStream(stream, this.getClient())
+ return promisifyStream(stream, this.getClient() as any)
}
async keys(pattern: string) {
@@ -221,14 +224,16 @@ class RedisWrapper {
async get(key: string) {
const db = this._db
- let response = await this.getClient().get(addDbPrefix(db, key))
+ const response = await this.getClient().get(addDbPrefix(db, key))
// overwrite the prefixed key
+ // @ts-ignore
if (response != null && response.key) {
+ // @ts-ignore
response.key = key
}
// if its not an object just return the response
try {
- return JSON.parse(response)
+ return JSON.parse(response!)
} catch (err) {
return response
}
@@ -274,13 +279,37 @@ class RedisWrapper {
}
}
+ async bulkStore(
+    data: Record<string, any>,
+ expirySeconds: number | null = null
+ ) {
+ const client = this.getClient()
+
+ const dataToStore = Object.entries(data).reduce((acc, [key, value]) => {
+ acc[addDbPrefix(this._db, key)] =
+ typeof value === "object" ? JSON.stringify(value) : value
+ return acc
+    }, {} as Record<string, any>)
+
+ const pipeline = client.pipeline()
+ pipeline.mset(dataToStore)
+
+ if (expirySeconds !== null) {
+ for (const key of Object.keys(dataToStore)) {
+ pipeline.expire(key, expirySeconds)
+ }
+ }
+
+ await pipeline.exec()
+ }
+
async getTTL(key: string) {
const db = this._db
const prefixedKey = addDbPrefix(db, key)
return this.getClient().ttl(prefixedKey)
}
- async setExpiry(key: string, expirySeconds: number | null) {
+ async setExpiry(key: string, expirySeconds: number) {
const db = this._db
const prefixedKey = addDbPrefix(db, key)
await this.getClient().expire(prefixedKey, expirySeconds)
@@ -291,10 +320,35 @@ class RedisWrapper {
await this.getClient().del(addDbPrefix(db, key))
}
+ async bulkDelete(keys: string[]) {
+ const db = this._db
+ await this.getClient().del(keys.map(key => addDbPrefix(db, key)))
+ }
+
async clear() {
let items = await this.scan()
await Promise.all(items.map((obj: any) => this.delete(obj.key)))
}
+
+ async increment(key: string) {
+ const result = await this.getClient().incr(addDbPrefix(this._db, key))
+ if (isNaN(result)) {
+ throw new Error(`Redis ${key} does not contain a number`)
+ }
+ return result
+ }
+
+ async deleteIfValue(key: string, value: any) {
+ const client = this.getClient()
+
+ const luaScript = `
+ if redis.call('GET', KEYS[1]) == ARGV[1] then
+ redis.call('DEL', KEYS[1])
+ end
+ `
+
+ await client.eval(luaScript, 1, addDbPrefix(this._db, key), value)
+ }
}
export default RedisWrapper
diff --git a/packages/backend-core/src/redis/redlockImpl.ts b/packages/backend-core/src/redis/redlockImpl.ts
index 7009dc6f55..adeb5b12ec 100644
--- a/packages/backend-core/src/redis/redlockImpl.ts
+++ b/packages/backend-core/src/redis/redlockImpl.ts
@@ -72,7 +72,7 @@ const OPTIONS: Record = {
export async function newRedlock(opts: Redlock.Options = {}) {
const options = { ...OPTIONS.DEFAULT, ...opts }
const redisWrapper = await getLockClient()
- const client = redisWrapper.getClient()
+ const client = redisWrapper.getClient() as any
return new Redlock([client], options)
}
diff --git a/packages/backend-core/src/redis/tests/redis.spec.ts b/packages/backend-core/src/redis/tests/redis.spec.ts
new file mode 100644
index 0000000000..c2c9e4a14e
--- /dev/null
+++ b/packages/backend-core/src/redis/tests/redis.spec.ts
@@ -0,0 +1,214 @@
+import { GenericContainer, StartedTestContainer } from "testcontainers"
+import { generator, structures } from "../../../tests"
+import RedisWrapper from "../redis"
+import { env } from "../.."
+
+jest.setTimeout(30000)
+
+describe("redis", () => {
+ let redis: RedisWrapper
+ let container: StartedTestContainer
+
+ beforeAll(async () => {
+    container = await new GenericContainer("redis")
+ .withExposedPorts(6379)
+ .start()
+
+ env._set(
+ "REDIS_URL",
+ `${container.getHost()}:${container.getMappedPort(6379)}`
+ )
+ env._set("MOCK_REDIS", 0)
+ env._set("REDIS_PASSWORD", 0)
+ })
+
+ afterAll(() => container?.stop())
+
+ beforeEach(async () => {
+ redis = new RedisWrapper(structures.db.id())
+ await redis.init()
+ })
+
+ describe("store", () => {
+ it("a basic value can be persisted", async () => {
+ const key = structures.uuid()
+ const value = generator.word()
+
+ await redis.store(key, value)
+
+ expect(await redis.get(key)).toEqual(value)
+ })
+
+ it("objects can be persisted", async () => {
+ const key = structures.uuid()
+ const value = { [generator.word()]: generator.word() }
+
+ await redis.store(key, value)
+
+ expect(await redis.get(key)).toEqual(value)
+ })
+ })
+
+ describe("bulkStore", () => {
+ function createRandomObject(
+ keyLength: number,
+ valueGenerator: () => any = () => generator.word()
+ ) {
+ return generator
+ .unique(() => generator.word(), keyLength)
+ .reduce((acc, key) => {
+ acc[key] = valueGenerator()
+ return acc
+        }, {} as Record<string, any>)
+ }
+
+ it("a basic object can be persisted", async () => {
+ const data = createRandomObject(10)
+
+ await redis.bulkStore(data)
+
+ for (const [key, value] of Object.entries(data)) {
+ expect(await redis.get(key)).toEqual(value)
+ }
+
+ expect(await redis.keys("*")).toHaveLength(10)
+ })
+
+ it("a complex object can be persisted", async () => {
+ const data = {
+ ...createRandomObject(10, () => createRandomObject(5)),
+ ...createRandomObject(5),
+ }
+
+ await redis.bulkStore(data)
+
+ for (const [key, value] of Object.entries(data)) {
+ expect(await redis.get(key)).toEqual(value)
+ }
+
+ expect(await redis.keys("*")).toHaveLength(15)
+ })
+
+ it("no TTL is set by default", async () => {
+ const data = createRandomObject(10)
+
+ await redis.bulkStore(data)
+
+ for (const [key, value] of Object.entries(data)) {
+ expect(await redis.get(key)).toEqual(value)
+ expect(await redis.getTTL(key)).toEqual(-1)
+ }
+ })
+
+ it("a bulk store can be persisted with TTL", async () => {
+ const ttl = 500
+ const data = createRandomObject(8)
+
+ await redis.bulkStore(data, ttl)
+
+ for (const [key, value] of Object.entries(data)) {
+ expect(await redis.get(key)).toEqual(value)
+ expect(await redis.getTTL(key)).toEqual(ttl)
+ }
+
+ expect(await redis.keys("*")).toHaveLength(8)
+ })
+
+ it("setting a TTL of -1 will not persist the key", async () => {
+ const ttl = -1
+ const data = createRandomObject(5)
+
+ await redis.bulkStore(data, ttl)
+
+ for (const [key, value] of Object.entries(data)) {
+ expect(await redis.get(key)).toBe(null)
+ }
+
+ expect(await redis.keys("*")).toHaveLength(0)
+ })
+ })
+
+ describe("increment", () => {
+ it("can increment on a new key", async () => {
+ const key = structures.uuid()
+ const result = await redis.increment(key)
+ expect(result).toBe(1)
+ })
+
+ it("can increment multiple times", async () => {
+ const key = structures.uuid()
+ const results = [
+ await redis.increment(key),
+ await redis.increment(key),
+ await redis.increment(key),
+ await redis.increment(key),
+ await redis.increment(key),
+ ]
+ expect(results).toEqual([1, 2, 3, 4, 5])
+ })
+
+    it("can increment on multiple keys", async () => {
+ const key1 = structures.uuid()
+ const key2 = structures.uuid()
+
+ const result1 = await redis.increment(key1)
+ expect(result1).toBe(1)
+
+ const result2 = await redis.increment(key2)
+ expect(result2).toBe(1)
+ })
+
+ it("can increment multiple times in parallel", async () => {
+ const key = structures.uuid()
+ const results = await Promise.all(
+ Array.from({ length: 100 }).map(() => redis.increment(key))
+ )
+ expect(results).toHaveLength(100)
+ expect(results).toEqual(Array.from({ length: 100 }).map((_, i) => i + 1))
+ })
+
+ it("can increment existing set keys", async () => {
+ const key = structures.uuid()
+ await redis.store(key, 70)
+ await redis.increment(key)
+
+ const result = await redis.increment(key)
+ expect(result).toBe(72)
+ })
+
+ it.each([
+ generator.word(),
+ generator.bool(),
+ { [generator.word()]: generator.word() },
+ ])("cannot increment if the store value is not a number", async value => {
+ const key = structures.uuid()
+ await redis.store(key, value)
+
+ await expect(redis.increment(key)).rejects.toThrowError(
+ "ERR value is not an integer or out of range"
+ )
+ })
+ })
+
+ describe("deleteIfValue", () => {
+ it("can delete if the value matches", async () => {
+ const key = structures.uuid()
+ const value = generator.word()
+ await redis.store(key, value)
+
+ await redis.deleteIfValue(key, value)
+
+ expect(await redis.get(key)).toBeNull()
+ })
+
+ it("will not delete if the value does not matches", async () => {
+ const key = structures.uuid()
+ const value = generator.word()
+ await redis.store(key, value)
+
+ await redis.deleteIfValue(key, generator.word())
+
+ expect(await redis.get(key)).toEqual(value)
+ })
+ })
+})
diff --git a/packages/backend-core/src/redis/utils.ts b/packages/backend-core/src/redis/utils.ts
index 7b93458b52..7f84f11467 100644
--- a/packages/backend-core/src/redis/utils.ts
+++ b/packages/backend-core/src/redis/utils.ts
@@ -30,6 +30,7 @@ export enum Databases {
LOCKS = "locks",
SOCKET_IO = "socket_io",
BPM_EVENTS = "bpmEvents",
+ DOC_WRITE_THROUGH = "docWriteThrough",
}
/**
diff --git a/packages/backend-core/src/security/roles.ts b/packages/backend-core/src/security/roles.ts
index 4f048c0a11..a64be6b319 100644
--- a/packages/backend-core/src/security/roles.ts
+++ b/packages/backend-core/src/security/roles.ts
@@ -84,25 +84,24 @@ export function getBuiltinRoles(): { [key: string]: RoleDoc } {
return cloneDeep(BUILTIN_ROLES)
}
-export const BUILTIN_ROLE_ID_ARRAY = Object.values(BUILTIN_ROLES).map(
- role => role._id
-)
+export function isBuiltin(role: string) {
+ return getBuiltinRole(role) !== undefined
+}
-export const BUILTIN_ROLE_NAME_ARRAY = Object.values(BUILTIN_ROLES).map(
- role => role.name
-)
-
-export function isBuiltin(role?: string) {
- return BUILTIN_ROLE_ID_ARRAY.some(builtin => role?.includes(builtin))
+export function getBuiltinRole(roleId: string): Role | undefined {
+ const role = Object.values(BUILTIN_ROLES).find(role =>
+ roleId.includes(role._id)
+ )
+ if (!role) {
+ return undefined
+ }
+ return cloneDeep(role)
}
/**
* Works through the inheritance ranks to see how far up the builtin stack this ID is.
*/
-export function builtinRoleToNumber(id?: string) {
- if (!id) {
- return 0
- }
+export function builtinRoleToNumber(id: string) {
const builtins = getBuiltinRoles()
const MAX = Object.values(builtins).length + 1
if (id === BUILTIN_IDS.ADMIN || id === BUILTIN_IDS.BUILDER) {
@@ -123,7 +122,7 @@ export function builtinRoleToNumber(id?: string) {
/**
* Converts any role to a number, but has to be async to get the roles from db.
*/
-export async function roleToNumber(id?: string) {
+export async function roleToNumber(id: string) {
if (isBuiltin(id)) {
return builtinRoleToNumber(id)
}
@@ -131,7 +130,7 @@ export async function roleToNumber(id?: string) {
defaultPublic: true,
})) as RoleDoc[]
for (let role of hierarchy) {
- if (isBuiltin(role?.inherits)) {
+ if (role?.inherits && isBuiltin(role.inherits)) {
return builtinRoleToNumber(role.inherits) + 1
}
}
@@ -161,35 +160,28 @@ export function lowerBuiltinRoleID(roleId1?: string, roleId2?: string): string {
* @returns The role object, which may contain an "inherits" property.
*/
export async function getRole(
- roleId?: string,
+ roleId: string,
opts?: { defaultPublic?: boolean }
-): Promise<RoleDoc | undefined> {
- if (!roleId) {
- return undefined
- }
- let role: any = {}
+): Promise<RoleDoc> {
// built in roles mostly come from the in-code implementation,
// but can be extended by a doc stored about them (e.g. permissions)
- if (isBuiltin(roleId)) {
- role = cloneDeep(
- Object.values(BUILTIN_ROLES).find(role => role._id === roleId)
- )
- } else {
+ let role: RoleDoc | undefined = getBuiltinRole(roleId)
+ if (!role) {
// make sure has the prefix (if it has it then it won't be added)
roleId = prefixRoleID(roleId)
}
try {
const db = getAppDB()
- const dbRole = await db.get(getDBRoleID(roleId))
- role = Object.assign(role, dbRole)
+    const dbRole = await db.get<RoleDoc>(getDBRoleID(roleId))
+ role = Object.assign(role || {}, dbRole)
// finalise the ID
- role._id = getExternalRoleID(role._id, role.version)
+ role._id = getExternalRoleID(role._id!, role.version)
} catch (err) {
if (!isBuiltin(roleId) && opts?.defaultPublic) {
return cloneDeep(BUILTIN_ROLES.PUBLIC)
}
// only throw an error if there is no role at all
- if (Object.keys(role).length === 0) {
+ if (!role || Object.keys(role).length === 0) {
throw err
}
}
@@ -200,7 +192,7 @@ export async function getRole(
* Simple function to get all the roles based on the top level user role ID.
*/
async function getAllUserRoles(
- userRoleId?: string,
+ userRoleId: string,
opts?: { defaultPublic?: boolean }
): Promise {
// admins have access to all roles
@@ -226,7 +218,7 @@ async function getAllUserRoles(
}
export async function getUserRoleIdHierarchy(
- userRoleId?: string
+ userRoleId: string
): Promise {
const roles = await getUserRoleHierarchy(userRoleId)
return roles.map(role => role._id!)
@@ -241,7 +233,7 @@ export async function getUserRoleIdHierarchy(
* highest level of access and the last being the lowest level.
*/
export async function getUserRoleHierarchy(
- userRoleId?: string,
+ userRoleId: string,
opts?: { defaultPublic?: boolean }
) {
// special case, if they don't have a role then they are a public user
@@ -265,9 +257,9 @@ export function checkForRoleResourceArray(
return rolePerms
}
-export async function getAllRoleIds(appId?: string) {
+export async function getAllRoleIds(appId: string): Promise<string[]> {
const roles = await getAllRoles(appId)
- return roles.map(role => role._id)
+ return roles.map(role => role._id!)
}
/**
diff --git a/packages/backend-core/tests/core/utilities/structures/accounts.ts b/packages/backend-core/tests/core/utilities/structures/accounts.ts
index 515f94db1e..7dcc2de116 100644
--- a/packages/backend-core/tests/core/utilities/structures/accounts.ts
+++ b/packages/backend-core/tests/core/utilities/structures/accounts.ts
@@ -18,7 +18,7 @@ export const account = (partial: Partial = {}): Account => {
return {
accountId: uuid(),
tenantId: generator.word(),
- email: generator.email(),
+ email: generator.email({ domain: "example.com" }),
tenantName: generator.word(),
hosting: Hosting.SELF,
createdAt: Date.now(),
diff --git a/packages/backend-core/tests/core/utilities/structures/scim.ts b/packages/backend-core/tests/core/utilities/structures/scim.ts
index 80f41c605d..f424b2881a 100644
--- a/packages/backend-core/tests/core/utilities/structures/scim.ts
+++ b/packages/backend-core/tests/core/utilities/structures/scim.ts
@@ -13,7 +13,7 @@ interface CreateUserRequestFields {
export function createUserRequest(userData?: Partial) {
const defaultValues = {
externalId: uuid(),
- email: generator.email(),
+ email: `${uuid()}@example.com`,
firstName: generator.first(),
lastName: generator.last(),
username: generator.name(),
diff --git a/packages/bbui/src/Actions/position_dropdown.js b/packages/bbui/src/Actions/position_dropdown.js
index cc169eac09..d259b9197a 100644
--- a/packages/bbui/src/Actions/position_dropdown.js
+++ b/packages/bbui/src/Actions/position_dropdown.js
@@ -35,7 +35,10 @@ export default function positionDropdown(element, opts) {
}
if (typeof customUpdate === "function") {
- styles = customUpdate(anchorBounds, elementBounds, styles)
+ styles = customUpdate(anchorBounds, elementBounds, {
+ ...styles,
+ offset: opts.offset,
+ })
} else {
// Determine vertical styles
if (align === "right-outside") {
diff --git a/packages/builder/src/analytics/index.js b/packages/builder/src/analytics/index.js
index 6bb10acdb5..3a80a05d7f 100644
--- a/packages/builder/src/analytics/index.js
+++ b/packages/builder/src/analytics/index.js
@@ -9,13 +9,17 @@ const intercom = new IntercomClient(process.env.INTERCOM_TOKEN)
class AnalyticsHub {
constructor() {
this.clients = [posthog, intercom]
+ this.initialised = false
}
async activate() {
// Check analytics are enabled
const analyticsStatus = await API.getAnalyticsStatus()
- if (analyticsStatus.enabled) {
- this.clients.forEach(client => client.init())
+ if (analyticsStatus.enabled && !this.initialised) {
+ this.clients.forEach(client => {
+ client.init()
+ })
+ this.initialised = true
}
}
diff --git a/packages/builder/src/components/backend/Datasources/CreateEditRelationship.svelte b/packages/builder/src/components/backend/Datasources/CreateEditRelationship.svelte
index 6b9524776c..b54ecbf9fd 100644
--- a/packages/builder/src/components/backend/Datasources/CreateEditRelationship.svelte
+++ b/packages/builder/src/components/backend/Datasources/CreateEditRelationship.svelte
@@ -40,8 +40,15 @@
part2: PrettyRelationshipDefinitions.MANY,
},
}
- let relationshipOpts1 = Object.values(PrettyRelationshipDefinitions)
- let relationshipOpts2 = Object.values(PrettyRelationshipDefinitions)
+ $: relationshipOpts1 =
+ relationshipPart2 === PrettyRelationshipDefinitions.ONE
+ ? [PrettyRelationshipDefinitions.MANY]
+ : Object.values(PrettyRelationshipDefinitions)
+
+ $: relationshipOpts2 =
+ relationshipPart1 === PrettyRelationshipDefinitions.ONE
+ ? [PrettyRelationshipDefinitions.MANY]
+ : Object.values(PrettyRelationshipDefinitions)
let relationshipPart1 = PrettyRelationshipDefinitions.ONE
let relationshipPart2 = PrettyRelationshipDefinitions.MANY
diff --git a/packages/builder/src/components/common/HelpMenu.svelte b/packages/builder/src/components/common/HelpMenu.svelte
index f6e2f42c98..baff9a5a27 100644
--- a/packages/builder/src/components/common/HelpMenu.svelte
+++ b/packages/builder/src/components/common/HelpMenu.svelte
@@ -1,11 +1,11 @@
@@ -67,13 +95,30 @@
options={componentOptions}
on:change={() => (parameters.columns = [])}
/>
+
Export as
+
Export columns
- {
+ const columns = e.detail
+ parameters.columns = columns
+ parameters.customHeaders = columns.reduce((headerMap, column) => {
+ return {
+ [column.name]: column.displayName,
+ ...headerMap,
+ }
+ }, {})
+ }}
/>
@@ -97,8 +142,8 @@
.params {
display: grid;
column-gap: var(--spacing-xs);
- row-gap: var(--spacing-s);
- grid-template-columns: 90px 1fr;
+ row-gap: var(--spacing-m);
+ grid-template-columns: 90px 1fr 90px;
align-items: center;
}
diff --git a/packages/builder/src/components/design/settings/controls/ButtonActionEditor/actions/SaveRow.svelte b/packages/builder/src/components/design/settings/controls/ButtonActionEditor/actions/SaveRow.svelte
index a1fe773455..d834e9aac9 100644
--- a/packages/builder/src/components/design/settings/controls/ButtonActionEditor/actions/SaveRow.svelte
+++ b/packages/builder/src/components/design/settings/controls/ButtonActionEditor/actions/SaveRow.svelte
@@ -72,7 +72,10 @@
{#if parameters.confirm}
- Confirm text
+ Title
+
+
+ Text
- import EditComponentPopover from "../EditComponentPopover.svelte"
+ import EditComponentPopover from "../EditComponentPopover/EditComponentPopover.svelte"
import { Icon } from "@budibase/bbui"
import { runtimeToReadableBinding } from "dataBinding"
import { isJSBinding } from "@budibase/string-templates"
diff --git a/packages/builder/src/components/design/settings/controls/ColumnEditor/ColumnEditor.svelte b/packages/builder/src/components/design/settings/controls/ColumnEditor/ColumnEditor.svelte
index 2b9fa573c2..742ab785a1 100644
--- a/packages/builder/src/components/design/settings/controls/ColumnEditor/ColumnEditor.svelte
+++ b/packages/builder/src/components/design/settings/controls/ColumnEditor/ColumnEditor.svelte
@@ -29,6 +29,12 @@
allowLinks: true,
})
+ $: {
+ value = (value || []).filter(
+ column => (schema || {})[column.name || column] !== undefined
+ )
+ }
+
const getText = value => {
if (!value?.length) {
return "All columns"
diff --git a/packages/builder/src/components/design/settings/controls/EditComponentPopover.svelte b/packages/builder/src/components/design/settings/controls/EditComponentPopover/EditComponentPopover.svelte
similarity index 83%
rename from packages/builder/src/components/design/settings/controls/EditComponentPopover.svelte
rename to packages/builder/src/components/design/settings/controls/EditComponentPopover/EditComponentPopover.svelte
index 4e645fe343..39e4bc2ada 100644
--- a/packages/builder/src/components/design/settings/controls/EditComponentPopover.svelte
+++ b/packages/builder/src/components/design/settings/controls/EditComponentPopover/EditComponentPopover.svelte
@@ -3,6 +3,7 @@
import { componentStore } from "stores/builder"
import { cloneDeep } from "lodash/fp"
import { createEventDispatcher, getContext } from "svelte"
+ import { customPositionHandler } from "."
import ComponentSettingsSection from "pages/builder/app/[application]/design/[screenId]/[componentId]/_components/Component/ComponentSettingsSection.svelte"
export let anchor
@@ -54,25 +55,6 @@
dispatch("change", nestedComponentInstance)
}
-
- const customPositionHandler = (anchorBounds, eleBounds, cfg) => {
- let { left, top } = cfg
- let percentageOffset = 30
- // left-outside
- left = anchorBounds.left - eleBounds.width - 18
-
- // shift up from the anchor, if space allows
- let offsetPos = Math.floor(eleBounds.height / 100) * percentageOffset
- let defaultTop = anchorBounds.top - offsetPos
-
- if (window.innerHeight - defaultTop < eleBounds.height) {
- top = window.innerHeight - eleBounds.height - 5
- } else {
- top = anchorBounds.top - offsetPos
- }
-
- return { ...cfg, left, top }
- }
0}
maxHeight={600}
+ offset={18}
handlePostionUpdate={customPositionHandler}
>
diff --git a/packages/builder/src/components/design/settings/controls/EditComponentPopover/index.js b/packages/builder/src/components/design/settings/controls/EditComponentPopover/index.js
new file mode 100644
index 0000000000..2dc3f60185
--- /dev/null
+++ b/packages/builder/src/components/design/settings/controls/EditComponentPopover/index.js
@@ -0,0 +1,18 @@
+export const customPositionHandler = (anchorBounds, eleBounds, cfg) => {
+ let { left, top, offset } = cfg
+ let percentageOffset = 30
+ // left-outside
+ left = anchorBounds.left - eleBounds.width - (offset || 5)
+
+ // shift up from the anchor, if space allows
+ let offsetPos = Math.floor(eleBounds.height / 100) * percentageOffset
+ let defaultTop = anchorBounds.top - offsetPos
+
+ if (window.innerHeight - defaultTop < eleBounds.height) {
+ top = window.innerHeight - eleBounds.height - 5
+ } else {
+ top = anchorBounds.top - offsetPos
+ }
+
+ return { ...cfg, left, top }
+}
diff --git a/packages/builder/src/components/design/settings/controls/FieldConfiguration/FieldSetting.svelte b/packages/builder/src/components/design/settings/controls/FieldConfiguration/FieldSetting.svelte
index 27590a9858..771bcf20e0 100644
--- a/packages/builder/src/components/design/settings/controls/FieldConfiguration/FieldSetting.svelte
+++ b/packages/builder/src/components/design/settings/controls/FieldConfiguration/FieldSetting.svelte
@@ -1,5 +1,5 @@
-a11y-click-events-have-key-events
{displayValue}
@@ -140,10 +139,22 @@ a11y-click-events-have-key-events
{/each}