diff --git a/.eslintrc.json b/.eslintrc.json index 3de9d13046..ae9512152f 100644 --- a/.eslintrc.json +++ b/.eslintrc.json @@ -44,7 +44,8 @@ "no-undef": "off", "no-prototype-builtins": "off", "local-rules/no-budibase-imports": "error", - "local-rules/no-test-com": "error" + "local-rules/no-test-com": "error", + "local-rules/email-domain-example-com": "error" } }, { diff --git a/eslint-local-rules/index.js b/eslint-local-rules/index.js index 71bb5068da..177b0a129c 100644 --- a/eslint-local-rules/index.js +++ b/eslint-local-rules/index.js @@ -51,4 +51,41 @@ module.exports = { } }, }, + "email-domain-example-com": { + meta: { + type: "problem", + docs: { + description: + "enforce using the example.com domain for generator.email calls", + category: "Possible Errors", + recommended: false, + }, + fixable: "code", + schema: [], + }, + create: function (context) { + return { + CallExpression(node) { + if ( + node.callee.type === "MemberExpression" && + node.callee.object.name === "generator" && + node.callee.property.name === "email" && + node.arguments.length === 0 + ) { + context.report({ + node, + message: + "Prefer using generator.email with the domain \"{ domain: 'example.com' }\".", + fix: function (fixer) { + return fixer.replaceText( + node, + 'generator.email({ domain: "example.com" })' + ) + }, + }) + } + }, + } + }, + }, } diff --git a/i18n/README.kr.md b/i18n/README.kr.md new file mode 100644 index 0000000000..09fc83569b --- /dev/null +++ b/i18n/README.kr.md @@ -0,0 +1,221 @@ +

+# Budibase
+
+자체 인프라에서 몇 분 만에 맞춤형 비즈니스 도구를 구축하세요.
+
+Budibase는 개발자와 IT 전문가가 몇 분 만에 맞춤형 애플리케이션을 구축하고 자동화할 수 있는 오픈 소스 로우코드 플랫폼입니다.
+
+🤖 🎨 🚀
+
+(스크린샷: Budibase design ui)
+
+GitHub all releases · GitHub release (latest by date) · Follow @budibase · Code of conduct
+
+소개 · 문서 · 기능 요청 · 버그 보고 · 지원: 토론
+

+## ✨ 특징
+
+### "실제" 소프트웨어를 구축할 수 있습니다.
+Budibase를 사용하면 고성능 단일 페이지 애플리케이션을 구축할 수 있습니다. 또한 반응형 디자인으로 제작하여 사용자에게 멋진 경험을 제공할 수 있습니다.
+

+### 오픈 소스 및 확장성
+Budibase는 오픈소스이며, GPL v3 라이선스에 따라 공개되어 있습니다. 이는 Budibase가 항상 당신 곁에 있다는 안도감을 줄 것입니다. 그리고 우리는 개발자 친화적인 환경을 제공하고 있기 때문에, 당신은 원하는 만큼 소스 코드를 포크하여 수정하거나 Budibase에 직접 기여할 수 있습니다.
+

+### 기존 데이터 또는 처음부터 시작
+Budibase를 사용하면 MongoDB, CouchDB, PostgreSQL, MySQL, Airtable, S3, DynamoDB, REST API 등 여러 소스에서 데이터를 가져올 수 있습니다.
+
+또는 원하는 경우 외부 도구 없이 Budibase만으로 처음부터 자체 애플리케이션을 구축할 수도 있습니다. [데이터 소스 제안](https://github.com/Budibase/budibase/discussions?discussions_q=category%3AIdeas)도 환영합니다.
+

+ Budibase data +

+

+### 강력한 내장 구성 요소로 애플리케이션을 설계하고 구축할 수 있습니다.
+
+Budibase에는 아름답게 디자인된 강력한 컴포넌트들이 제공되며, 이를 사용하여 UI를 쉽게 구축할 수 있습니다. 또한 CSS를 통한 스타일링 옵션도 풍부하게 제공되어 보다 창의적인 표현도 가능합니다. [새로운 컴포넌트 요청](https://github.com/Budibase/budibase/discussions?discussions_q=category%3AIdeas)도 남겨 주세요.
+

+ Budibase design +

+

+### 프로세스를 자동화하고, 다른 도구와 연동하고, 웹훅으로 연결하세요!
+워크플로우와 수동 프로세스를 자동화하여 시간을 절약하세요. 웹훅 이벤트 연결부터 이메일 자동화까지, Budibase에 수행할 작업을 지시하기만 하면 자동으로 처리됩니다. [새로운 자동화를 만들거나](https://github.com/Budibase/automations) [새로운 자동화를 요청할 수 있습니다](https://github.com/Budibase/budibase/discussions?discussions_q=category%3AIdeas).
+

+ Budibase automations +

+

+### 선호하는 도구
+Budibase는 사용자의 선호도에 따라 애플리케이션을 구축할 수 있는 다양한 도구를 통합하고 있습니다.
+

+ Budibase integrations +

+

+### 관리자의 천국
+Budibase는 어떤 규모의 프로젝트에도 유연하게 대응합니다. 개인 또는 조직의 서버에서 자체 호스팅하면서 사용자, 온보딩, SMTP, 앱, 그룹, 테마 등을 한곳에서 관리할 수 있습니다. 또한 사용자나 그룹에 앱 포털을 제공하고 그룹 관리자에게 사용자 관리를 맡길 수도 있습니다.
+- 프로모션 비디오: https://youtu.be/xoljVpty_Kw
+


+
+## 🏁 시작
+
+Docker, Kubernetes 또는 Digital Ocean을 사용하여 자체 인프라에서 Budibase를 호스팅하거나, 걱정 없이 빠르게 애플리케이션을 구축하려는 경우 클라우드에서 Budibase를 사용할 수 있습니다.
+
+### [Budibase 셀프 호스팅으로 시작하기](https://docs.budibase.com/docs/hosting-methods)
+
+- [Docker - single ARM compatible image](https://docs.budibase.com/docs/docker)
+- [Docker Compose](https://docs.budibase.com/docs/docker-compose)
+- [Kubernetes](https://docs.budibase.com/docs/kubernetes-k8s)
+- [Digital Ocean](https://docs.budibase.com/docs/digitalocean)
+- [Portainer](https://docs.budibase.com/docs/portainer)
+
+### [클라우드에서 Budibase 시작하기](https://budibase.com)
+

+
+## 🎓 Budibase 알아보기
+
+자세한 내용은 [Budibase 문서](https://docs.budibase.com/docs)에서 확인할 수 있습니다.
+
+ + +

+
+## 💬 커뮤니티
+
+질문하고, 다른 사람을 돕고, 다른 Budibase 사용자와 즐거운 대화를 나눌 수 있는 Budibase 커뮤니티에 여러분을 초대합니다.
+[깃허브 토론](https://github.com/Budibase/budibase/discussions)
+


+
+## ❗ 행동강령
+
+Budibase는 모든 계층의 사람들을 환영하고 상호 존중하는 환경을 제공하는 데 특별한 주의를 기울이고 있으며, 커뮤니티에도 같은 기대를 가지고 있습니다. 자세한 내용은 [**행동 강령**](https://github.com/Budibase/budibase/blob/HEAD/.github/CODE_OF_CONDUCT.md)을 참조하세요.
+
+ +

+
+## 🙌 Budibase에 기여하기
+
+버그 신고부터 코드의 버그 수정에 이르기까지 모든 기여를 감사하고 환영합니다. 새로운 기능을 구현하거나 API를 변경할 계획이 있다면 [먼저 이슈를 등록](https://github.com/Budibase/budibase/issues)해 주세요. 이렇게 하면 여러분의 노력이 헛되지 않도록 보장할 수 있습니다.
+
+Budibase 개발 환경을 설정하는 방법은 [여기](https://github.com/Budibase/budibase/tree/HEAD/docs/CONTRIBUTING.md)에서 확인할 수 있습니다.
+
+### 어디서부터 시작해야 할지 혼란스러우신가요?
+기여를 시작하기에 최적의 장소입니다: [First time issues project](https://github.com/Budibase/budibase/projects/22)
+
+### 리포지토리 구성
+
+Budibase는 Lerna로 관리되는 단일 리포지토리(monorepo)입니다. Lerna는 변경 사항이 있을 때마다 Budibase 패키지들을 빌드하고 게시합니다. 크게 보면 Budibase는 다음 패키지들로 구성됩니다:
+
+- [packages/builder](https://github.com/Budibase/budibase/tree/HEAD/packages/builder) - budibase builder의 클라이언트 측 svelte 애플리케이션 코드가 포함되어 있습니다.
+
+- [packages/client](https://github.com/Budibase/budibase/tree/HEAD/packages/client) - 브라우저에서 실행되며 앱 정의를 읽어 실제 동작하는 웹 애플리케이션으로 렌더링하는 모듈입니다.
+
+- [packages/server](https://github.com/Budibase/budibase/tree/HEAD/packages/server) - Budibase의 서버 부분입니다. 이 Koa 애플리케이션은 빌더에게 Budibase 애플리케이션을 생성하는 데 필요한 것을 제공하는 역할을 합니다. 또한 데이터베이스 및 파일 저장소와 상호 작용할 수 있는 API를 제공합니다.
+
+자세한 내용은 [CONTRIBUTING.md](https://github.com/Budibase/budibase/blob/HEAD/docs/CONTRIBUTING.md) 문서를 참조하세요.
+

+
+## 📝 라이선스
+
+Budibase는 오픈 소스이며 [GPL v3](https://www.gnu.org/licenses/gpl-3.0.en.html) 라이선스를 따릅니다. 클라이언트 및 컴포넌트 라이브러리에는 [MPL](https://directory.fsf.org/wiki/License:MPL-2.0) 라이선스가 적용되므로, 빌드한 애플리케이션에는 원하는 라이선스를 자유롭게 적용할 수 있습니다.
+

+
+## ⭐ 스타 수의 역사
+
+[![Stargazers over time](https://starchart.cc/Budibase/budibase.svg)](https://starchart.cc/Budibase/budibase)
+
+빌더 업데이트 중 문제가 발생하는 경우 [여기](https://github.com/Budibase/budibase/blob/HEAD/docs/CONTRIBUTING.md#troubleshooting)를 참고하여 환경을 정리해 주세요.
+

+
+## Contributors ✨
+
+훌륭한 여러분께 감사할 따름입니다. ([emoji key](https://allcontributors.org/docs/en/emoji-key)):
+

+- Martin McKeaveney: 💻 📖 ⚠️ 🚇
+- Michael Drury: 📖 💻 ⚠️ 🚇
+- Andrew Kingston: 📖 💻 ⚠️ 🎨
+- Michael Shanks: 📖 💻 ⚠️
+- Kevin Åberg Kultalahti: 📖 💻 ⚠️
+- Joe: 📖 💻 🖋 🎨
+- Rory Powell: 💻 📖 ⚠️
+- Peter Clement: 💻 📖 ⚠️
+- Conor_Mack: 💻 ⚠️
+- pngwn: 💻 ⚠️
+- HugoLd: 💻
+- victoriasloan: 💻
+- yashank09: 💻
+- SOVLOOKUP: 💻
+- seoulaja: 🌍
+- Maurits Lourens: ⚠️ 💻
+
+이 프로젝트는 [all-contributors](https://github.com/all-contributors/all-contributors) 사양을 따릅니다.
+모든 종류의 기여를 환영합니다!
diff --git a/lerna.json b/lerna.json
index 54e106cd5a..a50794e91e 100644
--- a/lerna.json
+++ b/lerna.json
@@ -1,5 +1,5 @@
 {
-  "version": "2.20.10",
+  "version": "2.21.4",
   "npmClient": "yarn",
   "packages": [
     "packages/*",
diff --git a/packages/account-portal b/packages/account-portal
index de6d44c372..0c050591c2 160000
--- a/packages/account-portal
+++ b/packages/account-portal
@@ -1 +1 @@
-Subproject commit de6d44c372a7f48ca0ce8c6c0c19311d4bc21646
+Subproject commit 0c050591c21d3b67dc0c9225d60cc9e2324c8dac
diff --git a/packages/backend-core/package.json b/packages/backend-core/package.json
index 3f8c34f823..fe56780982 100644
--- a/packages/backend-core/package.json
+++ b/packages/backend-core/package.json
@@ -67,7 +67,7 @@
     "@types/lodash": "4.14.200",
     "@types/node-fetch": "2.6.4",
     "@types/pouchdb": "6.4.0",
-    "@types/redlock": "4.0.3",
+    "@types/redlock": "4.0.7",
     "@types/semver": "7.3.7",
     "@types/tar-fs": "2.0.1",
     "@types/uuid": "8.3.4",
@@ -78,6 +78,7 @@
     "jest-serial-runner": "1.2.1",
     "pino-pretty": "10.0.0",
     "pouchdb-adapter-memory": "7.2.2",
+    "testcontainers": "^10.7.2",
     "timekeeper": "2.2.0",
     "typescript": "5.2.2"
   },
diff --git a/packages/backend-core/src/cache/base/index.ts b/packages/backend-core/src/cache/base/index.ts
index 264984c6a5..74da4fe0d2 100644
--- a/packages/backend-core/src/cache/base/index.ts
+++ b/packages/backend-core/src/cache/base/index.ts
@@ -23,6 +23,18 @@ export default class BaseCache {
     return client.keys(pattern)
   }
 
+  async exists(key: string, opts = { useTenancy: true }) {
+    key = opts.useTenancy ? generateTenantKey(key) : key
+    const client = await this.getClient()
+    return client.exists(key)
+  }
+
+  async scan(key: string, opts = { useTenancy: true }) {
+    key = opts.useTenancy ? generateTenantKey(key) : key
+    const client = await this.getClient()
+    return client.scan(key)
+  }
+
   /**
    * Read only from the cache.
    */
@@ -32,6 +44,15 @@ export default class BaseCache {
     return client.get(key)
   }
 
+  /**
+   * Read only from the cache.
+   */
+  async bulkGet(keys: string[], opts = { useTenancy: true }) {
+    keys = opts.useTenancy ? keys.map(key => generateTenantKey(key)) : keys
+    const client = await this.getClient()
+    return client.bulkGet(keys)
+  }
+
   /**
    * Write to the cache.
    */
@@ -46,6 +67,25 @@ export default class BaseCache {
     await client.store(key, value, ttl)
   }
 
+  /**
+   * Bulk write to the cache.
+   */
+  async bulkStore(
+    data: Record<string, any>,
+    ttl: number | null = null,
+    opts = { useTenancy: true }
+  ) {
+    if (opts.useTenancy) {
+      data = Object.entries(data).reduce((acc, [key, value]) => {
+        acc[generateTenantKey(key)] = value
+        return acc
+      }, {} as Record<string, any>)
+    }
+
+    const client = await this.getClient()
+    await client.bulkStore(data, ttl)
+  }
+
   /**
    * Remove from cache.
    */
@@ -55,15 +95,24 @@ export default class BaseCache {
     return client.delete(key)
   }
 
+  /**
+   * Remove from cache.
+   */
+  async bulkDelete(keys: string[], opts = { useTenancy: true }) {
+    keys = opts.useTenancy ? keys.map(key => generateTenantKey(key)) : keys
+    const client = await this.getClient()
+    return client.bulkDelete(keys)
+  }
+
   /**
    * Read from the cache. Write to the cache if not exists.
    */
-  async withCache(
+  async withCache<T>(
     key: string,
-    ttl: number,
-    fetchFn: any,
+    ttl: number | null = null,
+    fetchFn: () => Promise<T> | T,
     opts = { useTenancy: true }
-  ) {
+  ): Promise<T> {
     const cachedValue = await this.get(key, opts)
     if (cachedValue) {
       return cachedValue
     }
@@ -89,4 +138,13 @@ export default class BaseCache {
       throw err
     }
   }
+
+  /**
+   * Delete the entry if the provided value matches the stored one.
+   */
+  async deleteIfValue(key: string, value: any, opts = { useTenancy: true }) {
+    key = opts.useTenancy ? generateTenantKey(key) : key
+    const client = await this.getClient()
+    await client.deleteIfValue(key, value)
+  }
 }
diff --git a/packages/backend-core/src/cache/docWritethrough.ts b/packages/backend-core/src/cache/docWritethrough.ts
new file mode 100644
index 0000000000..51018b2317
--- /dev/null
+++ b/packages/backend-core/src/cache/docWritethrough.ts
@@ -0,0 +1,86 @@
+import { AnyDocument, Database } from "@budibase/types"
+
+import { JobQueue, createQueue } from "../queue"
+import * as dbUtils from "../db"
+
+interface ProcessDocMessage {
+  dbName: string
+  docId: string
+  data: Record<string, any>
+}
+
+const PERSIST_MAX_ATTEMPTS = 100
+
+export const docWritethroughProcessorQueue = createQueue<ProcessDocMessage>(
+  JobQueue.DOC_WRITETHROUGH_QUEUE,
+  {
+    jobOptions: {
+      attempts: PERSIST_MAX_ATTEMPTS,
+    },
+  }
+)
+
+class DocWritethroughProcessor {
+  init() {
+    docWritethroughProcessorQueue.process(async message => {
+      try {
+        await this.persistToDb(message.data)
+      } catch (err: any) {
+        if (err.status === 409) {
+          // If we get a 409, it means that another job updated it meanwhile. We want to retry it to persist it again.
+          throw new Error(
+            `Conflict persisting message ${message.id}. Attempt ${message.attemptsMade}`
+          )
+        }
+
+        throw err
+      }
+    })
+    return this
+  }
+
+  private async persistToDb({
+    dbName,
+    docId,
+    data,
+  }: {
+    dbName: string
+    docId: string
+    data: Record<string, any>
+  }) {
+    const db = dbUtils.getDB(dbName)
+    let doc: AnyDocument | undefined
+    try {
+      doc = await db.get(docId)
+    } catch {
+      doc = { _id: docId }
+    }
+
+    doc = { ...doc, ...data }
+    await db.put(doc)
+  }
+}
+
+export const processor = new DocWritethroughProcessor().init()
+
+export class DocWritethrough {
+  private db: Database
+  private _docId: string
+
+  constructor(db: Database, docId: string) {
+    this.db = db
+    this._docId = docId
+  }
+
+  get docId() {
+    return this._docId
+  }
+
+  async patch(data: Record<string, any>) {
+    await docWritethroughProcessorQueue.add({
+      dbName: this.db.name,
+      docId: this.docId,
+      data,
+    })
+  }
+}
diff --git a/packages/backend-core/src/cache/generic.ts b/packages/backend-core/src/cache/generic.ts
index 3ac323a8d4..2d6d8b9472 100644
--- a/packages/backend-core/src/cache/generic.ts
+++ b/packages/backend-core/src/cache/generic.ts
@@ -26,7 +26,8 @@ export const store = (...args: Parameters<typeof GENERIC.store>) =>
   GENERIC.store(...args)
 export const destroy = (...args: Parameters<typeof GENERIC.delete>) =>
   GENERIC.delete(...args)
-export const withCache = (...args: Parameters<typeof GENERIC.withCache>) =>
-  GENERIC.withCache(...args)
+export const withCache = <T>(
+  ...args: Parameters<typeof GENERIC.withCache<T>>
+) => GENERIC.withCache(...args)
 export const bustCache = (...args: Parameters<typeof GENERIC.bustCache>) =>
   GENERIC.bustCache(...args)
diff --git a/packages/backend-core/src/cache/index.ts b/packages/backend-core/src/cache/index.ts
index 4fa986e4e2..3b25108634 100644
--- a/packages/backend-core/src/cache/index.ts
+++ b/packages/backend-core/src/cache/index.ts
@@ -5,3 +5,4 @@ export * as writethrough from "./writethrough"
 export * as invite from "./invite"
 export * as passwordReset from "./passwordReset"
 export * from
"./generic" +export * as docWritethrough from "./docWritethrough" diff --git a/packages/backend-core/src/cache/tests/docWritethrough.spec.ts b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts new file mode 100644 index 0000000000..d90c83afd3 --- /dev/null +++ b/packages/backend-core/src/cache/tests/docWritethrough.spec.ts @@ -0,0 +1,288 @@ +import tk from "timekeeper" + +import _ from "lodash" +import { DBTestConfiguration, generator, structures } from "../../../tests" +import { getDB } from "../../db" + +import { + DocWritethrough, + docWritethroughProcessorQueue, +} from "../docWritethrough" + +import InMemoryQueue from "../../queue/inMemoryQueue" + +const initialTime = Date.now() + +async function waitForQueueCompletion() { + const queue: InMemoryQueue = docWritethroughProcessorQueue as never + await queue.waitForCompletion() +} + +describe("docWritethrough", () => { + const config = new DBTestConfiguration() + + const db = getDB(structures.db.id()) + let documentId: string + let docWritethrough: DocWritethrough + + describe("patch", () => { + function generatePatchObject(fieldCount: number) { + const keys = generator.unique(() => generator.word(), fieldCount) + return keys.reduce((acc, c) => { + acc[c] = generator.word() + return acc + }, {} as Record) + } + + beforeEach(async () => { + jest.clearAllMocks() + documentId = structures.uuid() + docWritethrough = new DocWritethrough(db, documentId) + }) + + it("patching will not persist until the messages are persisted", async () => { + await config.doInTenant(async () => { + await docWritethrough.patch(generatePatchObject(2)) + await docWritethrough.patch(generatePatchObject(2)) + + expect(await db.exists(documentId)).toBe(false) + }) + }) + + it("patching will persist when the messages are persisted", async () => { + await config.doInTenant(async () => { + const patch1 = generatePatchObject(2) + const patch2 = generatePatchObject(2) + await docWritethrough.patch(patch1) + await docWritethrough.patch(patch2) + + await waitForQueueCompletion() + + // This will not be persisted + const patch3 = generatePatchObject(3) + await docWritethrough.patch(patch3) + + expect(await db.get(documentId)).toEqual({ + _id: documentId, + ...patch1, + ...patch2, + _rev: expect.stringMatching(/2-.+/), + createdAt: new Date(initialTime).toISOString(), + updatedAt: new Date(initialTime).toISOString(), + }) + }) + }) + + it("patching will persist keeping the previous data", async () => { + await config.doInTenant(async () => { + const patch1 = generatePatchObject(2) + const patch2 = generatePatchObject(2) + await docWritethrough.patch(patch1) + await docWritethrough.patch(patch2) + + await waitForQueueCompletion() + + const patch3 = generatePatchObject(3) + await docWritethrough.patch(patch3) + + await waitForQueueCompletion() + + expect(await db.get(documentId)).toEqual( + expect.objectContaining({ + _id: documentId, + ...patch1, + ...patch2, + ...patch3, + }) + ) + }) + }) + + it("date audit fields are set correctly when persisting", async () => { + await config.doInTenant(async () => { + const patch1 = generatePatchObject(2) + const patch2 = generatePatchObject(2) + await docWritethrough.patch(patch1) + const date1 = new Date() + await waitForQueueCompletion() + await docWritethrough.patch(patch2) + + tk.travel(Date.now() + 100) + const date2 = new Date() + await waitForQueueCompletion() + + expect(date1).not.toEqual(date2) + expect(await db.get(documentId)).toEqual( + expect.objectContaining({ + createdAt: date1.toISOString(), + updatedAt: 
date2.toISOString(), + }) + ) + }) + }) + + it("concurrent patches will override keys", async () => { + await config.doInTenant(async () => { + const patch1 = generatePatchObject(2) + await docWritethrough.patch(patch1) + await waitForQueueCompletion() + const patch2 = generatePatchObject(1) + await docWritethrough.patch(patch2) + + const keyToOverride = _.sample(Object.keys(patch1))! + expect(await db.get(documentId)).toEqual( + expect.objectContaining({ + [keyToOverride]: patch1[keyToOverride], + }) + ) + + await waitForQueueCompletion() + + const patch3 = { + ...generatePatchObject(3), + [keyToOverride]: generator.word(), + } + await docWritethrough.patch(patch3) + await waitForQueueCompletion() + + expect(await db.get(documentId)).toEqual( + expect.objectContaining({ + ...patch1, + ...patch2, + ...patch3, + }) + ) + }) + }) + + it("concurrent patches to different docWritethrough will not pollute each other", async () => { + await config.doInTenant(async () => { + const secondDocWritethrough = new DocWritethrough( + db, + structures.db.id() + ) + + const doc1Patch = generatePatchObject(2) + await docWritethrough.patch(doc1Patch) + const doc2Patch = generatePatchObject(1) + await secondDocWritethrough.patch(doc2Patch) + + await waitForQueueCompletion() + + const doc1Patch2 = generatePatchObject(3) + await docWritethrough.patch(doc1Patch2) + const doc2Patch2 = generatePatchObject(3) + await secondDocWritethrough.patch(doc2Patch2) + await waitForQueueCompletion() + + expect(await db.get(docWritethrough.docId)).toEqual( + expect.objectContaining({ + ...doc1Patch, + ...doc1Patch2, + }) + ) + + expect(await db.get(secondDocWritethrough.docId)).toEqual( + expect.objectContaining({ + ...doc2Patch, + ...doc2Patch2, + }) + ) + }) + }) + + it("cached values are persisted only once", async () => { + await config.doInTenant(async () => { + const initialPatch = generatePatchObject(5) + + await docWritethrough.patch(initialPatch) + await waitForQueueCompletion() + + expect(await db.get(documentId)).toEqual( + expect.objectContaining(initialPatch) + ) + + await db.remove(await db.get(documentId)) + + await waitForQueueCompletion() + const extraPatch = generatePatchObject(5) + await docWritethrough.patch(extraPatch) + await waitForQueueCompletion() + + expect(await db.get(documentId)).toEqual( + expect.objectContaining(extraPatch) + ) + expect(await db.get(documentId)).not.toEqual( + expect.objectContaining(initialPatch) + ) + }) + }) + + it("concurrent calls will not cause conflicts", async () => { + async function parallelPatch(count: number) { + const patches = Array.from({ length: count }).map(() => + generatePatchObject(1) + ) + await Promise.all(patches.map(p => docWritethrough.patch(p))) + + return patches.reduce((acc, c) => { + acc = { ...acc, ...c } + return acc + }, {}) + } + const queueMessageSpy = jest.spyOn(docWritethroughProcessorQueue, "add") + + await config.doInTenant(async () => { + let patches = await parallelPatch(5) + expect(queueMessageSpy).toBeCalledTimes(5) + + await waitForQueueCompletion() + expect(await db.get(documentId)).toEqual( + expect.objectContaining(patches) + ) + + patches = { ...patches, ...(await parallelPatch(40)) } + expect(queueMessageSpy).toBeCalledTimes(45) + + await waitForQueueCompletion() + expect(await db.get(documentId)).toEqual( + expect.objectContaining(patches) + ) + + patches = { ...patches, ...(await parallelPatch(10)) } + expect(queueMessageSpy).toBeCalledTimes(55) + + await waitForQueueCompletion() + expect(await db.get(documentId)).toEqual( + 
expect.objectContaining(patches) + ) + }) + }) + + // This is not yet supported + it.skip("patches will execute in order", async () => { + let incrementalValue = 0 + const keyToOverride = generator.word() + async function incrementalPatches(count: number) { + for (let i = 0; i < count; i++) { + await docWritethrough.patch({ [keyToOverride]: incrementalValue++ }) + } + } + + await config.doInTenant(async () => { + await incrementalPatches(5) + + await waitForQueueCompletion() + expect(await db.get(documentId)).toEqual( + expect.objectContaining({ [keyToOverride]: 5 }) + ) + + await incrementalPatches(40) + await waitForQueueCompletion() + expect(await db.get(documentId)).toEqual( + expect.objectContaining({ [keyToOverride]: 45 }) + ) + }) + }) + }) +}) diff --git a/packages/backend-core/src/cache/user.ts b/packages/backend-core/src/cache/user.ts index 313b9a4d4a..ecfa20f99e 100644 --- a/packages/backend-core/src/cache/user.ts +++ b/packages/backend-core/src/cache/user.ts @@ -6,7 +6,7 @@ import env from "../environment" import * as accounts from "../accounts" import { UserDB } from "../users" import { sdk } from "@budibase/shared-core" -import { User } from "@budibase/types" +import { User, UserMetadata } from "@budibase/types" const EXPIRY_SECONDS = 3600 @@ -15,7 +15,7 @@ const EXPIRY_SECONDS = 3600 */ async function populateFromDB(userId: string, tenantId: string) { const db = tenancy.getTenantDB(tenantId) - const user = await db.get(userId) + const user = await db.get(userId) user.budibaseAccess = true if (!env.SELF_HOSTED && !env.DISABLE_ACCOUNT_PORTAL) { const account = await accounts.getAccount(user.email) diff --git a/packages/backend-core/src/constants/db.ts b/packages/backend-core/src/constants/db.ts index ac00483021..f4caac502e 100644 --- a/packages/backend-core/src/constants/db.ts +++ b/packages/backend-core/src/constants/db.ts @@ -57,6 +57,9 @@ export const StaticDatabases = { AUDIT_LOGS: { name: "audit-logs", }, + SCIM_LOGS: { + name: "scim-logs", + }, } export const APP_PREFIX = prefixed(DocumentType.APP) diff --git a/packages/backend-core/src/context/mainContext.ts b/packages/backend-core/src/context/mainContext.ts index 36fd5dcb48..ae86695168 100644 --- a/packages/backend-core/src/context/mainContext.ts +++ b/packages/backend-core/src/context/mainContext.ts @@ -35,6 +35,17 @@ export function getAuditLogDBName(tenantId?: string) { } } +export function getScimDBName(tenantId?: string) { + if (!tenantId) { + tenantId = getTenantId() + } + if (tenantId === DEFAULT_TENANT_ID) { + return StaticDatabases.SCIM_LOGS.name + } else { + return `${tenantId}${SEPARATOR}${StaticDatabases.SCIM_LOGS.name}` + } +} + export function baseGlobalDBName(tenantId: string | undefined | null) { if (!tenantId || tenantId === DEFAULT_TENANT_ID) { return StaticDatabases.GLOBAL.name diff --git a/packages/backend-core/src/db/Replication.ts b/packages/backend-core/src/db/Replication.ts index f91a37ce8f..9c960d76dd 100644 --- a/packages/backend-core/src/db/Replication.ts +++ b/packages/backend-core/src/db/Replication.ts @@ -1,66 +1,57 @@ +import PouchDB from "pouchdb" import { getPouchDB, closePouchDB } from "./couch" import { DocumentType } from "../constants" class Replication { - source: any - target: any - replication: any + source: PouchDB.Database + target: PouchDB.Database - /** - * - * @param source - the DB you want to replicate or rollback to - * @param target - the DB you want to replicate to, or rollback from - */ - constructor({ source, target }: any) { + constructor({ source, target }: { 
source: string; target: string }) {
     this.source = getPouchDB(source)
     this.target = getPouchDB(target)
   }
 
-  close() {
-    return Promise.all([closePouchDB(this.source), closePouchDB(this.target)])
+  async close() {
+    await Promise.all([closePouchDB(this.source), closePouchDB(this.target)])
   }
 
-  promisify(operation: any, opts = {}) {
-    return new Promise(resolve => {
-      operation(this.target, opts)
-        .on("denied", function (err: any) {
+  replicate(opts: PouchDB.Replication.ReplicateOptions = {}) {
+    return new Promise<PouchDB.Replication.ReplicationResult<{}>>(resolve => {
+      this.source.replicate
+        .to(this.target, opts)
+        .on("denied", function (err) {
           // a document failed to replicate (e.g. due to permissions)
           throw new Error(`Denied: Document failed to replicate ${err}`)
         })
-        .on("complete", function (info: any) {
+        .on("complete", function (info) {
           return resolve(info)
         })
-        .on("error", function (err: any) {
+        .on("error", function (err) {
           throw new Error(`Replication Error: ${err}`)
         })
     })
   }
 
-  /**
-   * Two way replication operation, intended to be promise based.
-   * @param opts - PouchDB replication options
-   */
-  sync(opts = {}) {
-    this.replication = this.promisify(this.source.sync, opts)
-    return this.replication
-  }
+  appReplicateOpts(
+    opts: PouchDB.Replication.ReplicateOptions = {}
+  ): PouchDB.Replication.ReplicateOptions {
+    if (typeof opts.filter === "string") {
+      return opts
+    }
 
-  /**
-   * One way replication operation, intended to be promise based.
-   * @param opts - PouchDB replication options
-   */
-  replicate(opts = {}) {
-    this.replication = this.promisify(this.source.replicate.to, opts)
-    return this.replication
-  }
+    const filter = opts.filter
+    delete opts.filter
 
-  appReplicateOpts() {
     return {
-      filter: (doc: any) => {
+      ...opts,
+      filter: (doc: any, params: any) => {
         if (doc._id && doc._id.startsWith(DocumentType.AUTOMATION_LOG)) {
           return false
         }
-        return doc._id !== DocumentType.APP_METADATA
+        if (doc._id === DocumentType.APP_METADATA) {
+          return false
+        }
+        return filter ?
filter(doc, params) : true
+      },
+    }
+  }
@@ -75,10 +66,6 @@ class Replication {
     // take the opportunity to remove deleted tombstones
     await this.replicate()
   }
-
-  cancel() {
-    this.replication.cancel()
-  }
 }
 
 export default Replication
diff --git a/packages/backend-core/src/db/couch/DatabaseImpl.ts b/packages/backend-core/src/db/couch/DatabaseImpl.ts
index 0e2b4173b0..416313f520 100644
--- a/packages/backend-core/src/db/couch/DatabaseImpl.ts
+++ b/packages/backend-core/src/db/couch/DatabaseImpl.ts
@@ -11,6 +11,7 @@ import {
   Document,
   isDocument,
   RowResponse,
+  RowValue,
 } from "@budibase/types"
 import { getCouchInfo } from "./connections"
 import { directCouchUrlCall } from "./utils"
@@ -69,7 +70,15 @@ export class DatabaseImpl implements Database {
     DatabaseImpl.nano = buildNano(couchInfo)
   }
 
-  async exists() {
+  exists(docId?: string) {
+    if (docId === undefined) {
+      return this.dbExists()
+    }
+
+    return this.docExists(docId)
+  }
+
+  private async dbExists() {
     const response = await directCouchUrlCall({
       url: `${this.couchInfo.url}/${this.name}`,
       method: "HEAD",
@@ -78,6 +87,15 @@
     return response.status === 200
   }
 
+  private async docExists(id: string): Promise<boolean> {
+    try {
+      await this.performCall(db => () => db.head(id))
+      return true
+    } catch {
+      return false
+    }
+  }
+
   private nano() {
     return this.instanceNano || DatabaseImpl.nano
   }
@@ -221,7 +239,7 @@ export class DatabaseImpl implements Database {
     })
   }
 
-  async allDocs<T extends Document>(
+  async allDocs<T extends Document | RowValue>(
     params: DatabaseQueryOpts
   ): Promise<AllDocsResponse<T>> {
     return this.performCall(db => {
diff --git a/packages/backend-core/src/db/instrumentation.ts b/packages/backend-core/src/db/instrumentation.ts
index aa2ac424ae..795f30d7cd 100644
--- a/packages/backend-core/src/db/instrumentation.ts
+++ b/packages/backend-core/src/db/instrumentation.ts
@@ -1,5 +1,4 @@
 import {
-  DocumentScope,
   DocumentDestroyResponse,
   DocumentInsertResponse,
   DocumentBulkResponse,
@@ -13,6 +12,7 @@
   DatabasePutOpts,
   DatabaseQueryOpts,
   Document,
+  RowValue,
 } from "@budibase/types"
 import tracer from "dd-trace"
 import { Writable } from "stream"
@@ -24,9 +24,12 @@ export class DDInstrumentedDatabase implements Database {
     return this.db.name
   }
 
-  exists(): Promise<boolean> {
+  exists(docId?: string): Promise<boolean> {
     return tracer.trace("db.exists", span => {
-      span?.addTags({ db_name: this.name })
+      span?.addTags({ db_name: this.name, doc_id: docId })
+      if (docId) {
+        return this.db.exists(docId)
+      }
       return this.db.exists()
     })
   }
 
@@ -79,7 +82,7 @@ export class DDInstrumentedDatabase implements Database {
     })
   }
 
-  allDocs<T extends Document>(
+  allDocs<T extends Document | RowValue>(
     params: DatabaseQueryOpts
   ): Promise<AllDocsResponse<T>> {
     return tracer.trace("db.allDocs", span => {
diff --git a/packages/backend-core/src/db/tests/DatabaseImpl.spec.ts b/packages/backend-core/src/db/tests/DatabaseImpl.spec.ts
new file mode 100644
index 0000000000..586f13f417
--- /dev/null
+++ b/packages/backend-core/src/db/tests/DatabaseImpl.spec.ts
@@ -0,0 +1,55 @@
+import _ from "lodash"
+import { AnyDocument } from "@budibase/types"
+import { generator } from "../../../tests"
+import { DatabaseImpl } from "../couch"
+import { newid } from "../../utils"
+
+describe("DatabaseImpl", () => {
+  const database = new DatabaseImpl(generator.word())
+  const documents: AnyDocument[] = []
+
+  beforeAll(async () => {
+    const docsToCreate = Array.from({ length: 10 }).map(() => ({
+      _id: newid(),
+    }))
+    const createdDocs = await database.bulkDocs(docsToCreate)
+
+    documents.push(...createdDocs.map((x: any) => ({ _id: x.id, _rev: x.rev })))
+  })
+
+  describe("document exists",
() => { + it("can check existing docs by id", async () => { + const existingDoc = _.sample(documents) + const result = await database.exists(existingDoc!._id!) + + expect(result).toBe(true) + }) + + it("can check non existing docs by id", async () => { + const result = await database.exists(newid()) + + expect(result).toBe(false) + }) + + it("can check an existing doc by id multiple times", async () => { + const existingDoc = _.sample(documents) + const id = existingDoc!._id! + + const results = [] + results.push(await database.exists(id)) + results.push(await database.exists(id)) + results.push(await database.exists(id)) + + expect(results).toEqual([true, true, true]) + }) + + it("returns false after the doc is deleted", async () => { + const existingDoc = _.sample(documents) + const id = existingDoc!._id! + expect(await database.exists(id)).toBe(true) + + await database.remove(existingDoc!) + expect(await database.exists(id)).toBe(false) + }) + }) +}) diff --git a/packages/backend-core/src/docIds/ids.ts b/packages/backend-core/src/docIds/ids.ts index 02176109da..9627b2b94c 100644 --- a/packages/backend-core/src/docIds/ids.ts +++ b/packages/backend-core/src/docIds/ids.ts @@ -74,7 +74,7 @@ export function getGlobalIDFromUserMetadataID(id: string) { * Generates a template ID. * @param ownerId The owner/user of the template, this could be global or a workspace level. */ -export function generateTemplateID(ownerId: any) { +export function generateTemplateID(ownerId: string) { return `${DocumentType.TEMPLATE}${SEPARATOR}${ownerId}${SEPARATOR}${newid()}` } @@ -105,7 +105,7 @@ export function prefixRoleID(name: string) { * Generates a new dev info document ID - this is scoped to a user. * @returns The new dev info ID which info for dev (like api key) can be stored under. 
*/ -export const generateDevInfoID = (userId: any) => { +export const generateDevInfoID = (userId: string) => { return `${DocumentType.DEV_INFO}${SEPARATOR}${userId}` } diff --git a/packages/backend-core/src/environment.ts b/packages/backend-core/src/environment.ts index b3179cbeea..2da2a77d67 100644 --- a/packages/backend-core/src/environment.ts +++ b/packages/backend-core/src/environment.ts @@ -186,6 +186,7 @@ const environment = { environment[key] = value }, ROLLING_LOG_MAX_SIZE: process.env.ROLLING_LOG_MAX_SIZE || "10M", + DISABLE_SCIM_CALLS: process.env.DISABLE_SCIM_CALLS, } // clean up any environment variable edge cases diff --git a/packages/backend-core/src/middleware/errorHandling.ts b/packages/backend-core/src/middleware/errorHandling.ts index ebdd4107e9..2b8f7195ed 100644 --- a/packages/backend-core/src/middleware/errorHandling.ts +++ b/packages/backend-core/src/middleware/errorHandling.ts @@ -1,5 +1,6 @@ import { APIError } from "@budibase/types" import * as errors from "../errors" +import environment from "../environment" export async function errorHandling(ctx: any, next: any) { try { @@ -14,15 +15,19 @@ export async function errorHandling(ctx: any, next: any) { console.error(err) } - const error = errors.getPublicError(err) - const body: APIError = { + let error: APIError = { message: err.message, status: status, validationErrors: err.validation, - error, + error: errors.getPublicError(err), } - ctx.body = body + if (environment.isTest() && ctx.headers["x-budibase-include-stacktrace"]) { + // @ts-ignore + error.stack = err.stack + } + + ctx.body = error } } diff --git a/packages/backend-core/src/objectStore/buckets/plugins.ts b/packages/backend-core/src/objectStore/buckets/plugins.ts index 6f1b7116ae..02be9345ab 100644 --- a/packages/backend-core/src/objectStore/buckets/plugins.ts +++ b/packages/backend-core/src/objectStore/buckets/plugins.ts @@ -6,7 +6,7 @@ import { Plugin } from "@budibase/types" // URLS -export function enrichPluginURLs(plugins: Plugin[]) { +export function enrichPluginURLs(plugins?: Plugin[]): Plugin[] { if (!plugins || !plugins.length) { return [] } diff --git a/packages/backend-core/src/queue/constants.ts b/packages/backend-core/src/queue/constants.ts index eb4f21aced..a095c6c769 100644 --- a/packages/backend-core/src/queue/constants.ts +++ b/packages/backend-core/src/queue/constants.ts @@ -4,4 +4,5 @@ export enum JobQueue { AUDIT_LOG = "auditLogQueue", SYSTEM_EVENT_QUEUE = "systemEventQueue", APP_MIGRATION = "appMigration", + DOC_WRITETHROUGH_QUEUE = "docWritethroughQueue", } diff --git a/packages/backend-core/src/queue/inMemoryQueue.ts b/packages/backend-core/src/queue/inMemoryQueue.ts index c05bbffbe9..afb5592562 100644 --- a/packages/backend-core/src/queue/inMemoryQueue.ts +++ b/packages/backend-core/src/queue/inMemoryQueue.ts @@ -1,5 +1,14 @@ import events from "events" -import { timeout } from "../utils" +import { newid, timeout } from "../utils" +import { Queue, QueueOptions, JobOptions } from "./queue" + +interface JobMessage { + id: string + timestamp: number + queue: string + data: any + opts?: JobOptions +} /** * Bull works with a Job wrapper around all messages that contains a lot more information about @@ -10,12 +19,13 @@ import { timeout } from "../utils" * @returns A new job which can now be put onto the queue, this is mostly an * internal structure so that an in memory queue can be easily swapped for a Bull queue. 
 */
-function newJob(queue: string, message: any) {
+function newJob(queue: string, message: any, opts?: JobOptions): JobMessage {
   return {
+    id: newid(),
     timestamp: Date.now(),
     queue: queue,
     data: message,
-    opts: {},
+    opts,
   }
 }
 
@@ -24,26 +34,29 @@
 * It is relatively simple, using an event emitter internally to register when messages are available
 * to the consumers - in can support many inputs and many consumers.
 */
-class InMemoryQueue {
+class InMemoryQueue implements Partial<Queue> {
  _name: string
-  _opts?: any
-  _messages: any[]
+  _opts?: QueueOptions
+  _messages: JobMessage[]
+  _queuedJobIds: Set<string>
  _emitter: EventEmitter
  _runCount: number
  _addCount: number
+
  /**
   * The constructor the queue, exactly the same as that of Bulls.
   * @param name The name of the queue which is being configured.
   * @param opts This is not used by the in memory queue as there is no real use
   * case when in memory, but is the same API as Bull
   */
-  constructor(name: string, opts?: any) {
+  constructor(name: string, opts?: QueueOptions) {
    this._name = name
    this._opts = opts
    this._messages = []
    this._emitter = new events.EventEmitter()
    this._runCount = 0
    this._addCount = 0
+    this._queuedJobIds = new Set()
  }
 
  /**
@@ -55,22 +68,42 @@
   * note this is incredibly limited compared to Bull as in reality the Job would contain
   * a lot more information about the queue and current status of Bull cluster.
   */
-  process(func: any) {
+  async process(func: any) {
    this._emitter.on("message", async () => {
      if (this._messages.length <= 0) {
        return
      }
      let msg = this._messages.shift()
+      let resp = func(msg)
+
+      async function retryFunc(fnc: any) {
+        try {
+          await fnc
+        } catch (e: any) {
+          await new Promise<void>(r => setTimeout(() => r(), 50))
+
+          await retryFunc(func(msg))
+        }
+      }
+
      if (resp.then != null) {
-        await resp
+        try {
+          await retryFunc(resp)
+        } catch (e: any) {
+          console.error(e)
+        }
      }
      this._runCount++
+      const jobId = msg?.opts?.jobId?.toString()
+      if (jobId && msg?.opts?.removeOnComplete) {
+        this._queuedJobIds.delete(jobId)
+      }
    })
  }
 
  async isReady() {
-    return true
+    return this as any
  }
 
  // simply puts a message to the queue and emits to the queue for processing
@@ -83,27 +116,45 @@
   * @param repeat serves no purpose for the import queue.
   */
  // eslint-disable-next-line no-unused-vars
-  add(msg: any, repeat: boolean) {
-    if (typeof msg !== "object") {
+  async add(data: any, opts?: JobOptions) {
+    const jobId = opts?.jobId?.toString()
+    if (jobId && this._queuedJobIds.has(jobId)) {
+      console.log(`Ignoring already queued job ${jobId}`)
+      return
+    }
+
+    if (typeof data !== "object") {
      throw "Queue only supports carrying JSON."
    }
-    this._messages.push(newJob(this._name, msg))
-    this._addCount++
-    this._emitter.emit("message")
+    if (jobId) {
+      this._queuedJobIds.add(jobId)
+    }
+
+    const pushMessage = () => {
+      this._messages.push(newJob(this._name, data, opts))
+      this._addCount++
+      this._emitter.emit("message")
+    }
+
+    const delay = opts?.delay
+    if (delay) {
+      setTimeout(pushMessage, delay)
+    } else {
+      pushMessage()
+    }
+    return {} as any
  }
 
  /**
   * replicating the close function from bull, which waits for jobs to finish.
   */
-  async close() {
-    return []
-  }
+  async close() {}
 
  /**
   * This removes a cron which has been implemented, this is part of Bull API.
   */
-  removeRepeatableByKey(cronJobId: string) {
+  async removeRepeatableByKey(cronJobId: string) {
     // TODO: implement for testing
     console.log(cronJobId)
   }
@@ -111,12 +162,12 @@ class InMemoryQueue {
   /**
    * Implemented for tests
    */
-  getRepeatableJobs() {
+  async getRepeatableJobs() {
     return []
   }
 
   // eslint-disable-next-line no-unused-vars
-  removeJobs(pattern: string) {
+  async removeJobs(pattern: string) {
     // no-op
   }
 
@@ -128,18 +179,22 @@ class InMemoryQueue {
   }
 
   async getJob() {
-    return {}
+    return null
   }
 
   on() {
     // do nothing
-    return this
+    return this as any
   }
 
   async waitForCompletion() {
     do {
       await timeout(50)
-    } while (this._addCount < this._runCount)
+    } while (this.hasRunningJobs())
+  }
+
+  hasRunningJobs() {
+    return this._addCount > this._runCount
   }
 }
diff --git a/packages/backend-core/src/queue/listeners.ts b/packages/backend-core/src/queue/listeners.ts
index 063a01bd2f..14dce5fe8d 100644
--- a/packages/backend-core/src/queue/listeners.ts
+++ b/packages/backend-core/src/queue/listeners.ts
@@ -88,6 +88,7 @@ enum QueueEventType {
   AUDIT_LOG_EVENT = "audit-log-event",
   SYSTEM_EVENT = "system-event",
   APP_MIGRATION = "app-migration",
+  DOC_WRITETHROUGH = "doc-writethrough",
 }
 
 const EventTypeMap: { [key in JobQueue]: QueueEventType } = {
@@ -96,6 +97,7 @@ const EventTypeMap: { [key in JobQueue]: QueueEventType } = {
   [JobQueue.AUDIT_LOG]: QueueEventType.AUDIT_LOG_EVENT,
   [JobQueue.SYSTEM_EVENT_QUEUE]: QueueEventType.SYSTEM_EVENT,
   [JobQueue.APP_MIGRATION]: QueueEventType.APP_MIGRATION,
+  [JobQueue.DOC_WRITETHROUGH_QUEUE]: QueueEventType.DOC_WRITETHROUGH,
 }
 
 function logging(queue: Queue, jobQueue: JobQueue) {
diff --git a/packages/backend-core/src/queue/queue.ts b/packages/backend-core/src/queue/queue.ts
index 0bcb25a35f..1838eed92f 100644
--- a/packages/backend-core/src/queue/queue.ts
+++ b/packages/backend-core/src/queue/queue.ts
@@ -7,6 +7,8 @@ import { addListeners, StalledFn } from "./listeners"
 import { Duration } from "../utils"
 import * as timers from "../timers"
 
+export { QueueOptions, Queue, JobOptions } from "bull"
+
 // the queue lock is held for 5 minutes
 const QUEUE_LOCK_MS = Duration.fromMinutes(5).toMs()
 // queue lock is refreshed every 30 seconds
diff --git a/packages/backend-core/src/redis/init.ts b/packages/backend-core/src/redis/init.ts
index f3bcee3209..7920dfed2d 100644
--- a/packages/backend-core/src/redis/init.ts
+++ b/packages/backend-core/src/redis/init.ts
@@ -9,7 +9,8 @@ let userClient: Client,
   lockClient: Client,
   socketClient: Client,
   inviteClient: Client,
-  passwordResetClient: Client
+  passwordResetClient: Client,
+  docWritethroughClient: Client
 
 export async function init() {
   userClient = await new Client(utils.Databases.USER_CACHE).init()
@@ -24,6 +25,9 @@ export async function init() {
     utils.Databases.SOCKET_IO,
     utils.SelectableDatabase.SOCKET_IO
   ).init()
+  docWritethroughClient = await new Client(
+    utils.Databases.DOC_WRITE_THROUGH
+  ).init()
 }
 
 export async function shutdown() {
@@ -104,3 +108,10 @@ export async function getPasswordResetClient() {
   }
   return passwordResetClient
 }
+
+export async function getDocWritethroughClient() {
+  if (!docWritethroughClient) {
+    await init()
+  }
+  return docWritethroughClient
+}
diff --git a/packages/backend-core/src/redis/redis.ts b/packages/backend-core/src/redis/redis.ts
index d15453ba62..79f75421d3 100644
--- a/packages/backend-core/src/redis/redis.ts
+++ b/packages/backend-core/src/redis/redis.ts
@@ -1,5 +1,5 @@
 import env from "../environment"
-import Redis from "ioredis"
+import Redis, { Cluster } from "ioredis"
 //
mock-redis doesn't have any typing
 let MockRedis: any | undefined
 if (env.MOCK_REDIS) {
@@ -28,7 +28,7 @@ const DEFAULT_SELECT_DB = SelectableDatabase.DEFAULT
 // for testing just generate the client once
 let CLOSED = false
-let CLIENTS: { [key: number]: any } = {}
+const CLIENTS: Record<number, Redis> = {}
 let CONNECTED = false
 
 // mock redis always connected
@@ -36,7 +36,7 @@ if (env.MOCK_REDIS) {
   CONNECTED = true
 }
 
-function pickClient(selectDb: number): any {
+function pickClient(selectDb: number) {
   return CLIENTS[selectDb]
 }
 
@@ -201,12 +201,15 @@ class RedisWrapper {
     key = `${db}${SEPARATOR}${key}`
     let stream
     if (CLUSTERED) {
-      let node = this.getClient().nodes("master")
+      let node = (this.getClient() as never as Cluster).nodes("master")
       stream = node[0].scanStream({ match: key + "*", count: 100 })
     } else {
-      stream = this.getClient().scanStream({ match: key + "*", count: 100 })
+      stream = (this.getClient() as Redis).scanStream({
+        match: key + "*",
+        count: 100,
+      })
     }
-    return promisifyStream(stream, this.getClient())
+    return promisifyStream(stream, this.getClient() as any)
   }
 
   async keys(pattern: string) {
@@ -221,14 +224,16 @@
   async get(key: string) {
     const db = this._db
-    let response = await this.getClient().get(addDbPrefix(db, key))
+    const response = await this.getClient().get(addDbPrefix(db, key))
     // overwrite the prefixed key
+    // @ts-ignore
     if (response != null && response.key) {
+      // @ts-ignore
       response.key = key
     }
     // if its not an object just return the response
     try {
-      return JSON.parse(response)
+      return JSON.parse(response!)
     } catch (err) {
       return response
     }
@@ -274,13 +279,37 @@
     }
   }
 
+  async bulkStore(
+    data: Record<string, any>,
+    expirySeconds: number | null = null
+  ) {
+    const client = this.getClient()
+
+    const dataToStore = Object.entries(data).reduce((acc, [key, value]) => {
+      acc[addDbPrefix(this._db, key)] =
+        typeof value === "object" ?
JSON.stringify(value) : value
+      return acc
+    }, {} as Record<string, any>)
+
+    const pipeline = client.pipeline()
+    pipeline.mset(dataToStore)
+
+    if (expirySeconds !== null) {
+      for (const key of Object.keys(dataToStore)) {
+        pipeline.expire(key, expirySeconds)
+      }
+    }
+
+    await pipeline.exec()
+  }
+
   async getTTL(key: string) {
     const db = this._db
     const prefixedKey = addDbPrefix(db, key)
     return this.getClient().ttl(prefixedKey)
   }
 
-  async setExpiry(key: string, expirySeconds: number | null) {
+  async setExpiry(key: string, expirySeconds: number) {
     const db = this._db
     const prefixedKey = addDbPrefix(db, key)
     await this.getClient().expire(prefixedKey, expirySeconds)
@@ -291,10 +320,35 @@
     await this.getClient().del(addDbPrefix(db, key))
   }
 
+  async bulkDelete(keys: string[]) {
+    const db = this._db
+    await this.getClient().del(keys.map(key => addDbPrefix(db, key)))
+  }
+
   async clear() {
     let items = await this.scan()
     await Promise.all(items.map((obj: any) => this.delete(obj.key)))
   }
+
+  async increment(key: string) {
+    const result = await this.getClient().incr(addDbPrefix(this._db, key))
+    if (isNaN(result)) {
+      throw new Error(`Redis ${key} does not contain a number`)
+    }
+    return result
+  }
+
+  async deleteIfValue(key: string, value: any) {
+    const client = this.getClient()
+
+    const luaScript = `
+    if redis.call('GET', KEYS[1]) == ARGV[1] then
+      redis.call('DEL', KEYS[1])
+    end
+    `
+
+    await client.eval(luaScript, 1, addDbPrefix(this._db, key), value)
+  }
 }
 
 export default RedisWrapper
diff --git a/packages/backend-core/src/redis/redlockImpl.ts b/packages/backend-core/src/redis/redlockImpl.ts
index 7009dc6f55..adeb5b12ec 100644
--- a/packages/backend-core/src/redis/redlockImpl.ts
+++ b/packages/backend-core/src/redis/redlockImpl.ts
@@ -72,7 +72,7 @@ const OPTIONS: Record<LockType, Redlock.Options> = {
 export async function newRedlock(opts: Redlock.Options = {}) {
   const options = { ...OPTIONS.DEFAULT, ...opts }
   const redisWrapper = await getLockClient()
-  const client = redisWrapper.getClient()
+  const client = redisWrapper.getClient() as any
   return new Redlock([client], options)
 }
diff --git a/packages/backend-core/src/redis/tests/redis.spec.ts b/packages/backend-core/src/redis/tests/redis.spec.ts
new file mode 100644
index 0000000000..c2c9e4a14e
--- /dev/null
+++ b/packages/backend-core/src/redis/tests/redis.spec.ts
@@ -0,0 +1,214 @@
+import { GenericContainer, StartedTestContainer } from "testcontainers"
+import { generator, structures } from "../../../tests"
+import RedisWrapper from "../redis"
+import { env } from "../.."
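+
+// These tests talk to a real Redis instance started in beforeAll via
+// testcontainers, with MOCK_REDIS disabled, so they exercise the actual
+// ioredis code paths rather than the in-memory mock used elsewhere.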
+ +jest.setTimeout(30000) + +describe("redis", () => { + let redis: RedisWrapper + let container: StartedTestContainer + + beforeAll(async () => { + const container = await new GenericContainer("redis") + .withExposedPorts(6379) + .start() + + env._set( + "REDIS_URL", + `${container.getHost()}:${container.getMappedPort(6379)}` + ) + env._set("MOCK_REDIS", 0) + env._set("REDIS_PASSWORD", 0) + }) + + afterAll(() => container?.stop()) + + beforeEach(async () => { + redis = new RedisWrapper(structures.db.id()) + await redis.init() + }) + + describe("store", () => { + it("a basic value can be persisted", async () => { + const key = structures.uuid() + const value = generator.word() + + await redis.store(key, value) + + expect(await redis.get(key)).toEqual(value) + }) + + it("objects can be persisted", async () => { + const key = structures.uuid() + const value = { [generator.word()]: generator.word() } + + await redis.store(key, value) + + expect(await redis.get(key)).toEqual(value) + }) + }) + + describe("bulkStore", () => { + function createRandomObject( + keyLength: number, + valueGenerator: () => any = () => generator.word() + ) { + return generator + .unique(() => generator.word(), keyLength) + .reduce((acc, key) => { + acc[key] = valueGenerator() + return acc + }, {} as Record) + } + + it("a basic object can be persisted", async () => { + const data = createRandomObject(10) + + await redis.bulkStore(data) + + for (const [key, value] of Object.entries(data)) { + expect(await redis.get(key)).toEqual(value) + } + + expect(await redis.keys("*")).toHaveLength(10) + }) + + it("a complex object can be persisted", async () => { + const data = { + ...createRandomObject(10, () => createRandomObject(5)), + ...createRandomObject(5), + } + + await redis.bulkStore(data) + + for (const [key, value] of Object.entries(data)) { + expect(await redis.get(key)).toEqual(value) + } + + expect(await redis.keys("*")).toHaveLength(15) + }) + + it("no TTL is set by default", async () => { + const data = createRandomObject(10) + + await redis.bulkStore(data) + + for (const [key, value] of Object.entries(data)) { + expect(await redis.get(key)).toEqual(value) + expect(await redis.getTTL(key)).toEqual(-1) + } + }) + + it("a bulk store can be persisted with TTL", async () => { + const ttl = 500 + const data = createRandomObject(8) + + await redis.bulkStore(data, ttl) + + for (const [key, value] of Object.entries(data)) { + expect(await redis.get(key)).toEqual(value) + expect(await redis.getTTL(key)).toEqual(ttl) + } + + expect(await redis.keys("*")).toHaveLength(8) + }) + + it("setting a TTL of -1 will not persist the key", async () => { + const ttl = -1 + const data = createRandomObject(5) + + await redis.bulkStore(data, ttl) + + for (const [key, value] of Object.entries(data)) { + expect(await redis.get(key)).toBe(null) + } + + expect(await redis.keys("*")).toHaveLength(0) + }) + }) + + describe("increment", () => { + it("can increment on a new key", async () => { + const key = structures.uuid() + const result = await redis.increment(key) + expect(result).toBe(1) + }) + + it("can increment multiple times", async () => { + const key = structures.uuid() + const results = [ + await redis.increment(key), + await redis.increment(key), + await redis.increment(key), + await redis.increment(key), + await redis.increment(key), + ] + expect(results).toEqual([1, 2, 3, 4, 5]) + }) + + it("can increment on a new key", async () => { + const key1 = structures.uuid() + const key2 = structures.uuid() + + const result1 = await 
redis.increment(key1) + expect(result1).toBe(1) + + const result2 = await redis.increment(key2) + expect(result2).toBe(1) + }) + + it("can increment multiple times in parallel", async () => { + const key = structures.uuid() + const results = await Promise.all( + Array.from({ length: 100 }).map(() => redis.increment(key)) + ) + expect(results).toHaveLength(100) + expect(results).toEqual(Array.from({ length: 100 }).map((_, i) => i + 1)) + }) + + it("can increment existing set keys", async () => { + const key = structures.uuid() + await redis.store(key, 70) + await redis.increment(key) + + const result = await redis.increment(key) + expect(result).toBe(72) + }) + + it.each([ + generator.word(), + generator.bool(), + { [generator.word()]: generator.word() }, + ])("cannot increment if the store value is not a number", async value => { + const key = structures.uuid() + await redis.store(key, value) + + await expect(redis.increment(key)).rejects.toThrowError( + "ERR value is not an integer or out of range" + ) + }) + }) + + describe("deleteIfValue", () => { + it("can delete if the value matches", async () => { + const key = structures.uuid() + const value = generator.word() + await redis.store(key, value) + + await redis.deleteIfValue(key, value) + + expect(await redis.get(key)).toBeNull() + }) + + it("will not delete if the value does not matches", async () => { + const key = structures.uuid() + const value = generator.word() + await redis.store(key, value) + + await redis.deleteIfValue(key, generator.word()) + + expect(await redis.get(key)).toEqual(value) + }) + }) +}) diff --git a/packages/backend-core/src/redis/utils.ts b/packages/backend-core/src/redis/utils.ts index 7b93458b52..7f84f11467 100644 --- a/packages/backend-core/src/redis/utils.ts +++ b/packages/backend-core/src/redis/utils.ts @@ -30,6 +30,7 @@ export enum Databases { LOCKS = "locks", SOCKET_IO = "socket_io", BPM_EVENTS = "bpmEvents", + DOC_WRITE_THROUGH = "docWriteThrough", } /** diff --git a/packages/backend-core/src/security/roles.ts b/packages/backend-core/src/security/roles.ts index 4f048c0a11..a64be6b319 100644 --- a/packages/backend-core/src/security/roles.ts +++ b/packages/backend-core/src/security/roles.ts @@ -84,25 +84,24 @@ export function getBuiltinRoles(): { [key: string]: RoleDoc } { return cloneDeep(BUILTIN_ROLES) } -export const BUILTIN_ROLE_ID_ARRAY = Object.values(BUILTIN_ROLES).map( - role => role._id -) +export function isBuiltin(role: string) { + return getBuiltinRole(role) !== undefined +} -export const BUILTIN_ROLE_NAME_ARRAY = Object.values(BUILTIN_ROLES).map( - role => role.name -) - -export function isBuiltin(role?: string) { - return BUILTIN_ROLE_ID_ARRAY.some(builtin => role?.includes(builtin)) +export function getBuiltinRole(roleId: string): Role | undefined { + const role = Object.values(BUILTIN_ROLES).find(role => + roleId.includes(role._id) + ) + if (!role) { + return undefined + } + return cloneDeep(role) } /** * Works through the inheritance ranks to see how far up the builtin stack this ID is. */ -export function builtinRoleToNumber(id?: string) { - if (!id) { - return 0 - } +export function builtinRoleToNumber(id: string) { const builtins = getBuiltinRoles() const MAX = Object.values(builtins).length + 1 if (id === BUILTIN_IDS.ADMIN || id === BUILTIN_IDS.BUILDER) { @@ -123,7 +122,7 @@ export function builtinRoleToNumber(id?: string) { /** * Converts any role to a number, but has to be async to get the roles from db. 
 */
-export async function roleToNumber(id?: string) {
+export async function roleToNumber(id: string) {
   if (isBuiltin(id)) {
     return builtinRoleToNumber(id)
   }
@@ -131,7 +130,7 @@
     defaultPublic: true,
   })) as RoleDoc[]
   for (let role of hierarchy) {
-    if (isBuiltin(role?.inherits)) {
+    if (role?.inherits && isBuiltin(role.inherits)) {
       return builtinRoleToNumber(role.inherits) + 1
     }
   }
@@ -161,35 +160,28 @@ export function lowerBuiltinRoleID(roleId1?: string, roleId2?: string): string {
  * @returns The role object, which may contain an "inherits" property.
  */
 export async function getRole(
-  roleId?: string,
+  roleId: string,
   opts?: { defaultPublic?: boolean }
-): Promise<RoleDoc | undefined> {
-  if (!roleId) {
-    return undefined
-  }
-  let role: any = {}
+): Promise<RoleDoc> {
   // built in roles mostly come from the in-code implementation,
   // but can be extended by a doc stored about them (e.g. permissions)
-  if (isBuiltin(roleId)) {
-    role = cloneDeep(
-      Object.values(BUILTIN_ROLES).find(role => role._id === roleId)
-    )
-  } else {
+  let role: RoleDoc | undefined = getBuiltinRole(roleId)
+  if (!role) {
     // make sure has the prefix (if it has it then it won't be added)
     roleId = prefixRoleID(roleId)
   }
   try {
     const db = getAppDB()
-    const dbRole = await db.get(getDBRoleID(roleId))
-    role = Object.assign(role, dbRole)
+    const dbRole = await db.get<RoleDoc>(getDBRoleID(roleId))
+    role = Object.assign(role || {}, dbRole)
     // finalise the ID
-    role._id = getExternalRoleID(role._id, role.version)
+    role._id = getExternalRoleID(role._id!, role.version)
   } catch (err) {
     if (!isBuiltin(roleId) && opts?.defaultPublic) {
       return cloneDeep(BUILTIN_ROLES.PUBLIC)
     }
     // only throw an error if there is no role at all
-    if (Object.keys(role).length === 0) {
+    if (!role || Object.keys(role).length === 0) {
       throw err
     }
   }
@@ -200,7 +192,7 @@
  * Simple function to get all the roles based on the top level user role ID.
  */
 async function getAllUserRoles(
-  userRoleId?: string,
+  userRoleId: string,
   opts?: { defaultPublic?: boolean }
 ): Promise<RoleDoc[]> {
   // admins have access to all roles
@@ -226,7 +218,7 @@
 }
 
 export async function getUserRoleIdHierarchy(
-  userRoleId?: string
+  userRoleId: string
 ): Promise<string[]> {
   const roles = await getUserRoleHierarchy(userRoleId)
   return roles.map(role => role._id!)
@@ -241,7 +233,7 @@
  * highest level of access and the last being the lowest level.
 */
 export async function getUserRoleHierarchy(
-  userRoleId?: string,
+  userRoleId: string,
   opts?: { defaultPublic?: boolean }
 )
@@ -265,9 +257,9 @@ export function checkForRoleResourceArray(
   return rolePerms
 }
 
-export async function getAllRoleIds(appId?: string) {
+export async function getAllRoleIds(appId: string): Promise<string[]> {
   const roles = await getAllRoles(appId)
-  return roles.map(role => role._id)
+  return roles.map(role => role._id!)
} /** diff --git a/packages/backend-core/tests/core/utilities/structures/accounts.ts b/packages/backend-core/tests/core/utilities/structures/accounts.ts index 515f94db1e..7dcc2de116 100644 --- a/packages/backend-core/tests/core/utilities/structures/accounts.ts +++ b/packages/backend-core/tests/core/utilities/structures/accounts.ts @@ -18,7 +18,7 @@ export const account = (partial: Partial = {}): Account => { return { accountId: uuid(), tenantId: generator.word(), - email: generator.email(), + email: generator.email({ domain: "example.com" }), tenantName: generator.word(), hosting: Hosting.SELF, createdAt: Date.now(), diff --git a/packages/backend-core/tests/core/utilities/structures/scim.ts b/packages/backend-core/tests/core/utilities/structures/scim.ts index 80f41c605d..f424b2881a 100644 --- a/packages/backend-core/tests/core/utilities/structures/scim.ts +++ b/packages/backend-core/tests/core/utilities/structures/scim.ts @@ -13,7 +13,7 @@ interface CreateUserRequestFields { export function createUserRequest(userData?: Partial) { const defaultValues = { externalId: uuid(), - email: generator.email(), + email: `${uuid()}@example.com`, firstName: generator.first(), lastName: generator.last(), username: generator.name(), diff --git a/packages/bbui/src/Actions/position_dropdown.js b/packages/bbui/src/Actions/position_dropdown.js index cc169eac09..d259b9197a 100644 --- a/packages/bbui/src/Actions/position_dropdown.js +++ b/packages/bbui/src/Actions/position_dropdown.js @@ -35,7 +35,10 @@ export default function positionDropdown(element, opts) { } if (typeof customUpdate === "function") { - styles = customUpdate(anchorBounds, elementBounds, styles) + styles = customUpdate(anchorBounds, elementBounds, { + ...styles, + offset: opts.offset, + }) } else { // Determine vertical styles if (align === "right-outside") { diff --git a/packages/builder/src/analytics/index.js b/packages/builder/src/analytics/index.js index 6bb10acdb5..3a80a05d7f 100644 --- a/packages/builder/src/analytics/index.js +++ b/packages/builder/src/analytics/index.js @@ -9,13 +9,17 @@ const intercom = new IntercomClient(process.env.INTERCOM_TOKEN) class AnalyticsHub { constructor() { this.clients = [posthog, intercom] + this.initialised = false } async activate() { // Check analytics are enabled const analyticsStatus = await API.getAnalyticsStatus() - if (analyticsStatus.enabled) { - this.clients.forEach(client => client.init()) + if (analyticsStatus.enabled && !this.initialised) { + this.clients.forEach(client => { + client.init() + }) + this.initialised = true } } diff --git a/packages/builder/src/components/backend/Datasources/CreateEditRelationship.svelte b/packages/builder/src/components/backend/Datasources/CreateEditRelationship.svelte index 6b9524776c..b54ecbf9fd 100644 --- a/packages/builder/src/components/backend/Datasources/CreateEditRelationship.svelte +++ b/packages/builder/src/components/backend/Datasources/CreateEditRelationship.svelte @@ -40,8 +40,15 @@ part2: PrettyRelationshipDefinitions.MANY, }, } - let relationshipOpts1 = Object.values(PrettyRelationshipDefinitions) - let relationshipOpts2 = Object.values(PrettyRelationshipDefinitions) + $: relationshipOpts1 = + relationshipPart2 === PrettyRelationshipDefinitions.ONE + ? [PrettyRelationshipDefinitions.MANY] + : Object.values(PrettyRelationshipDefinitions) + + $: relationshipOpts2 = + relationshipPart1 === PrettyRelationshipDefinitions.ONE + ? 
[PrettyRelationshipDefinitions.MANY]
+ : Object.values(PrettyRelationshipDefinitions)
 let relationshipPart1 = PrettyRelationshipDefinitions.ONE
 let relationshipPart2 = PrettyRelationshipDefinitions.MANY
diff --git a/packages/builder/src/components/common/HelpMenu.svelte b/packages/builder/src/components/common/HelpMenu.svelte
index f6e2f42c98..baff9a5a27 100644
--- a/packages/builder/src/components/common/HelpMenu.svelte
+++ b/packages/builder/src/components/common/HelpMenu.svelte
@@ -1,11 +1,11 @@
@@ -67,13 +95,30 @@
 options={componentOptions}
 on:change={() => (parameters.columns = [])}
 />
+
-
 {
+ const columns = e.detail
+ parameters.columns = columns
+ parameters.customHeaders = columns.reduce((headerMap, column) => {
+ return {
+ [column.name]: column.displayName,
+ ...headerMap,
+ }
+ }, {})
+ }}
 />
@@ -97,8 +142,8 @@
 .params {
 display: grid;
 column-gap: var(--spacing-xs);
- row-gap: var(--spacing-s);
- grid-template-columns: 90px 1fr;
+ row-gap: var(--spacing-m);
+ grid-template-columns: 90px 1fr 90px;
 align-items: center;
 }
diff --git a/packages/builder/src/components/design/settings/controls/ButtonActionEditor/actions/SaveRow.svelte b/packages/builder/src/components/design/settings/controls/ButtonActionEditor/actions/SaveRow.svelte
index a1fe773455..d834e9aac9 100644
--- a/packages/builder/src/components/design/settings/controls/ButtonActionEditor/actions/SaveRow.svelte
+++ b/packages/builder/src/components/design/settings/controls/ButtonActionEditor/actions/SaveRow.svelte
@@ -72,7 +72,10 @@
 {#if parameters.confirm}
-
+
+
+
+
- import EditComponentPopover from "../EditComponentPopover.svelte"
+ import EditComponentPopover from "../EditComponentPopover/EditComponentPopover.svelte"
 import { Icon } from "@budibase/bbui"
 import { runtimeToReadableBinding } from "dataBinding"
 import { isJSBinding } from "@budibase/string-templates"
diff --git a/packages/builder/src/components/design/settings/controls/ColumnEditor/ColumnEditor.svelte b/packages/builder/src/components/design/settings/controls/ColumnEditor/ColumnEditor.svelte
index 2b9fa573c2..742ab785a1 100644
--- a/packages/builder/src/components/design/settings/controls/ColumnEditor/ColumnEditor.svelte
+++ b/packages/builder/src/components/design/settings/controls/ColumnEditor/ColumnEditor.svelte
@@ -29,6 +29,12 @@
 allowLinks: true,
 })
+ $: {
+ value = (value || []).filter(
+ column => (schema || {})[column.name || column] !== undefined
+ )
+ }
+
 const getText = value => {
 if (!value?.length) {
 return "All columns"
diff --git a/packages/builder/src/components/design/settings/controls/EditComponentPopover.svelte b/packages/builder/src/components/design/settings/controls/EditComponentPopover/EditComponentPopover.svelte
similarity index 83%
rename from packages/builder/src/components/design/settings/controls/EditComponentPopover.svelte
rename to packages/builder/src/components/design/settings/controls/EditComponentPopover/EditComponentPopover.svelte
index 4e645fe343..39e4bc2ada 100644
--- a/packages/builder/src/components/design/settings/controls/EditComponentPopover.svelte
+++ b/packages/builder/src/components/design/settings/controls/EditComponentPopover/EditComponentPopover.svelte
@@ -3,6 +3,7 @@
 import { componentStore } from "stores/builder"
 import { cloneDeep } from "lodash/fp"
 import { createEventDispatcher, getContext } from "svelte"
+ import { customPositionHandler } from "."
 import ComponentSettingsSection from "pages/builder/app/[application]/design/[screenId]/[componentId]/_components/Component/ComponentSettingsSection.svelte"
 export let anchor
@@ -54,25 +55,6 @@
 dispatch("change", nestedComponentInstance)
 }
-
- const customPositionHandler = (anchorBounds, eleBounds, cfg) => {
- let { left, top } = cfg
- let percentageOffset = 30
- // left-outside
- left = anchorBounds.left - eleBounds.width - 18
-
- // shift up from the anchor, if space allows
- let offsetPos = Math.floor(eleBounds.height / 100) * percentageOffset
- let defaultTop = anchorBounds.top - offsetPos
-
- if (window.innerHeight - defaultTop < eleBounds.height) {
- top = window.innerHeight - eleBounds.height - 5
- } else {
- top = anchorBounds.top - offsetPos
- }
-
- return { ...cfg, left, top }
- }
 0}
 maxHeight={600}
+ offset={18}
 handlePostionUpdate={customPositionHandler}
 >
diff --git a/packages/builder/src/components/design/settings/controls/EditComponentPopover/index.js b/packages/builder/src/components/design/settings/controls/EditComponentPopover/index.js
new file mode 100644
index 0000000000..2dc3f60185
--- /dev/null
+++ b/packages/builder/src/components/design/settings/controls/EditComponentPopover/index.js
@@ -0,0 +1,18 @@
+export const customPositionHandler = (anchorBounds, eleBounds, cfg) => {
+ let { left, top, offset } = cfg
+ let percentageOffset = 30
+ // left-outside
+ left = anchorBounds.left - eleBounds.width - (offset || 5)
+
+ // shift up from the anchor, if space allows
+ let offsetPos = Math.floor(eleBounds.height / 100) * percentageOffset
+ let defaultTop = anchorBounds.top - offsetPos
+
+ if (window.innerHeight - defaultTop < eleBounds.height) {
+ top = window.innerHeight - eleBounds.height - 5
+ } else {
+ top = anchorBounds.top - offsetPos
+ }
+
+ return { ...cfg, left, top }
+}
diff --git a/packages/builder/src/components/design/settings/controls/FieldConfiguration/FieldSetting.svelte b/packages/builder/src/components/design/settings/controls/FieldConfiguration/FieldSetting.svelte
index 27590a9858..771bcf20e0 100644
--- a/packages/builder/src/components/design/settings/controls/FieldConfiguration/FieldSetting.svelte
+++ b/packages/builder/src/components/design/settings/controls/FieldConfiguration/FieldSetting.svelte
@@ -1,5 +1,5 @@
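Taken out of the diff, the shared handler's geometry is easier to see: place the element to the left of the anchor, shift it up in proportion to its height, and clamp it to the viewport. A typed sketch of that logic, assuming DOMRect-like bounds (the function and interface names here are mine, not from the codebase):

```ts
// Sketch of the extracted position handler; PositionConfig is an assumed name.
interface PositionConfig {
  left: number
  top: number
  offset?: number
}

export const positionLeftOutside = (
  anchorBounds: DOMRect,
  eleBounds: DOMRect,
  cfg: PositionConfig
): PositionConfig => {
  // Place the element to the left of the anchor; the old hard-coded 18px gap
  // becomes the offset forwarded from the popover (default 5px).
  const left = anchorBounds.left - eleBounds.width - (cfg.offset || 5)

  // Shift up by 30px per full 100px of element height, if space allows.
  const offsetPos = Math.floor(eleBounds.height / 100) * 30
  let top = anchorBounds.top - offsetPos

  // Clamp so the element never runs off the bottom of the viewport.
  if (window.innerHeight - top < eleBounds.height) {
    top = window.innerHeight - eleBounds.height - 5
  }

  return { ...cfg, left, top }
}
```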
{displayValue}
@@ -140,10 +139,22 @@
 {/each}
-
-
+
+ { if (event.key === "Enter") { searchForIcon() } }}
+ thin
+ placeholder="Search Icon"
+ />
+ {#if value}
+
+ {/if}
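What survives of this icon-picker hunk shows the intent: pressing Enter in the search field triggers the same lookup as the search button, and a clear control renders only while an icon is set. As a plain-DOM sketch (the selector and the searchForIcon body are placeholders, not the component's real code):

```ts
// Placeholder for the picker's actual search routine.
const searchForIcon = (): void => {
  console.log("searching...")
}

// Mirror of the on:keydown handler added above: Enter triggers the search.
const onSearchKeydown = (event: KeyboardEvent): void => {
  if (event.key === "Enter") {
    searchForIcon()
  }
}

// Hypothetical wiring; in the Svelte component this is an on:keydown binding.
document
  .querySelector<HTMLInputElement>("input[placeholder='Search Icon']")
  ?.addEventListener("keydown", onSearchKeydown)
```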
@@ -240,6 +251,7 @@
 flex-flow: row nowrap;
 width: 100%;
 padding-right: 15px;
+ gap: 10px;
 }
 .input-wrapper {
 width: 510px;
diff --git a/packages/builder/src/components/design/settings/controls/PropertyControl.svelte b/packages/builder/src/components/design/settings/controls/PropertyControl.svelte
index 94c9d22f54..0f0276823a 100644
--- a/packages/builder/src/components/design/settings/controls/PropertyControl.svelte
+++ b/packages/builder/src/components/design/settings/controls/PropertyControl.svelte
@@ -20,17 +20,23 @@
 export let bindings = []
 export let componentBindings = []
 export let nested = false
- export let highlighted = false
 export let propertyFocus = false
 export let info = null
 export let disableBindings = false
 export let wide
- $: nullishValue = value == null || value === ""
+ let highlightType
+
+ $: highlightedProp = $builderStore.highlightedSetting
 $: allBindings = getAllBindings(bindings, componentBindings, nested)
 $: safeValue = getSafeValue(value, defaultValue, allBindings)
 $: replaceBindings = val => readableToRuntimeBinding(allBindings, val)
+ $: if (!Array.isArray(value)) {
+ highlightType =
+ highlightedProp?.key === key ? `highlighted-${highlightedProp?.type}` : ""
+ }
+
 const getAllBindings = (bindings, componentBindings, nested) => {
 if (!nested) {
 return bindings
@@ -71,16 +77,17 @@
 }
 onDestroy(() => {
- if (highlightedProp) {
 builderStore.highlightSetting(null)
 }
 })
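This PropertyControl change swaps the highlighted prop for state derived from the builder store: a control highlights itself only while the store's highlightedSetting key matches its own, and the highlight class encodes the highlight type. A store-level sketch of the same pattern (the HighlightedSetting shape is inferred from the diff, not confirmed):

```ts
import { writable, derived } from "svelte/store"

// Inferred from the diff: the store tracks which setting to highlight and why.
interface HighlightedSetting {
  key: string
  type: string // e.g. "info" or "error"; the real union isn't visible here
}

export const highlightedSetting = writable<HighlightedSetting | null>(null)

// Equivalent of the reactive block above: map the store value to a CSS
// class for the one control whose setting key matches.
export const highlightClassFor = (key: string) =>
  derived(highlightedSetting, $h =>
    $h?.key === key ? `highlighted-${$h.type}` : ""
  )

// Clearing works the same way as the onDestroy hook in the diff.
export const clearHighlight = () => highlightedSetting.set(null)
```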
{#if label && !labelHidden}
@@ -115,6 +122,16 @@
diff --git a/packages/builder/src/components/portal/onboarding/TourPopover.svelte b/packages/builder/src/components/portal/onboarding/TourPopover.svelte
index 23821d73a3..732ee40e11 100644
--- a/packages/builder/src/components/portal/onboarding/TourPopover.svelte
+++ b/packages/builder/src/components/portal/onboarding/TourPopover.svelte
@@ -1,6 +1,6 @@
 {#if tourKey}
@@ -98,7 +95,9 @@
 anchor={popoverAnchor}
 maxWidth={300}
 dismissible={false}
- offset={15}
+ offset={12}
+ handlePostionUpdate={tourStep?.positionHandler}
+ customZindex={3}
 >
@@ -119,7 +118,7 @@
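The added block below lost its file header to extraction, but the surviving click handler shows a new card in the screen-creation flow that opens the modal directly in its form mode. A sketch of the modal contract that implies (mode names other than "form" are assumptions):

```ts
// Assumed modal contract; only show("form") is visible in the diff below.
type ScreenMode = "blank" | "table" | "form"

class CreateScreenModal {
  visible = false
  mode: ScreenMode = "blank"

  show(mode: ScreenMode): void {
    this.mode = mode
    this.visible = true
  }

  hide(): void {
    this.visible = false
  }
}

// The new card's click handler reduces to:
const createScreenModal = new CreateScreenModal()
createScreenModal.show("form")
```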
+
+
 createScreenModal.show("form")}>
+
+
+
+
+ Form
 Capture data from your users
+
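With the popover wiring above, a tour step can now carry its own placement logic and z-index instead of relying on one fixed offset. One way the step type could look (illustrative only; positionHandler and customZindex are the parts grounded in the diff, and the handlePostionUpdate spelling is the code's own):

```ts
// Bounds/PositionConfig stand in for the objects positionDropdown passes in.
interface Bounds {
  left: number
  top: number
  width: number
  height: number
}

interface PositionConfig {
  left: number
  top: number
  offset?: number
}

// Illustrative step shape; field names beyond positionHandler are assumed.
interface TourStep {
  id: string
  title: string
  body: string
  // Forwarded to the popover's handlePostionUpdate prop when present.
  positionHandler?: (
    anchor: Bounds,
    element: Bounds,
    cfg: PositionConfig
  ) => PositionConfig
}

// Steps without a handler keep the popover's default placement.
const welcomeStep: TourStep = {
  id: "builder-welcome",
  title: "Welcome",
  body: "A placeholder step for illustration.",
}
```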
diff --git a/packages/builder/src/pages/builder/portal/apps/_components/PortalSideBar.svelte b/packages/builder/src/pages/builder/portal/apps/_components/PortalSideBar.svelte index 996c7c0580..f11059857b 100644 --- a/packages/builder/src/pages/builder/portal/apps/_components/PortalSideBar.svelte +++ b/packages/builder/src/pages/builder/portal/apps/_components/PortalSideBar.svelte @@ -1,10 +1,15 @@ { tableId: selection.tableId, rows: selection.selectedRows, format: action.parameters.type, - columns: action.parameters.columns, + columns: action.parameters.columns?.map( + column => column.name || column + ), + delimiter: action.parameters.delimiter, + customHeaders: action.parameters.customHeaders, }) download( new Blob([data], { type: "text/plain" }), diff --git a/packages/frontend-core/src/api/rows.js b/packages/frontend-core/src/api/rows.js index 79f837e864..0a0d48da43 100644 --- a/packages/frontend-core/src/api/rows.js +++ b/packages/frontend-core/src/api/rows.js @@ -89,13 +89,24 @@ export const buildRowEndpoints = API => ({ * @param rows the array of rows to export * @param format the format to export (csv or json) * @param columns which columns to export (all if undefined) + * @param delimiter how values should be separated in a CSV (default is comma) */ - exportRows: async ({ tableId, rows, format, columns, search }) => { + exportRows: async ({ + tableId, + rows, + format, + columns, + search, + delimiter, + customHeaders, + }) => { return await API.post({ url: `/api/${tableId}/rows/exportRows?format=${format}`, body: { rows, columns, + delimiter, + customHeaders, ...search, }, parseResponse: async response => { diff --git a/packages/frontend-core/src/components/grid/cells/DataCell.svelte b/packages/frontend-core/src/components/grid/cells/DataCell.svelte index cdaf28978a..d8cff26b9d 100644 --- a/packages/frontend-core/src/components/grid/cells/DataCell.svelte +++ b/packages/frontend-core/src/components/grid/cells/DataCell.svelte @@ -59,13 +59,13 @@ isReadonly: () => readonly, getType: () => column.schema.type, getValue: () => row[column.name], - setValue: (value, options = { save: true }) => { + setValue: (value, options = { apply: true }) => { validation.actions.setError(cellId, null) updateValue({ rowId: row._id, column: column.name, value, - save: options?.save, + apply: options?.apply, }) }, } diff --git a/packages/frontend-core/src/components/grid/overlays/KeyboardManager.svelte b/packages/frontend-core/src/components/grid/overlays/KeyboardManager.svelte index 8b0a0f0942..5e3a035d89 100644 --- a/packages/frontend-core/src/components/grid/overlays/KeyboardManager.svelte +++ b/packages/frontend-core/src/components/grid/overlays/KeyboardManager.svelte @@ -217,14 +217,14 @@ const type = $focusedCellAPI.getType() if (type === "number" && keyCodeIsNumber(keyCode)) { // Update the value locally but don't save it yet - $focusedCellAPI.setValue(parseInt(key), { save: false }) + $focusedCellAPI.setValue(parseInt(key), { apply: false }) $focusedCellAPI.focus() } else if ( ["string", "barcodeqr", "longform"].includes(type) && (keyCodeIsLetter(keyCode) || keyCodeIsNumber(keyCode)) ) { // Update the value locally but don't save it yet - $focusedCellAPI.setValue(key, { save: false }) + $focusedCellAPI.setValue(key, { apply: false }) $focusedCellAPI.focus() } } diff --git a/packages/frontend-core/src/components/grid/stores/rows.js b/packages/frontend-core/src/components/grid/stores/rows.js index 34ba77afa2..5dc9413ccd 100644 --- a/packages/frontend-core/src/components/grid/stores/rows.js +++ 
b/packages/frontend-core/src/components/grid/stores/rows.js @@ -327,29 +327,31 @@ export const createActions = context => { get(fetch)?.getInitialData() } - // Patches a row with some changes - const updateRow = async (rowId, changes, options = { save: true }) => { + // Checks if a changeset for a row actually mutates the row or not + const changesAreValid = (row, changes) => { + const columns = Object.keys(changes || {}) + if (!row || !columns.length) { + return false + } + + // Ensure there is at least 1 column that creates a difference + return columns.some(column => row[column] !== changes[column]) + } + + // Patches a row with some changes in local state, and returns whether a + // valid pending change was made or not + const stashRowChanges = (rowId, changes) => { const $rows = get(rows) const $rowLookupMap = get(rowLookupMap) const index = $rowLookupMap[rowId] const row = $rows[index] - if (index == null || !Object.keys(changes || {}).length) { - return + + // Check this is a valid change + if (!row || !changesAreValid(row, changes)) { + return false } - // Abandon if no changes - let same = true - for (let column of Object.keys(changes)) { - if (row[column] !== changes[column]) { - same = false - break - } - } - if (same) { - return - } - - // Immediately update state so that the change is reflected + // Add change to cache rowChangeCache.update(state => ({ ...state, [rowId]: { @@ -357,26 +359,30 @@ export const createActions = context => { ...changes, }, })) + return true + } - // Stop here if we don't want to persist the change - if (!options?.save) { + // Saves any pending changes to a row + const applyRowChanges = async rowId => { + const $rows = get(rows) + const $rowLookupMap = get(rowLookupMap) + const index = $rowLookupMap[rowId] + const row = $rows[index] + if (row == null) { return } // Save change try { - inProgressChanges.update(state => ({ - ...state, - [rowId]: true, - })) + // Mark as in progress + inProgressChanges.update(state => ({ ...state, [rowId]: true })) // Update row - const saved = await datasource.actions.updateRow({ - ...cleanRow(row), - ...get(rowChangeCache)[rowId], - }) + const changes = get(rowChangeCache)[rowId] + const newRow = { ...cleanRow(row), ...changes } + const saved = await datasource.actions.updateRow(newRow) - // Update state after a successful change + // Update row state after a successful change if (saved?._id) { rows.update(state => { state[index] = saved @@ -386,6 +392,8 @@ export const createActions = context => { // Handle users table edge case await refreshRow(saved.id) } + + // Wipe row change cache now that we've saved the row rowChangeCache.update(state => { delete state[rowId] return state @@ -393,15 +401,17 @@ export const createActions = context => { } catch (error) { handleValidationError(rowId, error) } - inProgressChanges.update(state => ({ - ...state, - [rowId]: false, - })) + + // Mark as completed + inProgressChanges.update(state => ({ ...state, [rowId]: false })) } // Updates a value of a row - const updateValue = async ({ rowId, column, value, save = true }) => { - return await updateRow(rowId, { [column]: value }, { save }) + const updateValue = async ({ rowId, column, value, apply = true }) => { + const success = stashRowChanges(rowId, { [column]: value }) + if (success && apply) { + await applyRowChanges(rowId) + } } // Deletes an array of rows @@ -411,9 +421,7 @@ export const createActions = context => { } // Actually delete rows - rowsToDelete.forEach(row => { - delete row.__idx - }) + rowsToDelete.forEach(row => 
delete row.__idx) await datasource.actions.deleteRows(rowsToDelete) // Update state @@ -433,7 +441,7 @@ export const createActions = context => { newRow = newRows[i] // Ensure we have a unique _id. - // This means generating one for non DS+, overriting any that may already + // This means generating one for non DS+, overwriting any that may already // exist as we cannot allow duplicates. if (!$isDatasourcePlus) { newRow._id = Helpers.uuid() @@ -494,7 +502,7 @@ export const createActions = context => { duplicateRow, getRow, updateValue, - updateRow, + applyRowChanges, deleteRows, hasRow, loadNextPage, @@ -508,7 +516,14 @@ export const createActions = context => { } export const initialise = context => { - const { rowChangeCache, inProgressChanges, previousFocusedRowId } = context + const { + rowChangeCache, + inProgressChanges, + previousFocusedRowId, + previousFocusedCellId, + rows, + validation, + } = context // Wipe the row change cache when changing row previousFocusedRowId.subscribe(id => { @@ -519,4 +534,15 @@ export const initialise = context => { }) } }) + + // Ensure any unsaved changes are saved when changing cell + previousFocusedCellId.subscribe(async id => { + const rowId = id?.split("-")[0] + const hasErrors = validation.actions.rowHasErrors(rowId) + const hasChanges = Object.keys(get(rowChangeCache)[rowId] || {}).length > 0 + const isSavingChanges = get(inProgressChanges)[rowId] + if (rowId && !hasErrors && hasChanges && !isSavingChanges) { + await rows.actions.applyRowChanges(rowId) + } + }) } diff --git a/packages/frontend-core/src/components/grid/stores/ui.js b/packages/frontend-core/src/components/grid/stores/ui.js index 129d6614e5..da0558bb5b 100644 --- a/packages/frontend-core/src/components/grid/stores/ui.js +++ b/packages/frontend-core/src/components/grid/stores/ui.js @@ -16,6 +16,7 @@ export const createStores = context => { const hoveredRowId = writable(null) const rowHeight = writable(get(props).fixedRowHeight || DefaultRowHeight) const previousFocusedRowId = writable(null) + const previousFocusedCellId = writable(null) const gridFocused = writable(false) const isDragging = writable(false) const buttonColumnWidth = writable(0) @@ -48,6 +49,7 @@ export const createStores = context => { focusedCellAPI, focusedRowId, previousFocusedRowId, + previousFocusedCellId, hoveredRowId, rowHeight, gridFocused, @@ -129,6 +131,7 @@ export const initialise = context => { const { focusedRowId, previousFocusedRowId, + previousFocusedCellId, rows, focusedCellId, selectedRows, @@ -181,6 +184,13 @@ export const initialise = context => { lastFocusedRowId = id }) + // Remember the last focused cell ID so that we can store the previous one + let lastFocusedCellId = null + focusedCellId.subscribe(id => { + previousFocusedCellId.set(lastFocusedCellId) + lastFocusedCellId = id + }) + // Remove hovered row when a cell is selected focusedCellId.subscribe(cell => { if (cell && get(hoveredRowId)) { diff --git a/packages/frontend-core/src/components/grid/stores/validation.js b/packages/frontend-core/src/components/grid/stores/validation.js index 9c3927f9c9..70db076593 100644 --- a/packages/frontend-core/src/components/grid/stores/validation.js +++ b/packages/frontend-core/src/components/grid/stores/validation.js @@ -1,8 +1,23 @@ -import { writable, get } from "svelte/store" +import { writable, get, derived } from "svelte/store" +// Normally we would break out actions into the explicit "createActions" +// function, but for validation all these actions are pure so can go into +// "createStores" 
instead to make dependency ordering simpler export const createStores = () => { const validation = writable({}) + // Derive which rows have errors so that we can use that info later + const rowErrorMap = derived(validation, $validation => { + let map = {} + Object.entries($validation).forEach(([key, error]) => { + // Extract row ID from all errored cell IDs + if (error) { + map[key.split("-")[0]] = true + } + }) + return map + }) + const setError = (cellId, error) => { if (!cellId) { return @@ -13,11 +28,16 @@ export const createStores = () => { })) } + const rowHasErrors = rowId => { + return get(rowErrorMap)[rowId] + } + return { validation: { ...validation, actions: { setError, + rowHasErrors, }, }, } diff --git a/packages/frontend-core/src/themes/nord.css b/packages/frontend-core/src/themes/nord.css index 17771898ad..d47dbe8aa8 100644 --- a/packages/frontend-core/src/themes/nord.css +++ b/packages/frontend-core/src/themes/nord.css @@ -1,6 +1,4 @@ .spectrum--nord { - --spectrum-global-color-yellow-1000: #d8b500; - --spectrum-global-color-red-400: #bf616a; --spectrum-global-color-red-500: #c26971; --spectrum-global-color-red-600: #c57179; diff --git a/packages/pro b/packages/pro index 183b35d3ac..80a4d8ff99 160000 --- a/packages/pro +++ b/packages/pro @@ -1 +1 @@ -Subproject commit 183b35d3acd42433dcb2d32bcd89a36abe13afec +Subproject commit 80a4d8ff998895fc298ee510158a82ce7daebc67 diff --git a/packages/server/__mocks__/node-fetch.ts b/packages/server/__mocks__/node-fetch.ts index 265d6b1c36..98c75bb84f 100644 --- a/packages/server/__mocks__/node-fetch.ts +++ b/packages/server/__mocks__/node-fetch.ts @@ -8,6 +8,7 @@ module FetchMock { let mockSearch = false const func = async (url: any, opts: any) => { + const { host, pathname } = new URL(url) function json(body: any, status = 200) { return { status, @@ -34,7 +35,7 @@ module FetchMock { } } - if (url.includes("/api/global")) { + if (pathname.includes("/api/global")) { const user = { email: "test@example.com", _id: "us_test@example.com", @@ -47,31 +48,31 @@ module FetchMock { global: false, }, } - return url.endsWith("/users") && opts.method === "GET" + return pathname.endsWith("/users") && opts.method === "GET" ? 
json([user]) : json(user) } // mocked data based on url - else if (url.includes("api/apps")) { + else if (pathname.includes("api/apps")) { return json({ app1: { url: "/app1", }, }) - } else if (url.includes("example.com")) { + } else if (host.includes("example.com")) { return json({ body: opts.body, url, method: opts.method, }) - } else if (url.includes("invalid.com")) { + } else if (host.includes("invalid.com")) { return json( { invalid: true, }, 404 ) - } else if (mockSearch && url.includes("_search")) { + } else if (mockSearch && pathname.includes("_search")) { const body = opts.body const parts = body.split("tableId:") let tableId @@ -90,7 +91,7 @@ module FetchMock { ], bookmark: "test", }) - } else if (url.includes("google.com")) { + } else if (host.includes("google.com")) { return json({ url, opts, @@ -177,7 +178,7 @@ module FetchMock { } else if (url === "https://www.googleapis.com/oauth2/v4/token") { // any valid response return json({}) - } else if (url.includes("failonce.com")) { + } else if (host.includes("failonce.com")) { failCount++ if (failCount === 1) { return json({ message: "error" }, 500) diff --git a/packages/server/package.json b/packages/server/package.json index 45980a4be6..97de17eb58 100644 --- a/packages/server/package.json +++ b/packages/server/package.json @@ -149,7 +149,7 @@ "rimraf": "3.0.2", "supertest": "6.3.3", "swagger-jsdoc": "6.1.0", - "testcontainers": "10.6.0", + "testcontainers": "10.7.2", "timekeeper": "2.2.0", "ts-node": "10.8.1", "tsconfig-paths": "4.0.0", diff --git a/packages/server/scripts/integrations/mysql/init.sql b/packages/server/scripts/integrations/mysql/init.sql index ae5cd07788..e687c7c3b1 100644 --- a/packages/server/scripts/integrations/mysql/init.sql +++ b/packages/server/scripts/integrations/mysql/init.sql @@ -10,6 +10,11 @@ CREATE TABLE Persons ( City varchar(255), PRIMARY KEY (PersonID) ); +CREATE TABLE Person ( + PersonID int NOT NULL AUTO_INCREMENT, + Name varchar(255), + PRIMARY KEY (PersonID) +); CREATE TABLE Tasks ( TaskID int NOT NULL AUTO_INCREMENT, PersonID INT, @@ -27,6 +32,7 @@ CREATE TABLE Products ( ); INSERT INTO Persons (FirstName, LastName, Age, Address, City, CreatedAt) VALUES ('Mike', 'Hughes', 28.2, '123 Fake Street', 'Belfast', '2021-01-19 03:14:07'); INSERT INTO Persons (FirstName, LastName, Age, Address, City, CreatedAt) VALUES ('Dave', 'Johnson', 29, '124 Fake Street', 'Belfast', '2022-04-01 00:11:11'); +INSERT INTO Person (Name) VALUES ('Elf'); INSERT INTO Tasks (PersonID, TaskName, CreatedAt) VALUES (1, 'assembling', '2020-01-01'); INSERT INTO Tasks (PersonID, TaskName, CreatedAt) VALUES (2, 'processing', '2019-12-31'); INSERT INTO Products (name, updated) VALUES ('Meat', '11:00:22'), ('Fruit', '10:00:00'); diff --git a/packages/server/scripts/integrations/postgres/reset.sh b/packages/server/scripts/integrations/postgres/reset.sh index 32778bd11f..8deb01cdf8 100755 --- a/packages/server/scripts/integrations/postgres/reset.sh +++ b/packages/server/scripts/integrations/postgres/reset.sh @@ -1,3 +1,3 @@ #!/bin/bash -docker-compose down +docker-compose down -v docker volume prune -f diff --git a/packages/server/scripts/test.sh b/packages/server/scripts/test.sh index 9efef05526..3ecf8bb794 100644 --- a/packages/server/scripts/test.sh +++ b/packages/server/scripts/test.sh @@ -3,12 +3,12 @@ set -e if [[ -n $CI ]] then - export NODE_OPTIONS="--max-old-space-size=4096 --no-node-snapshot" + export NODE_OPTIONS="--max-old-space-size=4096 --no-node-snapshot $NODE_OPTIONS" echo "jest --coverage --maxWorkers=2 --forceExit 
--workerIdleMemoryLimit=2000MB --bail $@" jest --coverage --maxWorkers=2 --forceExit --workerIdleMemoryLimit=2000MB --bail $@ else # --maxWorkers performs better in development - export NODE_OPTIONS="--no-node-snapshot" + export NODE_OPTIONS="--no-node-snapshot $NODE_OPTIONS" echo "jest --coverage --maxWorkers=2 --forceExit $@" jest --coverage --maxWorkers=2 --forceExit $@ fi \ No newline at end of file diff --git a/packages/server/src/api/controllers/application.ts b/packages/server/src/api/controllers/application.ts index c838e14744..47cc08c738 100644 --- a/packages/server/src/api/controllers/application.ts +++ b/packages/server/src/api/controllers/application.ts @@ -48,6 +48,11 @@ import { PlanType, Screen, UserCtx, + CreateAppRequest, + FetchAppDefinitionResponse, + FetchAppPackageResponse, + DuplicateAppRequest, + DuplicateAppResponse, } from "@budibase/types" import { BASE_LAYOUT_PROP_IDS } from "../../constants/layouts" import sdk from "../../sdk" @@ -59,23 +64,23 @@ import * as appMigrations from "../../appMigrations" async function getLayouts() { const db = context.getAppDB() return ( - await db.allDocs( + await db.allDocs( getLayoutParams(null, { include_docs: true, }) ) - ).rows.map((row: any) => row.doc) + ).rows.map(row => row.doc!) } async function getScreens() { const db = context.getAppDB() return ( - await db.allDocs( + await db.allDocs( getScreenParams(null, { include_docs: true, }) ) - ).rows.map((row: any) => row.doc) + ).rows.map(row => row.doc!) } function getUserRoleId(ctx: UserCtx) { @@ -117,8 +122,8 @@ function checkAppName( } interface AppTemplate { - templateString: string - useTemplate: string + templateString?: string + useTemplate?: string file?: { type?: string path: string @@ -175,14 +180,16 @@ export const addSampleData = async (ctx: UserCtx) => { ctx.status = 200 } -export async function fetch(ctx: UserCtx) { +export async function fetch(ctx: UserCtx) { ctx.body = await sdk.applications.fetch( ctx.query.status as AppStatus, ctx.user ) } -export async function fetchAppDefinition(ctx: UserCtx) { +export async function fetchAppDefinition( + ctx: UserCtx +) { const layouts = await getLayouts() const userRoleId = getUserRoleId(ctx) const accessController = new roles.AccessController() @@ -197,10 +204,12 @@ export async function fetchAppDefinition(ctx: UserCtx) { } } -export async function fetchAppPackage(ctx: UserCtx) { +export async function fetchAppPackage( + ctx: UserCtx +) { const db = context.getAppDB() const appId = context.getAppId() - let application = await db.get(DocumentType.APP_METADATA) + let application = await db.get(DocumentType.APP_METADATA) const layouts = await getLayouts() let screens = await getScreens() const license = await licensing.cache.getCachedLicense() @@ -232,17 +241,21 @@ export async function fetchAppPackage(ctx: UserCtx) { } } -async function performAppCreate(ctx: UserCtx) { +async function performAppCreate(ctx: UserCtx) { const apps = (await dbCore.getAllApps({ dev: true })) as App[] - const name = ctx.request.body.name, - possibleUrl = ctx.request.body.url, - encryptionPassword = ctx.request.body.encryptionPassword + const { + name, + url, + encryptionPassword, + useTemplate, + templateKey, + templateString, + } = ctx.request.body checkAppName(ctx, apps, name) - const url = sdk.applications.getAppUrl({ name, url: possibleUrl }) - checkAppUrl(ctx, apps, url) + const appUrl = sdk.applications.getAppUrl({ name, url }) + checkAppUrl(ctx, apps, appUrl) - const { useTemplate, templateKey, templateString } = ctx.request.body const 
instanceConfig: AppTemplate = { useTemplate, key: templateKey, @@ -273,7 +286,7 @@ async function performAppCreate(ctx: UserCtx) { version: envCore.VERSION, componentLibraries: ["@budibase/standard-components"], name: name, - url: url, + url: appUrl, template: templateKey, instance, tenantId: tenancy.getTenantId(), @@ -367,7 +380,7 @@ async function creationEvents(request: any, app: App) { creationFns.push(a => events.app.fileImported(a)) } // from server file path - else if (request.body.file && request.duplicate) { + else if (request.body.file) { // explicitly pass in the newly created app id creationFns.push(a => events.app.duplicated(a, app.appId)) } @@ -396,7 +409,7 @@ async function appPostCreate(ctx: UserCtx, app: App) { await creationEvents(ctx.request, app) - // app import & template creation + // app import, template creation and duplication if (ctx.request.body.useTemplate === "true") { const { rows } = await getUniqueRows([app.appId]) const rowCount = rows ? rows.length : 0 @@ -425,7 +438,7 @@ async function appPostCreate(ctx: UserCtx, app: App) { } } -export async function create(ctx: UserCtx) { +export async function create(ctx: UserCtx) { const newApplication = await quotas.addApp(() => performAppCreate(ctx)) await appPostCreate(ctx, newApplication) await cache.bustCache(cache.CacheKey.CHECKLIST) @@ -435,7 +448,9 @@ export async function create(ctx: UserCtx) { // This endpoint currently operates as a PATCH rather than a PUT // Thus name and url fields are handled only if present -export async function update(ctx: UserCtx) { +export async function update( + ctx: UserCtx<{ name?: string; url?: string }, App> +) { const apps = (await dbCore.getAllApps({ dev: true })) as App[] // validation const name = ctx.request.body.name, @@ -508,7 +523,7 @@ export async function revertClient(ctx: UserCtx) { const revertedToVersion = application.revertableVersion const appPackageUpdates = { version: revertedToVersion, - revertableVersion: null, + revertableVersion: undefined, } const app = await updateAppPackage(appPackageUpdates, ctx.params.appId) await events.app.versionReverted(app, currentVersion, revertedToVersion) @@ -632,7 +647,9 @@ export async function importToApp(ctx: UserCtx) { * Create a copy of the latest dev application. * Performs an export of the app, then imports from the export dir path */ -export async function duplicateApp(ctx: UserCtx) { +export async function duplicateApp( + ctx: UserCtx +) { const { name: appName, url: possibleUrl } = ctx.request.body const { appId: sourceAppId } = ctx.params const [app] = await dbCore.getAppsByIDs([sourceAppId]) @@ -652,24 +669,28 @@ export async function duplicateApp(ctx: UserCtx) { tar: false, }) - // Build a create request that triggers an import from the export path above. - const createReq: any = { - request: { - body: { - name: appName, - url: possibleUrl, - useTemplate: "true", - file: { - path: tmpPath, - }, - }, - // Mark the create as a duplicate to kick off the correct event. 
- duplicate: true, + const createRequestBody: CreateAppRequest = { + name: appName, + url: possibleUrl, + useTemplate: "true", + // The app export path + file: { + path: tmpPath, }, } - await create(createReq) - const { body: newApplication } = createReq + // Build a new request + const createRequest = { + roleId: ctx.roleId, + user: ctx.user, + request: { + body: createRequestBody, + }, + } as UserCtx + + // Build the new application + await create(createRequest) + const { body: newApplication } = createRequest if (!newApplication) { ctx.throw(500, "There was a problem duplicating the application") @@ -683,12 +704,15 @@ export async function duplicateApp(ctx: UserCtx) { ctx.status = 200 } -export async function updateAppPackage(appPackage: any, appId: any) { +export async function updateAppPackage( + appPackage: Partial, + appId: string +) { return context.doInAppContext(appId, async () => { const db = context.getAppDB() const application = await db.get(DocumentType.APP_METADATA) - const newAppPackage = { ...application, ...appPackage } + const newAppPackage: App = { ...application, ...appPackage } if (appPackage._rev !== application._rev) { newAppPackage._rev = application._rev } diff --git a/packages/server/src/api/controllers/automation.ts b/packages/server/src/api/controllers/automation.ts index 186b68f3b7..b986b5232b 100644 --- a/packages/server/src/api/controllers/automation.ts +++ b/packages/server/src/api/controllers/automation.ts @@ -20,6 +20,7 @@ import { AutomationActionStepId, AutomationResults, UserCtx, + DeleteAutomationResponse, } from "@budibase/types" import { getActionDefinitions as actionDefs } from "../../automations/actions" import sdk from "../../sdk" @@ -72,7 +73,9 @@ function cleanAutomationInputs(automation: Automation) { return automation } -export async function create(ctx: UserCtx) { +export async function create( + ctx: UserCtx +) { const db = context.getAppDB() let automation = ctx.request.body automation.appId = ctx.appId @@ -207,7 +210,7 @@ export async function find(ctx: UserCtx) { ctx.body = await db.get(ctx.params.id) } -export async function destroy(ctx: UserCtx) { +export async function destroy(ctx: UserCtx) { const db = context.getAppDB() const automationId = ctx.params.id const oldAutomation = await db.get(automationId) diff --git a/packages/server/src/api/controllers/datasource.ts b/packages/server/src/api/controllers/datasource.ts index d70c13f800..0f17c5a2f5 100644 --- a/packages/server/src/api/controllers/datasource.ts +++ b/packages/server/src/api/controllers/datasource.ts @@ -15,10 +15,14 @@ import { FieldType, RelationshipFieldMetadata, SourceName, + UpdateDatasourceRequest, UpdateDatasourceResponse, UserCtx, VerifyDatasourceRequest, VerifyDatasourceResponse, + Table, + RowValue, + DynamicVariable, } from "@budibase/types" import sdk from "../../sdk" import { builderSocket } from "../../websockets" @@ -90,8 +94,10 @@ async function invalidateVariables( existingDatasource: Datasource, updatedDatasource: Datasource ) { - const existingVariables: any = existingDatasource.config?.dynamicVariables - const updatedVariables: any = updatedDatasource.config?.dynamicVariables + const existingVariables: DynamicVariable[] = + existingDatasource.config?.dynamicVariables || [] + const updatedVariables: DynamicVariable[] = + updatedDatasource.config?.dynamicVariables || [] const toInvalidate = [] if (!existingVariables) { @@ -103,9 +109,9 @@ async function invalidateVariables( toInvalidate.push(...existingVariables) } else { // invaldate changed / removed - 
existingVariables.forEach((existing: any) => { + existingVariables.forEach(existing => { const unchanged = updatedVariables.find( - (updated: any) => + updated => existing.name === updated.name && existing.queryId === updated.queryId && existing.value === updated.value @@ -118,24 +124,32 @@ async function invalidateVariables( await invalidateDynamicVariables(toInvalidate) } -export async function update(ctx: UserCtx) { +export async function update( + ctx: UserCtx +) { const db = context.getAppDB() const datasourceId = ctx.params.datasourceId const baseDatasource = await sdk.datasources.get(datasourceId) - const auth = baseDatasource.config?.auth await invalidateVariables(baseDatasource, ctx.request.body) const isBudibaseSource = baseDatasource.type === dbCore.BUDIBASE_DATASOURCE_TYPE - const dataSourceBody = isBudibaseSource - ? { name: ctx.request.body?.name } + const dataSourceBody: Datasource = isBudibaseSource + ? { + name: ctx.request.body?.name, + type: dbCore.BUDIBASE_DATASOURCE_TYPE, + source: SourceName.BUDIBASE, + } : ctx.request.body let datasource: Datasource = { ...baseDatasource, ...sdk.datasources.mergeConfigs(dataSourceBody, baseDatasource), } + + // this block is specific to GSheets, if no auth set, set it back + const auth = baseDatasource.config?.auth if (auth && !ctx.request.body.auth) { // don't strip auth config from DB datasource.config!.auth = auth @@ -204,7 +218,7 @@ async function destroyInternalTablesBySourceId(datasourceId: string) { const db = context.getAppDB() // Get all internal tables - const internalTables = await db.allDocs( + const internalTables = await db.allDocs( getTableParams(null, { include_docs: true, }) @@ -212,8 +226,8 @@ async function destroyInternalTablesBySourceId(datasourceId: string) { // Filter by datasource and return the docs. 
const datasourceTableDocs = internalTables.rows.reduce( - (acc: any, table: any) => { - if (table.doc.sourceId == datasourceId) { + (acc: Table[], table) => { + if (table.doc?.sourceId == datasourceId) { acc.push(table.doc) } return acc @@ -254,9 +268,9 @@ export async function destroy(ctx: UserCtx) { if (datasource.type === dbCore.BUDIBASE_DATASOURCE_TYPE) { await destroyInternalTablesBySourceId(datasourceId) } else { - const queries = await db.allDocs(getQueryParams(datasourceId)) + const queries = await db.allDocs(getQueryParams(datasourceId)) await db.bulkDocs( - queries.rows.map((row: any) => ({ + queries.rows.map(row => ({ _id: row.id, _rev: row.value.rev, _deleted: true, diff --git a/packages/server/src/api/controllers/integration.ts b/packages/server/src/api/controllers/integration.ts index 9cfde31e4c..57038f8401 100644 --- a/packages/server/src/api/controllers/integration.ts +++ b/packages/server/src/api/controllers/integration.ts @@ -1,7 +1,10 @@ import { getDefinition, getDefinitions } from "../../integrations" import { SourceName, UserCtx } from "@budibase/types" -const DISABLED_EXTERNAL_INTEGRATIONS = [SourceName.AIRTABLE] +const DISABLED_EXTERNAL_INTEGRATIONS = [ + SourceName.AIRTABLE, + SourceName.BUDIBASE, +] export async function fetch(ctx: UserCtx) { const definitions = await getDefinitions() diff --git a/packages/server/src/api/controllers/layout.ts b/packages/server/src/api/controllers/layout.ts index 69e4ad91ed..c0406f50ac 100644 --- a/packages/server/src/api/controllers/layout.ts +++ b/packages/server/src/api/controllers/layout.ts @@ -1,9 +1,17 @@ import { EMPTY_LAYOUT } from "../../constants/layouts" import { generateLayoutID, getScreenParams } from "../../db/utils" import { events, context } from "@budibase/backend-core" -import { BBContext, Layout } from "@budibase/types" +import { + BBContext, + Layout, + SaveLayoutRequest, + SaveLayoutResponse, + UserCtx, +} from "@budibase/types" -export async function save(ctx: BBContext) { +export async function save( + ctx: UserCtx +) { const db = context.getAppDB() let layout = ctx.request.body diff --git a/packages/server/src/api/controllers/permission.ts b/packages/server/src/api/controllers/permission.ts index e2bd6c40e5..cdfa6d8b1c 100644 --- a/packages/server/src/api/controllers/permission.ts +++ b/packages/server/src/api/controllers/permission.ts @@ -7,6 +7,10 @@ import { GetResourcePermsResponse, ResourcePermissionInfo, GetDependantResourcesResponse, + AddPermissionResponse, + AddPermissionRequest, + RemovePermissionRequest, + RemovePermissionResponse, } from "@budibase/types" import { getRoleParams } from "../../db/utils" import { @@ -16,9 +20,9 @@ import { import { removeFromArray } from "../../utilities" import sdk from "../../sdk" -const PermissionUpdateType = { - REMOVE: "remove", - ADD: "add", +const enum PermissionUpdateType { + REMOVE = "remove", + ADD = "add", } const SUPPORTED_LEVELS = CURRENTLY_SUPPORTED_LEVELS @@ -39,7 +43,7 @@ async function updatePermissionOnRole( resourceId, level, }: { roleId: string; resourceId: string; level: PermissionLevel }, - updateType: string + updateType: PermissionUpdateType ) { const allowedAction = await sdk.permissions.resourceActionAllowed({ resourceId, @@ -107,11 +111,15 @@ async function updatePermissionOnRole( } const response = await db.bulkDocs(docUpdates) - return response.map((resp: any) => { + return response.map(resp => { const version = docUpdates.find(role => role._id === resp.id)?.version - resp._id = roles.getExternalRoleID(resp.id, version) - delete resp.id 
- return resp + const _id = roles.getExternalRoleID(resp.id, version) + return { + _id, + rev: resp.rev, + error: resp.error, + reason: resp.reason, + } }) } @@ -189,13 +197,14 @@ export async function getDependantResources( } } -export async function addPermission(ctx: UserCtx) { - ctx.body = await updatePermissionOnRole(ctx.params, PermissionUpdateType.ADD) +export async function addPermission(ctx: UserCtx) { + const params: AddPermissionRequest = ctx.params + ctx.body = await updatePermissionOnRole(params, PermissionUpdateType.ADD) } -export async function removePermission(ctx: UserCtx) { - ctx.body = await updatePermissionOnRole( - ctx.params, - PermissionUpdateType.REMOVE - ) +export async function removePermission( + ctx: UserCtx +) { + const params: RemovePermissionRequest = ctx.params + ctx.body = await updatePermissionOnRole(params, PermissionUpdateType.REMOVE) } diff --git a/packages/server/src/api/controllers/query/index.ts b/packages/server/src/api/controllers/query/index.ts index 768c921150..3c21537484 100644 --- a/packages/server/src/api/controllers/query/index.ts +++ b/packages/server/src/api/controllers/query/index.ts @@ -17,10 +17,12 @@ import { QueryPreview, QuerySchema, FieldType, - type ExecuteQueryRequest, - type ExecuteQueryResponse, - type Row, + ExecuteQueryRequest, + ExecuteQueryResponse, + Row, QueryParameter, + PreviewQueryRequest, + PreviewQueryResponse, } from "@budibase/types" import { ValidQueryNameRegex, utils as JsonUtils } from "@budibase/shared-core" @@ -73,7 +75,7 @@ const _import = async (ctx: UserCtx) => { } export { _import as import } -export async function save(ctx: UserCtx) { +export async function save(ctx: UserCtx) { const db = context.getAppDB() const query: Query = ctx.request.body @@ -134,14 +136,16 @@ function enrichParameters( return requestParameters } -export async function preview(ctx: UserCtx) { +export async function preview( + ctx: UserCtx +) { const { datasource, envVars } = await sdk.datasources.getWithEnvVars( ctx.request.body.datasourceId ) - const query: QueryPreview = ctx.request.body // preview may not have a queryId as it hasn't been saved, but if it does // this stops dynamic variables from calling the same query - const { fields, parameters, queryVerb, transformer, queryId, schema } = query + const { fields, parameters, queryVerb, transformer, queryId, schema } = + ctx.request.body let existingSchema = schema if (queryId && !existingSchema) { @@ -266,9 +270,7 @@ export async function preview(ctx: UserCtx) { }, } - const { rows, keys, info, extra } = (await Runner.run( - inputs - )) as QueryResponse + const { rows, keys, info, extra } = await Runner.run(inputs) const { previewSchema, nestedSchemaFields } = getSchemaFields(rows, keys) // if existing schema, update to include any previous schema keys @@ -281,7 +283,7 @@ export async function preview(ctx: UserCtx) { } // remove configuration before sending event delete datasource.config - await events.query.previewed(datasource, query) + await events.query.previewed(datasource, ctx.request.body) ctx.body = { rows, nestedSchemaFields, @@ -295,7 +297,10 @@ export async function preview(ctx: UserCtx) { } async function execute( - ctx: UserCtx, + ctx: UserCtx< + ExecuteQueryRequest, + ExecuteQueryResponse | Record[] + >, opts: any = { rowsOnly: false, isAutomation: false } ) { const db = context.getAppDB() @@ -350,18 +355,23 @@ async function execute( } } -export async function executeV1(ctx: UserCtx) { +export async function executeV1( + ctx: UserCtx[]> +) { return execute(ctx, { 
rowsOnly: true, isAutomation: false }) } export async function executeV2( - ctx: UserCtx, + ctx: UserCtx< + ExecuteQueryRequest, + ExecuteQueryResponse | Record[] + >, { isAutomation }: { isAutomation?: boolean } = {} ) { return execute(ctx, { rowsOnly: false, isAutomation }) } -const removeDynamicVariables = async (queryId: any) => { +const removeDynamicVariables = async (queryId: string) => { const db = context.getAppDB() const query = await db.get(queryId) const datasource = await sdk.datasources.get(query.datasourceId) @@ -384,7 +394,7 @@ const removeDynamicVariables = async (queryId: any) => { export async function destroy(ctx: UserCtx) { const db = context.getAppDB() - const queryId = ctx.params.queryId + const queryId = ctx.params.queryId as string await removeDynamicVariables(queryId) const query = await db.get(queryId) const datasource = await sdk.datasources.get(query.datasourceId) diff --git a/packages/server/src/api/controllers/role.ts b/packages/server/src/api/controllers/role.ts index ae6b89e6d4..2f5340d2e6 100644 --- a/packages/server/src/api/controllers/role.ts +++ b/packages/server/src/api/controllers/role.ts @@ -7,8 +7,14 @@ import { } from "@budibase/backend-core" import { getUserMetadataParams, InternalTables } from "../../db/utils" import { + AccessibleRolesResponse, Database, + DestroyRoleResponse, + FetchRolesResponse, + FindRoleResponse, Role, + SaveRoleRequest, + SaveRoleResponse, UserCtx, UserMetadata, UserRoles, @@ -25,43 +31,36 @@ async function updateRolesOnUserTable( db: Database, roleId: string, updateOption: string, - roleVersion: string | undefined + roleVersion?: string ) { const table = await sdk.tables.getTable(InternalTables.USER_METADATA) - const schema = table.schema + const constraints = table.schema.roleId?.constraints + if (!constraints) { + return + } + const updatedRoleId = + roleVersion === roles.RoleIDVersion.NAME + ? roles.getExternalRoleID(roleId, roleVersion) + : roleId + const indexOfRoleId = constraints.inclusion!.indexOf(updatedRoleId) const remove = updateOption === UpdateRolesOptions.REMOVED - let updated = false - for (let prop of Object.keys(schema)) { - if (prop === "roleId") { - updated = true - const constraints = schema[prop].constraints! - const updatedRoleId = - roleVersion === roles.RoleIDVersion.NAME - ? 
roles.getExternalRoleID(roleId, roleVersion) - : roleId - const indexOfRoleId = constraints.inclusion!.indexOf(updatedRoleId) - if (remove && indexOfRoleId !== -1) { - constraints.inclusion!.splice(indexOfRoleId, 1) - } else if (!remove && indexOfRoleId === -1) { - constraints.inclusion!.push(updatedRoleId) - } - break - } - } - if (updated) { - await db.put(table) + if (remove && indexOfRoleId !== -1) { + constraints.inclusion!.splice(indexOfRoleId, 1) + } else if (!remove && indexOfRoleId === -1) { + constraints.inclusion!.push(updatedRoleId) } + await db.put(table) } -export async function fetch(ctx: UserCtx) { +export async function fetch(ctx: UserCtx) { ctx.body = await roles.getAllRoles() } -export async function find(ctx: UserCtx) { +export async function find(ctx: UserCtx) { ctx.body = await roles.getRole(ctx.params.roleId) } -export async function save(ctx: UserCtx) { +export async function save(ctx: UserCtx) { const db = context.getAppDB() let { _id, name, inherits, permissionId, version } = ctx.request.body let isCreate = false @@ -107,11 +106,26 @@ export async function save(ctx: UserCtx) { ) role._rev = result.rev ctx.body = role + + const devDb = context.getDevAppDB() + const prodDb = context.getProdAppDB() + + if (await prodDb.exists()) { + const replication = new dbCore.Replication({ + source: devDb.name, + target: prodDb.name, + }) + await replication.replicate({ + filter: (doc: any, params: any) => { + return doc._id && doc._id.startsWith("role_") + }, + }) + } } -export async function destroy(ctx: UserCtx) { +export async function destroy(ctx: UserCtx) { const db = context.getAppDB() - let roleId = ctx.params.roleId + let roleId = ctx.params.roleId as string if (roles.isBuiltin(roleId)) { ctx.throw(400, "Cannot delete builtin role.") } else { @@ -144,14 +158,18 @@ export async function destroy(ctx: UserCtx) { ctx.status = 200 } -export async function accessible(ctx: UserCtx) { +export async function accessible(ctx: UserCtx) { let roleId = ctx.user?.roleId if (!roleId) { roleId = roles.BUILTIN_ROLE_IDS.PUBLIC } if (ctx.user && sharedSdk.users.isAdminOrBuilder(ctx.user)) { const appId = context.getAppId() - ctx.body = await roles.getAllRoleIds(appId) + if (!appId) { + ctx.body = [] + } else { + ctx.body = await roles.getAllRoleIds(appId) + } } else { ctx.body = await roles.getUserRoleIdHierarchy(roleId!) } diff --git a/packages/server/src/api/controllers/routing.ts b/packages/server/src/api/controllers/routing.ts index 4154c6b597..040cda4dd0 100644 --- a/packages/server/src/api/controllers/routing.ts +++ b/packages/server/src/api/controllers/routing.ts @@ -63,7 +63,7 @@ export async function fetch(ctx: UserCtx) { export async function clientFetch(ctx: UserCtx) { const routing = await getRoutingStructure() let roleId = ctx.user?.role?._id - const roleIds = await roles.getUserRoleIdHierarchy(roleId) + const roleIds = roleId ? 
await roles.getUserRoleIdHierarchy(roleId) : [] for (let topLevel of Object.values(routing.routes) as any) { for (let subpathKey of Object.keys(topLevel.subpaths)) { let found = false diff --git a/packages/server/src/api/controllers/row/ExternalRequest.ts b/packages/server/src/api/controllers/row/ExternalRequest.ts index b7dc02c0db..685af4e98e 100644 --- a/packages/server/src/api/controllers/row/ExternalRequest.ts +++ b/packages/server/src/api/controllers/row/ExternalRequest.ts @@ -7,6 +7,7 @@ import { FilterType, IncludeRelationship, ManyToManyRelationshipFieldMetadata, + ManyToOneRelationshipFieldMetadata, OneToManyRelationshipFieldMetadata, Operation, PaginationJson, @@ -18,6 +19,7 @@ import { SortJson, SortType, Table, + isManyToOne, } from "@budibase/types" import { breakExternalTableId, @@ -32,7 +34,9 @@ import { processObjectSync } from "@budibase/string-templates" import { cloneDeep } from "lodash/fp" import { processDates, processFormulas } from "../../../utilities/rowProcessor" import { db as dbCore } from "@budibase/backend-core" +import AliasTables from "./alias" import sdk from "../../../sdk" +import env from "../../../environment" export interface ManyRelationship { tableId?: string @@ -101,6 +105,39 @@ function buildFilters( } } +async function removeManyToManyRelationships( + rowId: string, + table: Table, + colName: string +) { + const tableId = table._id! + const filters = buildFilters(rowId, {}, table) + // safety check, if there are no filters on deletion bad things happen + if (Object.keys(filters).length !== 0) { + return getDatasourceAndQuery({ + endpoint: getEndpoint(tableId, Operation.DELETE), + body: { [colName]: null }, + filters, + }) + } else { + return [] + } +} + +async function removeOneToManyRelationships(rowId: string, table: Table) { + const tableId = table._id! + const filters = buildFilters(rowId, {}, table) + // safety check, if there are no filters on deletion bad things happen + if (Object.keys(filters).length !== 0) { + return getDatasourceAndQuery({ + endpoint: getEndpoint(tableId, Operation.UPDATE), + filters, + }) + } else { + return [] + } +} + /** * This function checks the incoming parameters to make sure all the inputs are * valid based on on the table schema. The main thing this is looking for is when a @@ -178,13 +215,13 @@ function generateIdForRow( function getEndpoint(tableId: string | undefined, operation: string) { if (!tableId) { - return {} + throw new Error("Cannot get endpoint information - no table ID specified") } const { datasourceId, tableName } = breakExternalTableId(tableId) return { - datasourceId, - entityId: tableName, - operation, + datasourceId: datasourceId!, + entityId: tableName!, + operation: operation as Operation, } } @@ -304,6 +341,18 @@ export class ExternalRequest { } } + async getRow(table: Table, rowId: string): Promise { + const response = await getDatasourceAndQuery({ + endpoint: getEndpoint(table._id!, Operation.READ), + filters: buildFilters(rowId, {}, table), + }) + if (Array.isArray(response) && response.length > 0) { + return response[0] + } else { + throw new Error(`Cannot fetch row by ID "${rowId}"`) + } + } + inputProcessing(row: Row | undefined, table: Table) { if (!row) { return { row, manyRelationships: [] } @@ -571,7 +620,9 @@ export class ExternalRequest { * information. 
*/ async lookupRelations(tableId: string, row: Row) { - const related: { [key: string]: any } = {} + const related: { + [key: string]: { rows: Row[]; isMany: boolean; tableId: string } + } = {} const { tableName } = breakExternalTableId(tableId) if (!tableName) { return related @@ -589,14 +640,26 @@ export class ExternalRequest { ) { continue } - const isMany = field.relationshipType === RelationshipType.MANY_TO_MANY - const tableId = isMany ? field.through : field.tableId + let tableId: string | undefined, + lookupField: string | undefined, + fieldName: string | undefined + if (isManyToMany(field)) { + tableId = field.through + lookupField = primaryKey + fieldName = field.throughTo || primaryKey + } else if (isManyToOne(field)) { + tableId = field.tableId + lookupField = field.foreignKey + fieldName = field.fieldName + } + if (!tableId || !lookupField || !fieldName) { + throw new Error( + "Unable to lookup relationships - undefined column properties." + ) + } const { tableName: relatedTableName } = breakExternalTableId(tableId) // @ts-ignore const linkPrimaryKey = this.tables[relatedTableName].primary[0] - - const lookupField = isMany ? primaryKey : field.foreignKey - const fieldName = isMany ? field.throughTo || primaryKey : field.fieldName if (!lookupField || !row[lookupField]) { continue } @@ -609,9 +672,12 @@ export class ExternalRequest { }, }) // this is the response from knex if no rows found - const rows = !response[0].read ? response : [] - const storeTo = isMany ? field.throughFrom || linkPrimaryKey : fieldName - related[storeTo] = { rows, isMany, tableId } + const rows: Row[] = + !Array.isArray(response) || response?.[0].read ? [] : response + const storeTo = isManyToMany(field) + ? field.throughFrom || linkPrimaryKey + : fieldName + related[storeTo] = { rows, isMany: isManyToMany(field), tableId } } return related } @@ -697,24 +763,43 @@ export class ExternalRequest { continue } for (let row of rows) { - const filters = buildFilters(generateIdForRow(row, table), {}, table) - // safety check, if there are no filters on deletion bad things happen - if (Object.keys(filters).length !== 0) { - const op = isMany ? Operation.DELETE : Operation.UPDATE - const body = isMany ? null : { [colName]: null } - promises.push( - getDatasourceAndQuery({ - endpoint: getEndpoint(tableId, op), - body, - filters, - }) - ) + const rowId = generateIdForRow(row, table) + const promise: Promise = isMany + ? removeManyToManyRelationships(rowId, table, colName) + : removeOneToManyRelationships(rowId, table) + if (promise) { + promises.push(promise) } } } await Promise.all(promises) } + async removeRelationshipsToRow(table: Table, rowId: string) { + const row = await this.getRow(table, rowId) + const related = await this.lookupRelations(table._id!, row) + for (let column of Object.values(table.schema)) { + const relationshipColumn = column as RelationshipFieldMetadata + if (!isManyToOne(relationshipColumn)) { + continue + } + const { rows, isMany, tableId } = related[relationshipColumn.fieldName] + const table = this.getTable(tableId)! + await Promise.all( + rows.map(row => { + const rowId = generateIdForRow(row, table) + return isMany + ? removeManyToManyRelationships( + rowId, + table, + relationshipColumn.fieldName + ) + : removeOneToManyRelationships(rowId, table) + }) + ) + } + } + /** * This function is a bit crazy, but the exact purpose of it is to protect against the scenario in which * you have column overlap in relationships, e.g. 
we join a few different tables and they all have the @@ -804,7 +889,7 @@ export class ExternalRequest { } let json = { endpoint: { - datasourceId, + datasourceId: datasourceId!, entityId: tableName, operation, }, @@ -826,17 +911,30 @@ export class ExternalRequest { }, } - // can't really use response right now - const response = await getDatasourceAndQuery(json) - // handle many to many relationships now if we know the ID (could be auto increment) + // remove any relationships that could block deletion + if (operation === Operation.DELETE && id) { + await this.removeRelationshipsToRow(table, generateRowIdField(id)) + } + + // aliasing can be disabled fully if desired + let response + if (env.SQL_ALIASING_DISABLE) { + response = await getDatasourceAndQuery(json) + } else { + const aliasing = new AliasTables(Object.keys(this.tables)) + response = await aliasing.queryWithAliasing(json) + } + + const responseRows = Array.isArray(response) ? response : [] + // handle many-to-many relationships now if we know the ID (could be auto increment) if (operation !== Operation.READ) { await this.handleManyRelationships( table._id || "", - response[0], + responseRows[0], processed.manyRelationships ) } - const output = this.outputProcessing(response, table, relationships) + const output = this.outputProcessing(responseRows, table, relationships) // if reading it'll just be an array of rows, return whole thing if (operation === Operation.READ) { return ( diff --git a/packages/server/src/api/controllers/row/alias.ts b/packages/server/src/api/controllers/row/alias.ts new file mode 100644 index 0000000000..1d586c54fd --- /dev/null +++ b/packages/server/src/api/controllers/row/alias.ts @@ -0,0 +1,204 @@ +import { + Datasource, + DatasourcePlusQueryResponse, + Operation, + QueryJson, + Row, + SearchFilters, +} from "@budibase/types" +import { getSQLClient } from "../../../sdk/app/rows/utils" +import { cloneDeep } from "lodash" +import sdk from "../../../sdk" +import { makeExternalQuery } from "../../../integrations/base/query" +import { SqlClient } from "../../../integrations/utils" + +const WRITE_OPERATIONS: Operation[] = [ + Operation.CREATE, + Operation.UPDATE, + Operation.DELETE, +] +const DISABLED_WRITE_CLIENTS: SqlClient[] = [ + SqlClient.MY_SQL, + SqlClient.MS_SQL, + SqlClient.ORACLE, +] + +class CharSequence { + static alphabet = "abcdefghijklmnopqrstuvwxyz" + counters: number[] + + constructor() { + this.counters = [0] + } + + getCharacter(): string { + const char = this.counters.map(i => CharSequence.alphabet[i]).join("") + for (let i = this.counters.length - 1; i >= 0; i--) { + if (this.counters[i] < CharSequence.alphabet.length - 1) { + this.counters[i]++ + return char + } + this.counters[i] = 0 + } + this.counters.unshift(0) + return char + } +} + +export default class AliasTables { + aliases: Record + tableAliases: Record + tableNames: string[] + charSeq: CharSequence + + constructor(tableNames: string[]) { + this.tableNames = tableNames + this.aliases = {} + this.tableAliases = {} + this.charSeq = new CharSequence() + } + + isAliasingEnabled(json: QueryJson, datasource: Datasource) { + const fieldLength = json.resource?.fields?.length + if (!fieldLength || fieldLength <= 0) { + return false + } + try { + const sqlClient = getSQLClient(datasource) + const isWrite = WRITE_OPERATIONS.includes(json.endpoint.operation) + const isDisabledClient = DISABLED_WRITE_CLIENTS.includes(sqlClient) + if (isWrite && isDisabledClient) { + return false + } + } catch (err) { + // if we can't get an SQL client, we 
+export default class AliasTables {
+  aliases: Record<string, string>
+  tableAliases: Record<string, string>
+  tableNames: string[]
+  charSeq: CharSequence
+
+  constructor(tableNames: string[]) {
+    this.tableNames = tableNames
+    this.aliases = {}
+    this.tableAliases = {}
+    this.charSeq = new CharSequence()
+  }
+
+  isAliasingEnabled(json: QueryJson, datasource: Datasource) {
+    const fieldLength = json.resource?.fields?.length
+    if (!fieldLength || fieldLength <= 0) {
+      return false
+    }
+    try {
+      const sqlClient = getSQLClient(datasource)
+      const isWrite = WRITE_OPERATIONS.includes(json.endpoint.operation)
+      const isDisabledClient = DISABLED_WRITE_CLIENTS.includes(sqlClient)
+      if (isWrite && isDisabledClient) {
+        return false
+      }
+    } catch (err) {
+      // if we can't get an SQL client, we can't alias
+      return false
+    }
+    return true
+  }
+
+  getAlias(tableName: string) {
+    if (this.aliases[tableName]) {
+      return this.aliases[tableName]
+    }
+    const char = this.charSeq.getCharacter()
+    this.aliases[tableName] = char
+    this.tableAliases[char] = tableName
+    return char
+  }
+
+  aliasField(field: string) {
+    const tableNames = this.tableNames
+    if (field.includes(".")) {
+      const [tableName, column] = field.split(".")
+      const foundTableName = tableNames.find(name => {
+        const idx = tableName.indexOf(name)
+        if (idx === -1 || idx > 1) {
+          return
+        }
+        // this might look a bit mad, but the idea is if the field is wrapped, say in "", `` or []
+        // then the idx of the table name will be 1, and we should allow for it ending in a closing
+        // character - otherwise it should be the full length if the index is zero
+        const allowedCharacterDiff = idx * 2
+        return Math.abs(tableName.length - name.length) <= allowedCharacterDiff
+      })
+      if (foundTableName) {
+        const aliasedTableName = tableName.replace(
+          foundTableName,
+          this.getAlias(foundTableName)
+        )
+        field = `${aliasedTableName}.${column}`
+      }
+    }
+    return field
+  }
+
+  reverse<T extends Row | Row[]>(rows: T): T {
+    const process = (row: Row) => {
+      const final: Row = {}
+      for (let [key, value] of Object.entries(row)) {
+        if (!key.includes(".")) {
+          final[key] = value
+        } else {
+          const [alias, column] = key.split(".")
+          const tableName = this.tableAliases[alias] || alias
+          final[`${tableName}.${column}`] = value
+        }
+      }
+      return final
+    }
+    if (Array.isArray(rows)) {
+      return rows.map(row => process(row)) as T
+    } else {
+      return process(rows) as T
+    }
+  }
+
+  aliasMap(tableNames: (string | undefined)[]) {
+    const map: Record<string, string> = {}
+    for (let tableName of tableNames) {
+      if (tableName) {
+        map[tableName] = this.getAlias(tableName)
+      }
+    }
+    return map
+  }
+
+  async queryWithAliasing(json: QueryJson): DatasourcePlusQueryResponse {
+    const datasourceId = json.endpoint.datasourceId
+    const datasource = await sdk.datasources.get(datasourceId)
+
+    const aliasingEnabled = this.isAliasingEnabled(json, datasource)
+    if (aliasingEnabled) {
+      json = cloneDeep(json)
+      // run through the query json to update anywhere a table may be used
+      if (json.resource?.fields) {
+        json.resource.fields = json.resource.fields.map(field =>
+          this.aliasField(field)
+        )
+      }
+      if (json.filters) {
+        for (let [filterKey, filter] of Object.entries(json.filters)) {
+          if (typeof filter !== "object") {
+            continue
+          }
+          const aliasedFilters: typeof filter = {}
+          for (let key of Object.keys(filter)) {
+            aliasedFilters[this.aliasField(key)] = filter[key]
+          }
+          json.filters[filterKey as keyof SearchFilters] = aliasedFilters
+        }
+      }
+      if (json.meta?.table) {
+        this.getAlias(json.meta.table.name)
+      }
+      if (json.meta?.tables) {
+        Object.keys(json.meta.tables).forEach(tableName =>
+          this.getAlias(tableName)
+        )
+      }
+      if (json.relationships) {
+        json.relationships = json.relationships.map(relationship => ({
+          ...relationship,
+          aliases: this.aliasMap([
+            relationship.through,
+            relationship.tableName,
+            json.endpoint.entityId,
+          ]),
+        }))
+      }
+      // invert and return
+      const invertedTableAliases: Record<string, string> = {}
+      for (let [key, value] of Object.entries(this.tableAliases)) {
+        invertedTableAliases[value] = key
+      }
+      json.tableAliases = invertedTableAliases
+    }
+    const response = await makeExternalQuery(datasource, json)
+    if (Array.isArray(response) && aliasingEnabled) {
+      return this.reverse(response)
+    } else {
+      return response
+    }
+  }
+}
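// NOTE (editor's sketch, not part of the patch): the full round trip,
// assuming a single table named "products" — aliasField() swaps the table
// prefix for its alias on the way out to the SQL layer, and reverse()
// restores it on the rows coming back:
const aliasing = new AliasTables(["products"])
aliasing.aliasField(`"products"."name"`) // => `"a"."name"`
aliasing.reverse({ "a.name": "Widget" }) // => { "products.name": "Widget" }
diff --git a/packages/server/src/api/controllers/row/index.ts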
b/packages/server/src/api/controllers/row/index.ts index 1ad8a2a695..54c294c42b 100644 --- a/packages/server/src/api/controllers/row/index.ts +++ b/packages/server/src/api/controllers/row/index.ts @@ -211,7 +211,7 @@ export async function validate(ctx: Ctx) { } } -export async function fetchEnrichedRow(ctx: any) { +export async function fetchEnrichedRow(ctx: UserCtx) { const tableId = utils.getTableId(ctx) ctx.body = await pickApi(tableId).fetchEnrichedRow(ctx) } @@ -223,7 +223,8 @@ export const exportRows = async ( const format = ctx.query.format - const { rows, columns, query, sort, sortOrder } = ctx.request.body + const { rows, columns, query, sort, sortOrder, delimiter, customHeaders } = + ctx.request.body if (typeof format !== "string" || !exporters.isFormat(format)) { ctx.throw( 400, @@ -241,6 +242,8 @@ export const exportRows = async ( query, sort, sortOrder, + delimiter, + customHeaders, }) ctx.attachment(fileName) ctx.body = apiFileReturn(content) diff --git a/packages/server/src/api/controllers/row/internal.ts b/packages/server/src/api/controllers/row/internal.ts index 3ee08fff2e..cc903bd74a 100644 --- a/packages/server/src/api/controllers/row/internal.ts +++ b/packages/server/src/api/controllers/row/internal.ts @@ -189,11 +189,12 @@ export async function fetchEnrichedRow(ctx: UserCtx) { const tableId = utils.getTableId(ctx) const rowId = ctx.params.rowId as string // need table to work out where links go in row, as well as the link docs - const [table, row, links] = await Promise.all([ + const [table, links] = await Promise.all([ sdk.tables.getTable(tableId), - utils.findRow(ctx, tableId, rowId), linkRows.getLinkDocuments({ tableId, rowId, fieldName }), ]) + let row = await utils.findRow(ctx, tableId, rowId) + row = await outputProcessing(table, row) const linkVals = links as LinkDocumentValue[] // look up the actual rows based on the ids diff --git a/packages/server/src/api/controllers/screen.ts b/packages/server/src/api/controllers/screen.ts index 446fe2e5fa..ee8e0ff892 100644 --- a/packages/server/src/api/controllers/screen.ts +++ b/packages/server/src/api/controllers/screen.ts @@ -7,7 +7,13 @@ import { roles, } from "@budibase/backend-core" import { updateAppPackage } from "./application" -import { Plugin, ScreenProps, BBContext, Screen } from "@budibase/types" +import { + Plugin, + ScreenProps, + BBContext, + Screen, + UserCtx, +} from "@budibase/types" import { builderSocket } from "../../websockets" export async function fetch(ctx: BBContext) { @@ -31,7 +37,7 @@ export async function fetch(ctx: BBContext) { ) } -export async function save(ctx: BBContext) { +export async function save(ctx: UserCtx) { const db = context.getAppDB() let screen = ctx.request.body diff --git a/packages/server/src/api/controllers/static/index.ts b/packages/server/src/api/controllers/static/index.ts index 5a3803e6d5..c718d5f704 100644 --- a/packages/server/src/api/controllers/static/index.ts +++ b/packages/server/src/api/controllers/static/index.ts @@ -170,6 +170,7 @@ export const serveApp = async function (ctx: Ctx) { if (!env.isJest()) { const plugins = objectStore.enrichPluginURLs(appInfo.usedPlugins) const { head, html, css } = AppComponent.render({ + title: branding?.platformTitle || `${appInfo.name}`, metaImage: branding?.metaImageUrl || "https://res.cloudinary.com/daog6scxm/image/upload/v1698759482/meta-images/plain-branded-meta-image-coral_ocxmgu.png", diff --git a/packages/server/src/api/controllers/table/external.ts b/packages/server/src/api/controllers/table/external.ts index 
f035822068..c85b46a95c 100644 --- a/packages/server/src/api/controllers/table/external.ts +++ b/packages/server/src/api/controllers/table/external.ts @@ -6,6 +6,7 @@ import { BulkImportRequest, BulkImportResponse, Operation, + RenameColumn, SaveTableRequest, SaveTableResponse, Table, @@ -25,9 +26,12 @@ function getDatasourceId(table: Table) { return breakExternalTableId(table._id).datasourceId } -export async function save(ctx: UserCtx) { +export async function save( + ctx: UserCtx, + renaming?: RenameColumn +) { const inputs = ctx.request.body - const renaming = inputs?._rename + const adding = inputs?._add // can't do this right now delete inputs.rows const tableId = ctx.request.body._id @@ -40,7 +44,7 @@ export async function save(ctx: UserCtx) { const { datasource, table } = await sdk.tables.external.save( datasourceId!, inputs, - { tableId, renaming } + { tableId, renaming, adding } ) builderSocket?.emitDatasourceUpdate(ctx, datasource) return table diff --git a/packages/server/src/api/controllers/table/index.ts b/packages/server/src/api/controllers/table/index.ts index 55a896373f..69305c461e 100644 --- a/packages/server/src/api/controllers/table/index.ts +++ b/packages/server/src/api/controllers/table/index.ts @@ -74,8 +74,15 @@ export async function save(ctx: UserCtx) { const appId = ctx.appId const table = ctx.request.body const isImport = table.rows + const renaming = ctx.request.body._rename - let savedTable = await pickApi({ table }).save(ctx) + const api = pickApi({ table }) + // do not pass _rename or _add if saving to CouchDB + if (api === internal) { + delete ctx.request.body._add + delete ctx.request.body._rename + } + let savedTable = await api.save(ctx, renaming) if (!table._id) { await events.table.created(savedTable) savedTable = sdk.tables.enrichViewSchemas(savedTable) diff --git a/packages/server/src/api/controllers/table/internal.ts b/packages/server/src/api/controllers/table/internal.ts index 8e90007d88..eb5e4b6c41 100644 --- a/packages/server/src/api/controllers/table/internal.ts +++ b/packages/server/src/api/controllers/table/internal.ts @@ -12,11 +12,12 @@ import { } from "@budibase/types" import sdk from "../../../sdk" -export async function save(ctx: UserCtx) { +export async function save( + ctx: UserCtx, + renaming?: RenameColumn +) { const { rows, ...rest } = ctx.request.body - let tableToSave: Table & { - _rename?: RenameColumn - } = { + let tableToSave: Table = { _id: generateTableID(), ...rest, // Ensure these fields are populated, even if not sent in the request @@ -28,15 +29,12 @@ export async function save(ctx: UserCtx) { tableToSave.views = {} } - const renaming = tableToSave._rename - delete tableToSave._rename - try { const { table } = await sdk.tables.internal.save(tableToSave, { user: ctx.user, rowsToImport: rows, tableId: ctx.request.body._id, - renaming: renaming, + renaming, }) return table diff --git a/packages/server/src/api/controllers/user.ts b/packages/server/src/api/controllers/user.ts index 108e29fd3d..7fc5c6e1bc 100644 --- a/packages/server/src/api/controllers/user.ts +++ b/packages/server/src/api/controllers/user.ts @@ -1,6 +1,6 @@ import { generateUserFlagID, InternalTables } from "../../db/utils" import { getFullUser } from "../../utilities/users" -import { context } from "@budibase/backend-core" +import { cache, context } from "@budibase/backend-core" import { ContextUserMetadata, Ctx, diff --git a/packages/server/src/api/controllers/view/exporters.ts b/packages/server/src/api/controllers/view/exporters.ts index 
d6caff6035..3b5f951dca 100644 --- a/packages/server/src/api/controllers/view/exporters.ts +++ b/packages/server/src/api/controllers/view/exporters.ts @@ -1,7 +1,19 @@ import { Row, TableSchema } from "@budibase/types" -export function csv(headers: string[], rows: Row[]) { - let csv = headers.map(key => `"${key}"`).join(",") +function getHeaders( + headers: string[], + customHeaders: { [key: string]: string } +) { + return headers.map(header => `"${customHeaders[header] || header}"`) +} + +export function csv( + headers: string[], + rows: Row[], + delimiter: string = ",", + customHeaders: { [key: string]: string } = {} +) { + let csv = getHeaders(headers, customHeaders).join(delimiter) for (let row of rows) { csv = `${csv}\n${headers @@ -15,7 +27,7 @@ export function csv(headers: string[], rows: Row[]) { : "" return val.trim() }) - .join(",")}` + .join(delimiter)}` } return csv } diff --git a/packages/server/src/api/routes/application.ts b/packages/server/src/api/routes/application.ts index f72a8c3c22..07db85451a 100644 --- a/packages/server/src/api/routes/application.ts +++ b/packages/server/src/api/routes/application.ts @@ -4,7 +4,6 @@ import * as deploymentController from "../controllers/deploy" import authorized from "../../middleware/authorized" import { permissions } from "@budibase/backend-core" import { applicationValidator } from "./utils/validators" -import { importToApp } from "../controllers/application" const router: Router = new Router() @@ -56,12 +55,12 @@ router ) .delete( "/api/applications/:appId", - authorized(permissions.GLOBAL_BUILDER), + authorized(permissions.BUILDER), controller.destroy ) .post( "/api/applications/:appId/duplicate", - authorized(permissions.GLOBAL_BUILDER), + authorized(permissions.BUILDER), controller.duplicateApp ) .post( diff --git a/packages/server/src/api/routes/tests/apikeys.spec.js b/packages/server/src/api/routes/tests/apikeys.spec.js index eb6933af7d..678da38f28 100644 --- a/packages/server/src/api/routes/tests/apikeys.spec.js +++ b/packages/server/src/api/routes/tests/apikeys.spec.js @@ -13,7 +13,7 @@ describe("/api/keys", () => { describe("fetch", () => { it("should allow fetching", async () => { - await setup.switchToSelfHosted(async () => { + await config.withEnv({ SELF_HOSTED: "true" }, async () => { const res = await request .get(`/api/keys`) .set(config.defaultHeaders()) @@ -34,7 +34,7 @@ describe("/api/keys", () => { describe("update", () => { it("should allow updating a value", async () => { - await setup.switchToSelfHosted(async () => { + await config.withEnv({ SELF_HOSTED: "true" }, async () => { const res = await request .put(`/api/keys/TEST`) .send({ diff --git a/packages/server/src/api/routes/tests/application.spec.ts b/packages/server/src/api/routes/tests/application.spec.ts index d202e31d2b..2a9e723423 100644 --- a/packages/server/src/api/routes/tests/application.spec.ts +++ b/packages/server/src/api/routes/tests/application.spec.ts @@ -11,65 +11,55 @@ jest.mock("../../../utilities/redis", () => ({ checkDebounce: jest.fn(), shutdown: jest.fn(), })) -import { clearAllApps, checkBuilderEndpoint } from "./utilities/TestFunctions" +import { checkBuilderEndpoint } from "./utilities/TestFunctions" import * as setup from "./utilities" import { AppStatus } from "../../../db/utils" import { events, utils, context } from "@budibase/backend-core" import env from "../../../environment" - -jest.setTimeout(15000) +import { type App } from "@budibase/types" +import tk from "timekeeper" +import * as uuid from "uuid" describe("/applications", 
() => { - let request = setup.getRequest() let config = setup.getConfig() + let app: App afterAll(setup.afterAll) - - beforeAll(async () => { - await config.init() - }) + beforeAll(async () => await config.init()) beforeEach(async () => { + app = await config.api.application.create({ name: utils.newid() }) + const deployment = await config.api.application.publish(app.appId) + expect(deployment.status).toBe("SUCCESS") jest.clearAllMocks() }) describe("create", () => { it("creates empty app", async () => { - const res = await request - .post("/api/applications") - .field("name", utils.newid()) - .set(config.defaultHeaders()) - .expect("Content-Type", /json/) - .expect(200) - expect(res.body._id).toBeDefined() + const app = await config.api.application.create({ name: utils.newid() }) + expect(app._id).toBeDefined() expect(events.app.created).toBeCalledTimes(1) }) it("creates app from template", async () => { - const res = await request - .post("/api/applications") - .field("name", utils.newid()) - .field("useTemplate", "true") - .field("templateKey", "test") - .field("templateString", "{}") // override the file download - .set(config.defaultHeaders()) - .expect("Content-Type", /json/) - .expect(200) - expect(res.body._id).toBeDefined() + const app = await config.api.application.create({ + name: utils.newid(), + useTemplate: "true", + templateKey: "test", + templateString: "{}", + }) + expect(app._id).toBeDefined() expect(events.app.created).toBeCalledTimes(1) expect(events.app.templateImported).toBeCalledTimes(1) }) it("creates app from file", async () => { - const res = await request - .post("/api/applications") - .field("name", utils.newid()) - .field("useTemplate", "true") - .set(config.defaultHeaders()) - .attach("templateFile", "src/api/routes/tests/data/export.txt") - .expect("Content-Type", /json/) - .expect(200) - expect(res.body._id).toBeDefined() + const app = await config.api.application.create({ + name: utils.newid(), + useTemplate: "true", + templateFile: "src/api/routes/tests/data/export.txt", + }) + expect(app._id).toBeDefined() expect(events.app.created).toBeCalledTimes(1) expect(events.app.fileImported).toBeCalledTimes(1) }) @@ -84,24 +74,21 @@ describe("/applications", () => { }) it("migrates navigation settings from old apps", async () => { - const res = await request - .post("/api/applications") - .field("name", "Old App") - .field("useTemplate", "true") - .set(config.defaultHeaders()) - .attach("templateFile", "src/api/routes/tests/data/old-app.txt") - .expect("Content-Type", /json/) - .expect(200) - expect(res.body._id).toBeDefined() - expect(res.body.navigation).toBeDefined() - expect(res.body.navigation.hideLogo).toBe(true) - expect(res.body.navigation.title).toBe("Custom Title") - expect(res.body.navigation.hideLogo).toBe(true) - expect(res.body.navigation.navigation).toBe("Left") - expect(res.body.navigation.navBackground).toBe( + const app = await config.api.application.create({ + name: utils.newid(), + useTemplate: "true", + templateFile: "src/api/routes/tests/data/old-app.txt", + }) + expect(app._id).toBeDefined() + expect(app.navigation).toBeDefined() + expect(app.navigation!.hideLogo).toBe(true) + expect(app.navigation!.title).toBe("Custom Title") + expect(app.navigation!.hideLogo).toBe(true) + expect(app.navigation!.navigation).toBe("Left") + expect(app.navigation!.navBackground).toBe( "var(--spectrum-global-color-blue-600)" ) - expect(res.body.navigation.navTextColor).toBe( + expect(app.navigation!.navTextColor).toBe( "var(--spectrum-global-color-gray-50)" ) 
expect(events.app.created).toBeCalledTimes(1) @@ -109,203 +96,118 @@ describe("/applications", () => { }) it("should reject with a known name", async () => { - let createError - - const app = config.getApp() - expect(app).toBeDefined() - try { - await config.createApp(app?.name!) - } catch (err: any) { - createError = err - // Must be reset back to its original value or it breaks all - // subsequent tests - config.appId = app?.appId! - } - - expect(createError).toBeDefined() - expect(createError.message).toEqual( - "Error 400 - App name is already in use." - ) + await config.api.application.create({ name: app.name }, { status: 400 }) }) it("should reject with a known url", async () => { - let createError - - const app = config.getApp() - expect(app).toBeDefined() - try { - await config.createApp("made up", app?.url!) - } catch (err: any) { - createError = err - config.appId = app?.appId! - } - - expect(createError).toBeDefined() - expect(createError.message).toEqual( - "Error 400 - App URL is already in use." + await config.api.application.create( + { name: "made up", url: app?.url! }, + { status: 400 } ) }) }) describe("fetch", () => { - beforeEach(async () => { - // Clean all apps but the onde from config - await clearAllApps(config.getTenantId(), [config.getAppId()!]) - }) - it("lists all applications", async () => { - await config.createApp("app1") - await config.createApp("app2") - - const res = await request - .get(`/api/applications?status=${AppStatus.DEV}`) - .set(config.defaultHeaders()) - .expect("Content-Type", /json/) - .expect(200) - - // two created apps + the inited app - expect(res.body.length).toBe(3) + const apps = await config.api.application.fetch({ status: AppStatus.DEV }) + expect(apps.length).toBeGreaterThan(0) }) }) describe("fetchAppDefinition", () => { it("should be able to get an apps definition", async () => { - const res = await request - .get(`/api/applications/${config.getAppId()}/definition`) - .set(config.defaultHeaders()) - .expect("Content-Type", /json/) - .expect(200) - expect(res.body.libraries.length).toEqual(1) + const res = await config.api.application.getDefinition(app.appId) + expect(res.libraries.length).toEqual(1) }) }) describe("fetchAppPackage", () => { it("should be able to fetch the app package", async () => { - const res = await request - .get(`/api/applications/${config.getAppId()}/appPackage`) - .set(config.defaultHeaders()) - .expect("Content-Type", /json/) - .expect(200) - expect(res.body.application).toBeDefined() - expect(res.body.application.appId).toEqual(config.getAppId()) + const res = await config.api.application.getAppPackage(app.appId) + expect(res.application).toBeDefined() + expect(res.application.appId).toEqual(config.getAppId()) }) }) describe("update", () => { it("should be able to update the app package", async () => { - const res = await request - .put(`/api/applications/${config.getAppId()}`) - .send({ - name: "TEST_APP", - }) - .set(config.defaultHeaders()) - .expect("Content-Type", /json/) - .expect(200) - expect(res.body._rev).toBeDefined() + const updatedApp = await config.api.application.update(app.appId, { + name: "TEST_APP", + }) + expect(updatedApp._rev).toBeDefined() expect(events.app.updated).toBeCalledTimes(1) }) }) describe("publish", () => { it("should publish app with dev app ID", async () => { - const appId = config.getAppId() - await request - .post(`/api/applications/${appId}/publish`) - .set(config.defaultHeaders()) - .expect("Content-Type", /json/) - .expect(200) + await 
config.api.application.publish(app.appId) expect(events.app.published).toBeCalledTimes(1) }) it("should publish app with prod app ID", async () => { - const appId = config.getProdAppId() - await request - .post(`/api/applications/${appId}/publish`) - .set(config.defaultHeaders()) - .expect("Content-Type", /json/) - .expect(200) + await config.api.application.publish(app.appId.replace("_dev", "")) expect(events.app.published).toBeCalledTimes(1) }) }) describe("manage client library version", () => { it("should be able to update the app client library version", async () => { - await request - .post(`/api/applications/${config.getAppId()}/client/update`) - .set(config.defaultHeaders()) - .expect("Content-Type", /json/) - .expect(200) + await config.api.application.updateClient(app.appId) expect(events.app.versionUpdated).toBeCalledTimes(1) }) it("should be able to revert the app client library version", async () => { - // We need to first update the version so that we can then revert - await request - .post(`/api/applications/${config.getAppId()}/client/update`) - .set(config.defaultHeaders()) - .expect("Content-Type", /json/) - .expect(200) - await request - .post(`/api/applications/${config.getAppId()}/client/revert`) - .set(config.defaultHeaders()) - .expect("Content-Type", /json/) - .expect(200) + await config.api.application.updateClient(app.appId) + await config.api.application.revertClient(app.appId) expect(events.app.versionReverted).toBeCalledTimes(1) }) }) describe("edited at", () => { - it("middleware should set edited at", async () => { - const headers = config.defaultHeaders() - headers["referer"] = `/${config.getAppId()}/test` - const res = await request - .put(`/api/applications/${config.getAppId()}`) - .send({ - name: "UPDATED_NAME", - }) - .set(headers) - .expect("Content-Type", /json/) - .expect(200) - expect(res.body._rev).toBeDefined() - // retrieve the app to check it - const getRes = await request - .get(`/api/applications/${config.getAppId()}/appPackage`) - .set(headers) - .expect("Content-Type", /json/) - .expect(200) - expect(getRes.body.application.updatedAt).toBeDefined() + it("middleware should set updatedAt", async () => { + const app = await tk.withFreeze( + "2021-01-01", + async () => await config.api.application.create({ name: utils.newid() }) + ) + expect(app.updatedAt).toEqual("2021-01-01T00:00:00.000Z") + + const updatedApp = await tk.withFreeze( + "2021-02-01", + async () => + await config.api.application.update(app.appId, { + name: "UPDATED_NAME", + }) + ) + expect(updatedApp._rev).toBeDefined() + expect(updatedApp.updatedAt).toEqual("2021-02-01T00:00:00.000Z") + + const fetchedApp = await config.api.application.get(app.appId) + expect(fetchedApp.updatedAt).toEqual("2021-02-01T00:00:00.000Z") }) }) describe("sync", () => { it("app should sync correctly", async () => { - const res = await request - .post(`/api/applications/${config.getAppId()}/sync`) - .set(config.defaultHeaders()) - .expect("Content-Type", /json/) - .expect(200) - expect(res.body.message).toEqual("App sync completed successfully.") + const { message } = await config.api.application.sync(app.appId) + expect(message).toEqual("App sync completed successfully.") }) it("app should not sync if production", async () => { - const res = await request - .post(`/api/applications/app_123456/sync`) - .set(config.defaultHeaders()) - .expect("Content-Type", /json/) - .expect(400) - expect(res.body.message).toEqual( + const { message } = await config.api.application.sync( + app.appId.replace("_dev", ""), 
+ { status: 400 } + ) + + expect(message).toEqual( "This action cannot be performed for production apps" ) }) it("app should not sync if sync is disabled", async () => { env._set("DISABLE_AUTO_PROD_APP_SYNC", true) - const res = await request - .post(`/api/applications/${config.getAppId()}/sync`) - .set(config.defaultHeaders()) - .expect("Content-Type", /json/) - .expect(200) - expect(res.body.message).toEqual( + const { message } = await config.api.application.sync(app.appId) + expect(message).toEqual( "App sync disabled. You can reenable with the DISABLE_AUTO_PROD_APP_SYNC environment variable." ) env._set("DISABLE_AUTO_PROD_APP_SYNC", false) @@ -313,51 +215,26 @@ describe("/applications", () => { }) describe("unpublish", () => { - beforeEach(async () => { - // We want to republish as the unpublish will delete the prod app - await config.publish() - }) - it("should unpublish app with dev app ID", async () => { - const appId = config.getAppId() - await request - .post(`/api/applications/${appId}/unpublish`) - .set(config.defaultHeaders()) - .expect(204) + await config.api.application.unpublish(app.appId) expect(events.app.unpublished).toBeCalledTimes(1) }) it("should unpublish app with prod app ID", async () => { - const appId = config.getProdAppId() - await request - .post(`/api/applications/${appId}/unpublish`) - .set(config.defaultHeaders()) - .expect(204) + await config.api.application.unpublish(app.appId.replace("_dev", "")) expect(events.app.unpublished).toBeCalledTimes(1) }) }) describe("delete", () => { it("should delete published app and dev apps with dev app ID", async () => { - await config.createApp("to-delete") - const appId = config.getAppId() - await request - .delete(`/api/applications/${appId}`) - .set(config.defaultHeaders()) - .expect("Content-Type", /json/) - .expect(200) + await config.api.application.delete(app.appId) expect(events.app.deleted).toBeCalledTimes(1) expect(events.app.unpublished).toBeCalledTimes(1) }) it("should delete published app and dev app with prod app ID", async () => { - await config.createApp("to-delete") - const appId = config.getProdAppId() - await request - .delete(`/api/applications/${appId}`) - .set(config.defaultHeaders()) - .expect("Content-Type", /json/) - .expect(200) + await config.api.application.delete(app.appId.replace("_dev", "")) expect(events.app.deleted).toBeCalledTimes(1) expect(events.app.unpublished).toBeCalledTimes(1) }) @@ -365,89 +242,170 @@ describe("/applications", () => { describe("POST /api/applications/:appId/duplicate", () => { it("should duplicate an existing app", async () => { - await config.createApp("to-dupe") - const sourceAppId = config.getProdAppId() - - const resp = await request - .post(`/api/applications/${sourceAppId}/duplicate`) - .field("name", "to-dupe copy") - .field("url", "/to-dupe-copy") - .set(config.defaultHeaders()) - .expect("Content-Type", /json/) - .expect(200) + const resp = await config.api.application.duplicateApp( + app.appId, + { + name: "to-dupe copy", + url: "/to-dupe-copy", + }, + { + status: 200, + } + ) expect(events.app.duplicated).toBeCalled() - expect(resp.body.duplicateAppId).toBeDefined() - expect(resp.body.sourceAppId).toEqual(sourceAppId) - expect(resp.body.duplicateAppId).not.toEqual(sourceAppId) + expect(resp.duplicateAppId).toBeDefined() + expect(resp.sourceAppId).toEqual(app.appId) + expect(resp.duplicateAppId).not.toEqual(app.appId) }) it("should reject an unknown app id with a 404", async () => { - await request - .post(`/api/applications/app_1234_not_real/duplicate`) 
- .field("name", "to-dupe copy") - .field("url", "/to-dupe-copy") - .set(config.defaultHeaders()) - .expect("Content-Type", /json/) - .expect(404) + await config.api.application.duplicateApp( + app.appId.slice(0, -1) + "a", + { + name: "to-dupe 123", + url: "/to-dupe-123", + }, + { + status: 404, + } + ) }) it("should reject with a known name", async () => { - await config.createApp("known name") - const sourceAppId = config.getProdAppId() - const resp = await request - .post(`/api/applications/${sourceAppId}/duplicate`) - .field("name", "known name") - .field("url", "/known-name") - .set(config.defaultHeaders()) - .expect(400) + const resp = await config.api.application.duplicateApp( + app.appId, + { + name: app.name, + url: "/known-name", + }, + { status: 400 } + ) - expect(resp.body.message).toEqual("App name is already in use.") + expect(resp.message).toEqual("App name is already in use.") }) it("should reject with a known url", async () => { - await config.createApp("known-url") - const sourceAppId = config.getProdAppId() + const resp = await config.api.application.duplicateApp( + app.appId, + { + name: "this is fine", + url: app.url, + }, + { status: 400 } + ) - const resp = await request - .post(`/api/applications/${sourceAppId}/duplicate`) - .field("name", "this is fine") - .field("url", "/known-url") - .set(config.defaultHeaders()) - .expect(400) - - expect(resp.body.message).toEqual("App URL is already in use.") + expect(resp.message).toEqual("App URL is already in use.") }) }) describe("POST /api/applications/:appId/sync", () => { it("should not sync automation logs", async () => { - // setup the apps - await config.createApp("testing-auto-logs") const automation = await config.createAutomation() - await config.publish() - await context.doInAppContext(config.getProdAppId(), () => { - return config.createAutomationLog(automation) - }) + await context.doInAppContext(app.appId, () => + config.createAutomationLog(automation) + ) - // do the sync - const appId = config.getAppId() - await request - .post(`/api/applications/${appId}/sync`) - .set(config.defaultHeaders()) - .expect("Content-Type", /json/) - .expect(200) + await config.api.application.sync(app.appId) // does exist in prod const prodLogs = await config.getAutomationLogs() expect(prodLogs.data.length).toBe(1) - // delete prod app so we revert to dev log search - await config.unpublish() + await config.api.application.unpublish(app.appId) // doesn't exist in dev const devLogs = await config.getAutomationLogs() expect(devLogs.data.length).toBe(0) }) }) + + describe("permissions", () => { + it("should only return apps a user has access to", async () => { + let user = await config.createUser({ + builder: { global: false }, + admin: { global: false }, + }) + + await config.withUser(user, async () => { + const apps = await config.api.application.fetch() + expect(apps).toHaveLength(0) + }) + + user = await config.globalUser({ + ...user, + builder: { + apps: [config.getProdAppId()], + }, + }) + + await config.withUser(user, async () => { + const apps = await config.api.application.fetch() + expect(apps).toHaveLength(1) + }) + }) + + it("should only return apps a user has access to through a custom role", async () => { + let user = await config.createUser({ + builder: { global: false }, + admin: { global: false }, + }) + + await config.withUser(user, async () => { + const apps = await config.api.application.fetch() + expect(apps).toHaveLength(0) + }) + + const role = await config.api.roles.save({ + name: "Test", + inherits: 
"PUBLIC", + permissionId: "read_only", + version: "name", + }) + + user = await config.globalUser({ + ...user, + roles: { + [config.getProdAppId()]: role.name, + }, + }) + + await config.withUser(user, async () => { + const apps = await config.api.application.fetch() + expect(apps).toHaveLength(1) + }) + }) + + it.only("should only return apps a user has access to through a custom role on a group", async () => { + let user = await config.createUser({ + builder: { global: false }, + admin: { global: false }, + }) + + await config.withUser(user, async () => { + const apps = await config.api.application.fetch() + expect(apps).toHaveLength(0) + }) + + const roleName = uuid.v4().replace(/-/g, "") + const role = await config.api.roles.save({ + name: roleName, + inherits: "PUBLIC", + permissionId: "read_only", + version: "name", + }) + + const group = await config.createGroup(role._id!) + + user = await config.globalUser({ + ...user, + userGroups: [group._id!], + }) + + await config.withUser(user, async () => { + const apps = await config.api.application.fetch() + expect(apps).toHaveLength(1) + }) + }) + }) }) diff --git a/packages/server/src/api/routes/tests/attachment.spec.ts b/packages/server/src/api/routes/tests/attachment.spec.ts index e230b0688a..aa02ea898e 100644 --- a/packages/server/src/api/routes/tests/attachment.spec.ts +++ b/packages/server/src/api/routes/tests/attachment.spec.ts @@ -29,7 +29,7 @@ describe("/api/applications/:appId/sync", () => { let resp = (await config.api.attachment.process( "ohno.exe", Buffer.from([0]), - { expectStatus: 400 } + { status: 400 } )) as unknown as APIError expect(resp.message).toContain("invalid extension") }) @@ -40,7 +40,7 @@ describe("/api/applications/:appId/sync", () => { let resp = (await config.api.attachment.process( "OHNO.EXE", Buffer.from([0]), - { expectStatus: 400 } + { status: 400 } )) as unknown as APIError expect(resp.message).toContain("invalid extension") }) @@ -51,7 +51,7 @@ describe("/api/applications/:appId/sync", () => { undefined as any, undefined as any, { - expectStatus: 400, + status: 400, } )) as unknown as APIError expect(resp.message).toContain("No file provided") diff --git a/packages/server/src/api/routes/tests/automation.spec.ts b/packages/server/src/api/routes/tests/automation.spec.ts index 178189555d..ee8fc7d544 100644 --- a/packages/server/src/api/routes/tests/automation.spec.ts +++ b/packages/server/src/api/routes/tests/automation.spec.ts @@ -394,7 +394,7 @@ describe("/automations", () => { it("deletes a automation by its ID", async () => { const automation = await config.createAutomation() const res = await request - .delete(`/api/automations/${automation.id}/${automation.rev}`) + .delete(`/api/automations/${automation._id}/${automation._rev}`) .set(config.defaultHeaders()) .expect("Content-Type", /json/) .expect(200) @@ -408,7 +408,7 @@ describe("/automations", () => { await checkBuilderEndpoint({ config, method: "DELETE", - url: `/api/automations/${automation.id}/${automation._rev}`, + url: `/api/automations/${automation._id}/${automation._rev}`, }) }) }) diff --git a/packages/server/src/api/routes/tests/backup.spec.ts b/packages/server/src/api/routes/tests/backup.spec.ts index acfac783db..c862106d58 100644 --- a/packages/server/src/api/routes/tests/backup.spec.ts +++ b/packages/server/src/api/routes/tests/backup.spec.ts @@ -19,11 +19,8 @@ describe("/backups", () => { describe("/api/backups/export", () => { it("should be able to export app", async () => { - const { body, headers } = await 
config.api.backup.exportBasicBackup( - config.getAppId()! - ) + const body = await config.api.backup.exportBasicBackup(config.getAppId()!) expect(body instanceof Buffer).toBe(true) - expect(headers["content-type"]).toEqual("application/gzip") expect(events.app.exported).toBeCalledTimes(1) }) @@ -38,15 +35,13 @@ describe("/backups", () => { it("should infer the app name from the app", async () => { tk.freeze(mocks.date.MOCK_DATE) - const { headers } = await config.api.backup.exportBasicBackup( - config.getAppId()! - ) - - expect(headers["content-disposition"]).toEqual( - `attachment; filename="${ - config.getApp()!.name - }-export-${mocks.date.MOCK_DATE.getTime()}.tar.gz"` - ) + await config.api.backup.exportBasicBackup(config.getAppId()!, { + headers: { + "content-disposition": `attachment; filename="${ + config.getApp().name + }-export-${mocks.date.MOCK_DATE.getTime()}.tar.gz"`, + }, + }) }) }) diff --git a/packages/server/src/api/routes/tests/datasource.spec.ts b/packages/server/src/api/routes/tests/datasource.spec.ts index 41229b0a2a..032da71b80 100644 --- a/packages/server/src/api/routes/tests/datasource.spec.ts +++ b/packages/server/src/api/routes/tests/datasource.spec.ts @@ -86,7 +86,7 @@ describe("/datasources", () => { }) // check variables in cache let contents = await checkCacheForDynamicVariable( - query._id, + query._id!, "variable3" ) expect(contents.rows.length).toEqual(1) @@ -102,7 +102,7 @@ describe("/datasources", () => { expect(res.body.errors).toBeUndefined() // check variables no longer in cache - contents = await checkCacheForDynamicVariable(query._id, "variable3") + contents = await checkCacheForDynamicVariable(query._id!, "variable3") expect(contents).toBe(null) }) }) diff --git a/packages/server/src/api/routes/tests/permissions.spec.ts b/packages/server/src/api/routes/tests/permissions.spec.ts index 129bc00b44..1eabf6edbb 100644 --- a/packages/server/src/api/routes/tests/permissions.spec.ts +++ b/packages/server/src/api/routes/tests/permissions.spec.ts @@ -45,7 +45,7 @@ describe("/permission", () => { table = (await config.createTable()) as typeof table row = await config.createRow() view = await config.api.viewV2.create({ tableId: table._id }) - perms = await config.api.permission.set({ + perms = await config.api.permission.add({ roleId: STD_ROLE_ID, resourceId: table._id, level: PermissionLevel.READ, @@ -88,13 +88,13 @@ describe("/permission", () => { }) it("should get resource permissions with multiple roles", async () => { - perms = await config.api.permission.set({ + perms = await config.api.permission.add({ roleId: HIGHER_ROLE_ID, resourceId: table._id, level: PermissionLevel.WRITE, }) const res = await config.api.permission.get(table._id) - expect(res.body).toEqual({ + expect(res).toEqual({ permissions: { read: { permissionType: "EXPLICIT", role: STD_ROLE_ID }, write: { permissionType: "EXPLICIT", role: HIGHER_ROLE_ID }, @@ -117,16 +117,19 @@ describe("/permission", () => { level: PermissionLevel.READ, }) - const response = await config.api.permission.set( + await config.api.permission.add( { roleId: STD_ROLE_ID, resourceId: table._id, level: PermissionLevel.EXECUTE, }, - { expectStatus: 403 } - ) - expect(response.message).toEqual( - "You are not allowed to 'read' the resource type 'datasource'" + { + status: 403, + body: { + message: + "You are not allowed to 'read' the resource type 'datasource'", + }, + } ) }) }) @@ -138,9 +141,9 @@ describe("/permission", () => { resourceId: table._id, level: PermissionLevel.READ, }) - 
expect(res.body[0]._id).toEqual(STD_ROLE_ID) + expect(res[0]._id).toEqual(STD_ROLE_ID) const permsRes = await config.api.permission.get(table._id) - expect(permsRes.body[STD_ROLE_ID]).toBeUndefined() + expect(permsRes.permissions[STD_ROLE_ID]).toBeUndefined() }) it("throw forbidden if the action is not allowed for the resource", async () => { @@ -156,10 +159,13 @@ describe("/permission", () => { resourceId: table._id, level: PermissionLevel.EXECUTE, }, - { expectStatus: 403 } - ) - expect(response.body.message).toEqual( - "You are not allowed to 'read' the resource type 'datasource'" + { + status: 403, + body: { + message: + "You are not allowed to 'read' the resource type 'datasource'", + }, + } ) }) }) @@ -181,10 +187,8 @@ describe("/permission", () => { // replicate changes before checking permissions await config.publish() - const res = await config.api.viewV2.search(view.id, undefined, { - usePublicUser: true, - }) - expect(res.body.rows[0]._id).toEqual(row._id) + const res = await config.api.viewV2.publicSearch(view.id) + expect(res.rows[0]._id).toEqual(row._id) }) it("should not be able to access the view data when the table is not public and there are no view permissions overrides", async () => { @@ -196,14 +200,11 @@ describe("/permission", () => { // replicate changes before checking permissions await config.publish() - await config.api.viewV2.search(view.id, undefined, { - expectStatus: 403, - usePublicUser: true, - }) + await config.api.viewV2.publicSearch(view.id, undefined, { status: 403 }) }) it("should ignore the view permissions if the flag is not on", async () => { - await config.api.permission.set({ + await config.api.permission.add({ roleId: STD_ROLE_ID, resourceId: view.id, level: PermissionLevel.READ, @@ -216,15 +217,14 @@ describe("/permission", () => { // replicate changes before checking permissions await config.publish() - await config.api.viewV2.search(view.id, undefined, { - expectStatus: 403, - usePublicUser: true, + await config.api.viewV2.publicSearch(view.id, undefined, { + status: 403, }) }) it("should use the view permissions if the flag is on", async () => { mocks.licenses.useViewPermissions() - await config.api.permission.set({ + await config.api.permission.add({ roleId: STD_ROLE_ID, resourceId: view.id, level: PermissionLevel.READ, @@ -237,10 +237,8 @@ describe("/permission", () => { // replicate changes before checking permissions await config.publish() - const res = await config.api.viewV2.search(view.id, undefined, { - usePublicUser: true, - }) - expect(res.body.rows[0]._id).toEqual(row._id) + const res = await config.api.viewV2.publicSearch(view.id) + expect(res.rows[0]._id).toEqual(row._id) }) it("shouldn't allow writing from a public user", async () => { @@ -277,7 +275,7 @@ describe("/permission", () => { const res = await config.api.permission.get(legacyView.name) - expect(res.body).toEqual({ + expect(res).toEqual({ permissions: { read: { permissionType: "BASE", diff --git a/packages/server/src/api/routes/tests/queries/query.seq.spec.ts b/packages/server/src/api/routes/tests/queries/query.seq.spec.ts index 52d35fa782..4347ed9044 100644 --- a/packages/server/src/api/routes/tests/queries/query.seq.spec.ts +++ b/packages/server/src/api/routes/tests/queries/query.seq.spec.ts @@ -157,7 +157,7 @@ describe("/queries", () => { }) it("should find a query in cloud", async () => { - await setup.switchToSelfHosted(async () => { + await config.withEnv({ SELF_HOSTED: "true" }, async () => { const query = await config.createQuery() const res = await request 
.get(`/api/queries/${query._id}`) @@ -397,15 +397,16 @@ describe("/queries", () => { }) it("should fail with invalid integration type", async () => { - const response = await config.api.datasource.create( - { - ...basicDatasource().datasource, - source: "INVALID_INTEGRATION" as SourceName, + const datasource: Datasource = { + ...basicDatasource().datasource, + source: "INVALID_INTEGRATION" as SourceName, + } + await config.api.datasource.create(datasource, { + status: 500, + body: { + message: "No datasource implementation found.", }, - { expectStatus: 500, rawResponse: true } - ) - - expect(response.body.message).toBe("No datasource implementation found.") + }) }) }) @@ -467,7 +468,10 @@ describe("/queries", () => { queryString: "test={{ variable3 }}", }) // check its in cache - const contents = await checkCacheForDynamicVariable(base._id, "variable3") + const contents = await checkCacheForDynamicVariable( + base._id!, + "variable3" + ) expect(contents.rows.length).toEqual(1) const responseBody = await preview(datasource, { path: "www.failonce.com", @@ -490,7 +494,7 @@ describe("/queries", () => { queryString: "test={{ variable3 }}", }) // check its in cache - let contents = await checkCacheForDynamicVariable(base._id, "variable3") + let contents = await checkCacheForDynamicVariable(base._id!, "variable3") expect(contents.rows.length).toEqual(1) // delete the query @@ -500,7 +504,7 @@ describe("/queries", () => { .expect(200) // check variables no longer in cache - contents = await checkCacheForDynamicVariable(base._id, "variable3") + contents = await checkCacheForDynamicVariable(base._id!, "variable3") expect(contents).toBe(null) }) }) diff --git a/packages/server/src/api/routes/tests/role.spec.js b/packages/server/src/api/routes/tests/role.spec.js index a653b573b2..4575f9b213 100644 --- a/packages/server/src/api/routes/tests/role.spec.js +++ b/packages/server/src/api/routes/tests/role.spec.js @@ -93,7 +93,7 @@ describe("/roles", () => { it("should be able to get the role with a permission added", async () => { const table = await config.createTable() - await config.api.permission.set({ + await config.api.permission.add({ roleId: BUILTIN_ROLE_IDS.POWER, resourceId: table._id, level: PermissionLevel.READ, diff --git a/packages/server/src/api/routes/tests/row.spec.ts b/packages/server/src/api/routes/tests/row.spec.ts index 239da36351..de411f5397 100644 --- a/packages/server/src/api/routes/tests/row.spec.ts +++ b/packages/server/src/api/routes/tests/row.spec.ts @@ -7,6 +7,7 @@ import { context, InternalTable, roles, tenancy } from "@budibase/backend-core" import { quotas } from "@budibase/pro" import { AutoFieldSubType, + DeleteRow, FieldSchema, FieldType, FieldTypeSubtypes, @@ -106,11 +107,8 @@ describe.each([ mocks.licenses.useCloudFree() }) - const loadRow = (id: string, tbl_Id: string, status = 200) => - config.api.row.get(tbl_Id, id, { expectStatus: status }) - const getRowUsage = async () => { - const { total } = await config.doInContext(null, () => + const { total } = await config.doInContext(undefined, () => quotas.getCurrentUsageValues(QuotaUsageType.STATIC, StaticQuotaName.ROWS) ) return total @@ -235,7 +233,7 @@ describe.each([ const res = await config.api.row.get(tableId, existing._id!) 
- expect(res.body).toEqual({ + expect(res).toEqual({ ...existing, ...defaultRowFields, }) @@ -265,7 +263,7 @@ describe.each([ await config.createRow() await config.api.row.get(tableId, "1234567", { - expectStatus: 404, + status: 404, }) }) @@ -395,7 +393,7 @@ describe.each([ const createdRow = await config.createRow(row) const id = createdRow._id! - const saved = (await loadRow(id, table._id!)).body + const saved = await config.api.row.get(table._id!, id) expect(saved.stringUndefined).toBe(undefined) expect(saved.stringNull).toBe(null) @@ -476,8 +474,8 @@ describe.each([ ) const row = await config.api.row.get(table._id!, createRowResponse._id!) - expect(row.body.Story).toBeUndefined() - expect(row.body).toEqual({ + expect(row.Story).toBeUndefined() + expect(row).toEqual({ ...defaultRowFields, OrderID: 1111, Country: "Aussy", @@ -524,10 +522,10 @@ describe.each([ expect(row.name).toEqual("Updated Name") expect(row.description).toEqual(existing.description) - const savedRow = await loadRow(row._id!, table._id!) + const savedRow = await config.api.row.get(table._id!, row._id!) - expect(savedRow.body.description).toEqual(existing.description) - expect(savedRow.body.name).toEqual("Updated Name") + expect(savedRow.description).toEqual(existing.description) + expect(savedRow.name).toEqual("Updated Name") await assertRowUsage(rowUsage) }) @@ -543,7 +541,7 @@ describe.each([ tableId: table._id!, name: 1, }, - { expectStatus: 400 } + { status: 400 } ) await assertRowUsage(rowUsage) @@ -582,8 +580,8 @@ describe.each([ }) let getResp = await config.api.row.get(table._id!, row._id!) - expect(getResp.body.user1[0]._id).toEqual(user1._id) - expect(getResp.body.user2[0]._id).toEqual(user2._id) + expect(getResp.user1[0]._id).toEqual(user1._id) + expect(getResp.user2[0]._id).toEqual(user2._id) let patchResp = await config.api.row.patch(table._id!, { _id: row._id!, @@ -595,8 +593,8 @@ describe.each([ expect(patchResp.user2[0]._id).toEqual(user2._id) getResp = await config.api.row.get(table._id!, row._id!) - expect(getResp.body.user1[0]._id).toEqual(user2._id) - expect(getResp.body.user2[0]._id).toEqual(user2._id) + expect(getResp.user1[0]._id).toEqual(user2._id) + expect(getResp.user2[0]._id).toEqual(user2._id) }) it("should be able to update relationships when both columns are same name", async () => { @@ -609,7 +607,7 @@ describe.each([ description: "test", relationship: [row._id], }) - row = (await config.api.row.get(table._id!, row._id!)).body + row = await config.api.row.get(table._id!, row._id!) 
expect(row.relationship.length).toBe(1) const resp = await config.api.row.patch(table._id!, { _id: row._id!, @@ -632,8 +630,10 @@ describe.each([ const createdRow = await config.createRow() const rowUsage = await getRowUsage() - const res = await config.api.row.delete(table._id!, [createdRow]) - expect(res.body[0]._id).toEqual(createdRow._id) + const res = await config.api.row.bulkDelete(table._id!, { + rows: [createdRow], + }) + expect(res[0]._id).toEqual(createdRow._id) await assertRowUsage(rowUsage - 1) }) }) @@ -682,10 +682,12 @@ describe.each([ const row2 = await config.createRow() const rowUsage = await getRowUsage() - const res = await config.api.row.delete(table._id!, [row1, row2]) + const res = await config.api.row.bulkDelete(table._id!, { + rows: [row1, row2], + }) - expect(res.body.length).toEqual(2) - await loadRow(row1._id!, table._id!, 404) + expect(res.length).toEqual(2) + await config.api.row.get(table._id!, row1._id!, { status: 404 }) await assertRowUsage(rowUsage - 2) }) @@ -697,14 +699,12 @@ describe.each([ ]) const rowUsage = await getRowUsage() - const res = await config.api.row.delete(table._id!, [ - row1, - row2._id, - { _id: row3._id }, - ]) + const res = await config.api.row.bulkDelete(table._id!, { + rows: [row1, row2._id!, { _id: row3._id }], + }) - expect(res.body.length).toEqual(3) - await loadRow(row1._id!, table._id!, 404) + expect(res.length).toEqual(3) + await config.api.row.get(table._id!, row1._id!, { status: 404 }) await assertRowUsage(rowUsage - 3) }) @@ -712,34 +712,36 @@ describe.each([ const row1 = await config.createRow() const rowUsage = await getRowUsage() - const res = await config.api.row.delete(table._id!, row1) + const res = await config.api.row.delete(table._id!, row1 as DeleteRow) - expect(res.body.id).toEqual(row1._id) - await loadRow(row1._id!, table._id!, 404) + expect(res.id).toEqual(row1._id) + await config.api.row.get(table._id!, row1._id!, { status: 404 }) await assertRowUsage(rowUsage - 1) }) it("Should ignore malformed/invalid delete requests", async () => { const rowUsage = await getRowUsage() - const res = await config.api.row.delete( - table._id!, - { not: "valid" }, - { expectStatus: 400 } - ) - expect(res.body.message).toEqual("Invalid delete rows request") - - const res2 = await config.api.row.delete( - table._id!, - { rows: 123 }, - { expectStatus: 400 } - ) - expect(res2.body.message).toEqual("Invalid delete rows request") - - const res3 = await config.api.row.delete(table._id!, "invalid", { - expectStatus: 400, + await config.api.row.delete(table._id!, { not: "valid" } as any, { + status: 400, + body: { + message: "Invalid delete rows request", + }, + }) + + await config.api.row.delete(table._id!, { rows: 123 } as any, { + status: 400, + body: { + message: "Invalid delete rows request", + }, + }) + + await config.api.row.delete(table._id!, "invalid" as any, { + status: 400, + body: { + message: "Invalid delete rows request", + }, }) - expect(res3.body.message).toEqual("Invalid delete rows request") await assertRowUsage(rowUsage) }) @@ -757,16 +759,16 @@ describe.each([ const row = await config.createRow() const rowUsage = await getRowUsage() - const res = await config.api.legacyView.get(table._id!) - expect(res.body.length).toEqual(1) - expect(res.body[0]._id).toEqual(row._id) + const rows = await config.api.legacyView.get(table._id!) 
+ expect(rows.length).toEqual(1) + expect(rows[0]._id).toEqual(row._id) await assertRowUsage(rowUsage) }) it("should throw an error if view doesn't exist", async () => { const rowUsage = await getRowUsage() - await config.api.legacyView.get("derp", { expectStatus: 404 }) + await config.api.legacyView.get("derp", { status: 404 }) await assertRowUsage(rowUsage) }) @@ -781,9 +783,9 @@ describe.each([ const row = await config.createRow() const rowUsage = await getRowUsage() - const res = await config.api.legacyView.get(view.name) - expect(res.body.length).toEqual(1) - expect(res.body[0]._id).toEqual(row._id) + const rows = await config.api.legacyView.get(view.name) + expect(rows.length).toEqual(1) + expect(rows[0]._id).toEqual(row._id) await assertRowUsage(rowUsage) }) @@ -841,8 +843,8 @@ describe.each([ linkedTable._id!, secondRow._id! ) - expect(resBasic.body.link.length).toBe(1) - expect(resBasic.body.link[0]).toEqual({ + expect(resBasic.link.length).toBe(1) + expect(resBasic.link[0]).toEqual({ _id: firstRow._id, primaryDisplay: firstRow.name, }) @@ -852,10 +854,10 @@ describe.each([ linkedTable._id!, secondRow._id! ) - expect(resEnriched.body.link.length).toBe(1) - expect(resEnriched.body.link[0]._id).toBe(firstRow._id) - expect(resEnriched.body.link[0].name).toBe("Test Contact") - expect(resEnriched.body.link[0].description).toBe("original description") + expect(resEnriched.link.length).toBe(1) + expect(resEnriched.link[0]._id).toBe(firstRow._id) + expect(resEnriched.link[0].name).toBe("Test Contact") + expect(resEnriched.link[0].description).toBe("original description") await assertRowUsage(rowUsage) }) }) @@ -880,8 +882,7 @@ describe.each([ ], tableId: table._id, }) - // the environment needs configured for this - await setup.switchToSelfHosted(async () => { + await config.withEnv({ SELF_HOSTED: "true" }, async () => { return context.doInAppContext(config.getAppId(), async () => { const enriched = await outputProcessing(table, [row]) expect((enriched as Row[])[0].attachment[0].url).toBe( @@ -903,7 +904,7 @@ describe.each([ const res = await config.api.row.exportRows(table._id!, { rows: [existing._id!], }) - const results = JSON.parse(res.text) + const results = JSON.parse(res) expect(results.length).toEqual(1) const row = results[0] @@ -922,7 +923,7 @@ describe.each([ rows: [existing._id!], columns: ["_id"], }) - const results = JSON.parse(res.text) + const results = JSON.parse(res) expect(results.length).toEqual(1) const row = results[0] @@ -1000,7 +1001,7 @@ describe.each([ }) const row = await config.api.row.get(table._id!, newRow._id!) - expect(row.body).toEqual({ + expect(row).toEqual({ name: data.name, surname: data.surname, address: data.address, @@ -1010,9 +1011,9 @@ describe.each([ id: newRow.id, ...defaultRowFields, }) - expect(row.body._viewId).toBeUndefined() - expect(row.body.age).toBeUndefined() - expect(row.body.jobTitle).toBeUndefined() + expect(row._viewId).toBeUndefined() + expect(row.age).toBeUndefined() + expect(row.jobTitle).toBeUndefined() }) }) @@ -1042,7 +1043,7 @@ describe.each([ }) const row = await config.api.row.get(tableId, newRow._id!) 
- expect(row.body).toEqual({ + expect(row).toEqual({ ...newRow, name: newData.name, address: newData.address, @@ -1051,9 +1052,9 @@ describe.each([ id: newRow.id, ...defaultRowFields, }) - expect(row.body._viewId).toBeUndefined() - expect(row.body.age).toBeUndefined() - expect(row.body.jobTitle).toBeUndefined() + expect(row._viewId).toBeUndefined() + expect(row.age).toBeUndefined() + expect(row.jobTitle).toBeUndefined() }) }) @@ -1071,12 +1072,12 @@ describe.each([ const createdRow = await config.createRow() const rowUsage = await getRowUsage() - await config.api.row.delete(view.id, [createdRow]) + await config.api.row.bulkDelete(view.id, { rows: [createdRow] }) await assertRowUsage(rowUsage - 1) await config.api.row.get(tableId, createdRow._id!, { - expectStatus: 404, + status: 404, }) }) @@ -1097,17 +1098,17 @@ describe.each([ ]) const rowUsage = await getRowUsage() - await config.api.row.delete(view.id, [rows[0], rows[2]]) + await config.api.row.bulkDelete(view.id, { rows: [rows[0], rows[2]] }) await assertRowUsage(rowUsage - 2) await config.api.row.get(tableId, rows[0]._id!, { - expectStatus: 404, + status: 404, }) await config.api.row.get(tableId, rows[2]._id!, { - expectStatus: 404, + status: 404, }) - await config.api.row.get(tableId, rows[1]._id!, { expectStatus: 200 }) + await config.api.row.get(tableId, rows[1]._id!, { status: 200 }) }) }) @@ -1154,8 +1155,8 @@ describe.each([ const createViewResponse = await config.createView() const response = await config.api.viewV2.search(createViewResponse.id) - expect(response.body.rows).toHaveLength(10) - expect(response.body).toEqual({ + expect(response.rows).toHaveLength(10) + expect(response).toEqual({ rows: expect.arrayContaining( rows.map(r => ({ _viewId: createViewResponse.id, @@ -1206,8 +1207,8 @@ describe.each([ const response = await config.api.viewV2.search(createViewResponse.id) - expect(response.body.rows).toHaveLength(5) - expect(response.body).toEqual({ + expect(response.rows).toHaveLength(5) + expect(response).toEqual({ rows: expect.arrayContaining( expectedRows.map(r => ({ _viewId: createViewResponse.id, @@ -1328,8 +1329,8 @@ describe.each([ createViewResponse.id ) - expect(response.body.rows).toHaveLength(4) - expect(response.body.rows).toEqual( + expect(response.rows).toHaveLength(4) + expect(response.rows).toEqual( expected.map(name => expect.objectContaining({ name })) ) } @@ -1357,8 +1358,8 @@ describe.each([ } ) - expect(response.body.rows).toHaveLength(4) - expect(response.body.rows).toEqual( + expect(response.rows).toHaveLength(4) + expect(response.rows).toEqual( expected.map(name => expect.objectContaining({ name })) ) } @@ -1382,8 +1383,8 @@ describe.each([ }) const response = await config.api.viewV2.search(view.id) - expect(response.body.rows).toHaveLength(10) - expect(response.body.rows).toEqual( + expect(response.rows).toHaveLength(10) + expect(response.rows).toEqual( expect.arrayContaining( rows.map(r => ({ ...(isInternal @@ -1402,7 +1403,7 @@ describe.each([ const createViewResponse = await config.createView() const response = await config.api.viewV2.search(createViewResponse.id) - expect(response.body.rows).toHaveLength(0) + expect(response.rows).toHaveLength(0) }) it("respects the limit parameter", async () => { @@ -1417,7 +1418,7 @@ describe.each([ query: {}, }) - expect(response.body.rows).toHaveLength(limit) + expect(response.rows).toHaveLength(limit) }) it("can handle pagination", async () => { @@ -1426,7 +1427,7 @@ describe.each([ const createViewResponse = await config.createView() const allRows = 
(await config.api.viewV2.search(createViewResponse.id)) - .body.rows + .rows const firstPageResponse = await config.api.viewV2.search( createViewResponse.id, @@ -1436,7 +1437,7 @@ describe.each([ query: {}, } ) - expect(firstPageResponse.body).toEqual({ + expect(firstPageResponse).toEqual({ rows: expect.arrayContaining(allRows.slice(0, 4)), totalRows: isInternal ? 10 : undefined, hasNextPage: true, @@ -1448,12 +1449,12 @@ describe.each([ { paginate: true, limit: 4, - bookmark: firstPageResponse.body.bookmark, + bookmark: firstPageResponse.bookmark, query: {}, } ) - expect(secondPageResponse.body).toEqual({ + expect(secondPageResponse).toEqual({ rows: expect.arrayContaining(allRows.slice(4, 8)), totalRows: isInternal ? 10 : undefined, hasNextPage: true, @@ -1465,11 +1466,11 @@ describe.each([ { paginate: true, limit: 4, - bookmark: secondPageResponse.body.bookmark, + bookmark: secondPageResponse.bookmark, query: {}, } ) - expect(lastPageResponse.body).toEqual({ + expect(lastPageResponse).toEqual({ rows: expect.arrayContaining(allRows.slice(8)), totalRows: isInternal ? 10 : undefined, hasNextPage: false, @@ -1489,7 +1490,7 @@ describe.each([ email: "joe@joe.com", roles: {}, }, - { expectStatus: 400 } + { status: 400 } ) expect(response.message).toBe("Cannot create new user entry.") }) @@ -1516,58 +1517,52 @@ describe.each([ it("does not allow public users to fetch by default", async () => { await config.publish() - await config.api.viewV2.search(viewId, undefined, { - expectStatus: 403, - usePublicUser: true, + await config.api.viewV2.publicSearch(viewId, undefined, { + status: 403, }) }) it("allow public users to fetch when permissions are explicit", async () => { - await config.api.permission.set({ + await config.api.permission.add({ roleId: roles.BUILTIN_ROLE_IDS.PUBLIC, level: PermissionLevel.READ, resourceId: viewId, }) await config.publish() - const response = await config.api.viewV2.search(viewId, undefined, { - usePublicUser: true, - }) + const response = await config.api.viewV2.publicSearch(viewId) - expect(response.body.rows).toHaveLength(10) + expect(response.rows).toHaveLength(10) }) it("allow public users to fetch when permissions are inherited", async () => { - await config.api.permission.set({ + await config.api.permission.add({ roleId: roles.BUILTIN_ROLE_IDS.PUBLIC, level: PermissionLevel.READ, resourceId: tableId, }) await config.publish() - const response = await config.api.viewV2.search(viewId, undefined, { - usePublicUser: true, - }) + const response = await config.api.viewV2.publicSearch(viewId) - expect(response.body.rows).toHaveLength(10) + expect(response.rows).toHaveLength(10) }) it("respects inherited permissions, not allowing not public views from public tables", async () => { - await config.api.permission.set({ + await config.api.permission.add({ roleId: roles.BUILTIN_ROLE_IDS.PUBLIC, level: PermissionLevel.READ, resourceId: tableId, }) - await config.api.permission.set({ + await config.api.permission.add({ roleId: roles.BUILTIN_ROLE_IDS.POWER, level: PermissionLevel.READ, resourceId: viewId, }) await config.publish() - await config.api.viewV2.search(viewId, undefined, { - usePublicUser: true, - expectStatus: 403, + await config.api.viewV2.publicSearch(viewId, undefined, { + status: 403, }) }) }) @@ -1754,7 +1749,7 @@ describe.each([ } const row = await config.api.row.save(tableId, rowData) - const { body: retrieved } = await config.api.row.get(tableId, row._id!) + const retrieved = await config.api.row.get(tableId, row._id!) 
expect(retrieved).toEqual({ name: rowData.name, description: rowData.description, @@ -1781,7 +1776,7 @@ describe.each([ } const row = await config.api.row.save(tableId, rowData) - const { body: retrieved } = await config.api.row.get(tableId, row._id!) + const retrieved = await config.api.row.get(tableId, row._id!) expect(retrieved).toEqual({ name: rowData.name, description: rowData.description, diff --git a/packages/server/src/api/routes/tests/table.spec.ts b/packages/server/src/api/routes/tests/table.spec.ts index ce119e56f0..77704a0408 100644 --- a/packages/server/src/api/routes/tests/table.spec.ts +++ b/packages/server/src/api/routes/tests/table.spec.ts @@ -26,6 +26,7 @@ import { TableToBuild } from "../../../tests/utilities/TestConfiguration" tk.freeze(mocks.date.MOCK_DATE) const { basicTable } = setup.structures +const ISO_REGEX_PATTERN = /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$/ describe("/tables", () => { let request = setup.getRequest() @@ -285,6 +286,35 @@ describe("/tables", () => { expect(res.body.schema.roleId).toBeDefined() }) }) + + it("should add a new column for an internal DB table", async () => { + const saveTableRequest: SaveTableRequest = { + _add: { + name: "NEW_COLUMN", + }, + ...basicTable(), + } + + const response = await request + .post(`/api/tables`) + .send(saveTableRequest) + .set(config.defaultHeaders()) + .expect("Content-Type", /json/) + .expect(200) + + const expectedResponse = { + ...saveTableRequest, + _rev: expect.stringMatching(/^\d-.+/), + _id: expect.stringMatching(/^ta_.+/), + createdAt: expect.stringMatching(ISO_REGEX_PATTERN), + updatedAt: expect.stringMatching(ISO_REGEX_PATTERN), + views: {}, + } + delete expectedResponse._add + + expect(response.status).toBe(200) + expect(response.body).toEqual(expectedResponse) + }) }) describe("import", () => { @@ -663,8 +693,7 @@ describe("/tables", () => { expect(migratedTable.schema["user column"]).toBeDefined() expect(migratedTable.schema["user relationship"]).not.toBeDefined() - const resp = await config.api.row.get(table._id!, testRow._id!) - const migratedRow = resp.body as Row + const migratedRow = await config.api.row.get(table._id!, testRow._id!) expect(migratedRow["user column"]).toBeDefined() expect(migratedRow["user relationship"]).not.toBeDefined() @@ -716,15 +745,13 @@ describe("/tables", () => { expect(migratedTable.schema["user column"]).toBeDefined() expect(migratedTable.schema["user relationship"]).not.toBeDefined() - const row1Migrated = (await config.api.row.get(table._id!, row1._id!)) - .body as Row + const row1Migrated = await config.api.row.get(table._id!, row1._id!) expect(row1Migrated["user relationship"]).not.toBeDefined() expect(row1Migrated["user column"].map((r: Row) => r._id)).toEqual( expect.arrayContaining([users[0]._id, users[1]._id]) ) - const row2Migrated = (await config.api.row.get(table._id!, row2._id!)) - .body as Row + const row2Migrated = await config.api.row.get(table._id!, row2._id!) expect(row2Migrated["user relationship"]).not.toBeDefined() expect(row2Migrated["user column"].map((r: Row) => r._id)).toEqual( expect.arrayContaining([users[1]._id, users[2]._id]) @@ -773,15 +800,13 @@ describe("/tables", () => { expect(migratedTable.schema["user column"]).toBeDefined() expect(migratedTable.schema["user relationship"]).not.toBeDefined() - const row1Migrated = (await config.api.row.get(table._id!, row1._id!)) - .body as Row + const row1Migrated = await config.api.row.get(table._id!, row1._id!) 
expect(row1Migrated["user relationship"]).not.toBeDefined() expect(row1Migrated["user column"].map((r: Row) => r._id)).toEqual( expect.arrayContaining([users[0]._id, users[1]._id]) ) - const row2Migrated = (await config.api.row.get(table._id!, row2._id!)) - .body as Row + const row2Migrated = await config.api.row.get(table._id!, row2._id!) expect(row2Migrated["user relationship"]).not.toBeDefined() expect(row2Migrated["user column"].map((r: Row) => r._id)).toEqual([ users[2]._id, @@ -831,7 +856,7 @@ describe("/tables", () => { subtype: FieldSubtype.USERS, }, }, - { expectStatus: 400 } + { status: 400 } ) }) @@ -846,7 +871,7 @@ describe("/tables", () => { subtype: FieldSubtype.USERS, }, }, - { expectStatus: 400 } + { status: 400 } ) }) @@ -861,7 +886,7 @@ describe("/tables", () => { subtype: FieldSubtype.USERS, }, }, - { expectStatus: 400 } + { status: 400 } ) }) @@ -880,7 +905,7 @@ describe("/tables", () => { subtype: FieldSubtype.USERS, }, }, - { expectStatus: 400 } + { status: 400 } ) }) }) diff --git a/packages/server/src/api/routes/tests/user.spec.ts b/packages/server/src/api/routes/tests/user.spec.ts index e6349099d7..ff8c0d54b3 100644 --- a/packages/server/src/api/routes/tests/user.spec.ts +++ b/packages/server/src/api/routes/tests/user.spec.ts @@ -27,15 +27,17 @@ describe("/users", () => { describe("fetch", () => { it("returns a list of users from an instance db", async () => { - await config.createUser({ id: "uuidx" }) - await config.createUser({ id: "uuidy" }) + const id1 = `us_${utils.newid()}` + const id2 = `us_${utils.newid()}` + await config.createUser({ _id: id1 }) + await config.createUser({ _id: id2 }) const res = await config.api.user.fetch() expect(res.length).toBe(3) const ids = res.map(u => u._id) - expect(ids).toContain(`ro_ta_users_us_uuidx`) - expect(ids).toContain(`ro_ta_users_us_uuidy`) + expect(ids).toContain(`ro_ta_users_${id1}`) + expect(ids).toContain(`ro_ta_users_${id2}`) }) it("should apply authorization to endpoint", async () => { @@ -54,7 +56,7 @@ describe("/users", () => { describe("update", () => { it("should be able to update the user", async () => { const user: UserMetadata = await config.createUser({ - id: `us_update${utils.newid()}`, + _id: `us_update${utils.newid()}`, }) user.roleId = roles.BUILTIN_ROLE_IDS.BASIC delete user._rev @@ -88,7 +90,7 @@ describe("/users", () => { }) await config.api.user.update( { ...user, roleId: roles.BUILTIN_ROLE_IDS.POWER }, - { expectStatus: 409 } + { status: 409 } ) }) }) diff --git a/packages/server/src/api/routes/tests/utilities/TestFunctions.ts b/packages/server/src/api/routes/tests/utilities/TestFunctions.ts index 53e90396aa..8a843551ac 100644 --- a/packages/server/src/api/routes/tests/utilities/TestFunctions.ts +++ b/packages/server/src/api/routes/tests/utilities/TestFunctions.ts @@ -4,6 +4,7 @@ import { AppStatus } from "../../../../db/utils" import { roles, tenancy, context, db } from "@budibase/backend-core" import env from "../../../../environment" import Nano from "@budibase/nano" +import TestConfiguration from "src/tests/utilities/TestConfiguration" class Request { appId: any @@ -52,10 +53,10 @@ export const clearAllApps = async ( }) } -export const clearAllAutomations = async (config: any) => { +export const clearAllAutomations = async (config: TestConfiguration) => { const automations = await config.getAllAutomations() for (let auto of automations) { - await context.doInAppContext(config.appId, async () => { + await context.doInAppContext(config.getAppId(), async () => { await 
config.deleteAutomation(auto)
     })
   }
 }
@@ -101,7 +102,12 @@ export const checkBuilderEndpoint = async ({
   method,
   url,
   body,
-}: any) => {
+}: {
+  config: TestConfiguration
+  method: string
+  url: string
+  body?: any
+}) => {
   const headers = await config.login({
     userId: "us_fail",
     builder: false,
diff --git a/packages/server/src/api/routes/tests/utilities/index.ts b/packages/server/src/api/routes/tests/utilities/index.ts
index 27c178fc38..dcb8ccd6c0 100644
--- a/packages/server/src/api/routes/tests/utilities/index.ts
+++ b/packages/server/src/api/routes/tests/utilities/index.ts
@@ -1,5 +1,4 @@
-import TestConfig from "../../../../tests/utilities/TestConfiguration"
-import env from "../../../../environment"
+import TestConfiguration from "../../../../tests/utilities/TestConfiguration"
 import supertest from "supertest"
 
 export * as structures from "../../../../tests/utilities/structures"
@@ -47,10 +46,10 @@ export function delay(ms: number) {
 }
 
 let request: supertest.SuperTest<supertest.Test> | undefined | null,
-  config: TestConfig | null
+  config: TestConfiguration | null
 
 export function beforeAll() {
-  config = new TestConfig()
+  config = new TestConfiguration()
   request = config.getRequest()
 }
@@ -77,21 +76,3 @@ export function getConfig() {
   }
   return config!
 }
-
-export async function switchToSelfHosted(func: any) {
-  // self hosted stops any attempts to Dynamo
-  env._set("NODE_ENV", "production")
-  env._set("SELF_HOSTED", true)
-  let error
-  try {
-    await func()
-  } catch (err) {
-    error = err
-  }
-  env._set("NODE_ENV", "jest")
-  env._set("SELF_HOSTED", false)
-  // don't throw error until after reset
-  if (error) {
-    throw error
-  }
-}
diff --git a/packages/server/src/api/routes/tests/viewV2.spec.ts b/packages/server/src/api/routes/tests/viewV2.spec.ts
index b03a73ddda..5198e63338 100644
--- a/packages/server/src/api/routes/tests/viewV2.spec.ts
+++ b/packages/server/src/api/routes/tests/viewV2.spec.ts
@@ -177,7 +177,7 @@ describe.each([
       }
 
       await config.api.viewV2.create(newView, {
-        expectStatus: 201,
+        status: 201,
       })
     })
   })
@@ -275,7 +275,7 @@ describe.each([
       const tableId = table._id!
      await config.api.viewV2.update(
        { ...view, id: generator.guid() },
-        { expectStatus: 404 }
+        { status: 404 }
      )
 
      expect(await config.api.table.get(tableId)).toEqual(
@@ -304,7 +304,7 @@ describe.each([
          },
        ],
      },
-        { expectStatus: 404 }
+        { status: 404 }
      )
 
      expect(await config.api.table.get(tableId)).toEqual(
@@ -326,12 +326,10 @@ describe.each([
          ...viewV1,
        },
        {
-          expectStatus: 400,
-          handleResponse: r => {
-            expect(r.body).toEqual({
-              message: "Only views V2 can be updated",
-              status: 400,
-            })
+          status: 400,
+          body: {
+            message: "Only views V2 can be updated",
+            status: 400,
          },
        }
      )
@@ -403,7 +401,7 @@ describe.each([
        } as Record<string, FieldSchema>,
      },
      {
-        expectStatus: 200,
+        status: 200,
      }
    )
  })
diff --git a/packages/server/src/api/routes/tests/webhook.spec.ts b/packages/server/src/api/routes/tests/webhook.spec.ts
index 38f84852b4..48a6da38bf 100644
--- a/packages/server/src/api/routes/tests/webhook.spec.ts
+++ b/packages/server/src/api/routes/tests/webhook.spec.ts
@@ -36,7 +36,7 @@ describe("/webhooks", () => {
     const automation = await config.createAutomation()
     const res = await request
       .put(`/api/webhooks`)
-      .send(basicWebhook(automation._id))
+      .send(basicWebhook(automation._id!))
       .set(config.defaultHeaders())
       .expect("Content-Type", /json/)
       .expect(200)
@@ -145,7 +145,7 @@ describe("/webhooks", () => {
     let automation = collectAutomation()
     let newAutomation = await config.createAutomation(automation)
     let syncWebhook = await config.createWebhook(
-      basicWebhook(newAutomation._id)
+      basicWebhook(newAutomation._id!)
     )
 
     // replicate changes before checking webhook
diff --git a/packages/server/src/app.ts b/packages/server/src/app.ts
index 4e84422dec..aa96a30b00 100644
--- a/packages/server/src/app.ts
+++ b/packages/server/src/app.ts
@@ -29,6 +29,6 @@ start().catch(err => {
   throw err
 })
 
-export function getServer() {
+export function getServer(): Server {
   return server
 }
diff --git a/packages/server/src/appMigrations/tests/migrations.spec.ts b/packages/server/src/appMigrations/tests/migrations.spec.ts
index 5eb8535695..7af2346934 100644
--- a/packages/server/src/appMigrations/tests/migrations.spec.ts
+++ b/packages/server/src/appMigrations/tests/migrations.spec.ts
@@ -30,9 +30,9 @@ describe("migrations", () => {
 
     const appId = config.getAppId()
 
-    const response = await config.api.application.getRaw(appId)
-
-    expect(response.headers[Header.MIGRATING_APP]).toBeUndefined()
+    await config.api.application.get(appId, {
+      headersNotPresent: [Header.MIGRATING_APP],
+    })
   })
 
   it("accessing an app that has pending migrations will attach the migrating header", async () => {
@@ -46,8 +46,10 @@
       func: async () => {},
     })
 
-    const response = await config.api.application.getRaw(appId)
-
-    expect(response.headers[Header.MIGRATING_APP]).toEqual(appId)
+    await config.api.application.get(appId, {
+      headers: {
+        [Header.MIGRATING_APP]: appId,
+      },
+    })
   })
 })
diff --git a/packages/server/src/automations/tests/createRow.spec.ts b/packages/server/src/automations/tests/createRow.spec.ts
index 0615fcdd97..0098be39a5 100644
--- a/packages/server/src/automations/tests/createRow.spec.ts
+++ b/packages/server/src/automations/tests/createRow.spec.ts
@@ -24,7 +24,7 @@ describe("test the create row action", () => {
     expect(res.id).toBeDefined()
     expect(res.revision).toBeDefined()
     expect(res.success).toEqual(true)
-    const gottenRow = await config.getRow(table._id, res.id)
+    const gottenRow = await config.api.row.get(table._id, res.id)
     expect(gottenRow.name).toEqual("test")
     expect(gottenRow.description).toEqual("test")
   })
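The test hunks above and below all follow the same migration: the `config.api.*` helpers now resolve to the parsed response body instead of a raw supertest response, and take an expectations object in place of `expectStatus`/`handleResponse`. As a rough sketch of the shape that object appears to have, inferred purely from the call sites in this patch (the real definition lives in the test utilities, which this diff does not show):

```ts
// Sketch only. Inferred from call sites such as:
//   config.api.row.get(tableId, rowId, { status: 404 })
//   config.api.application.get(appId, { headersNotPresent: [Header.MIGRATING_APP] })
//   config.api.datasource.verify({ datasource }, { body: { connected: true } })
interface Expectations {
  status?: number // expected HTTP status; 200 appears to be the default
  body?: Record<string, any> // asserted against the parsed response body
  headers?: Record<string, string> // headers that must be present with these values
  headersNotPresent?: string[] // headers that must be absent
}

// Helpers return the parsed body rather than a supertest response, which is
// why `res.body.x` becomes `res.x` throughout this patch, e.g.:
// const row = await config.api.row.get(tableId, rowId, { status: 200 })
```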
diff --git a/packages/server/src/automations/tests/updateRow.spec.ts b/packages/server/src/automations/tests/updateRow.spec.ts index b64c52147d..76823e8a11 100644 --- a/packages/server/src/automations/tests/updateRow.spec.ts +++ b/packages/server/src/automations/tests/updateRow.spec.ts @@ -36,7 +36,7 @@ describe("test the update row action", () => { it("should be able to run the action", async () => { const res = await setup.runStep(setup.actions.UPDATE_ROW.stepId, inputs) expect(res.success).toEqual(true) - const updatedRow = await config.getRow(table._id!, res.id) + const updatedRow = await config.api.row.get(table._id!, res.id) expect(updatedRow.name).toEqual("Updated name") expect(updatedRow.description).not.toEqual("") }) @@ -87,8 +87,8 @@ describe("test the update row action", () => { }) let getResp = await config.api.row.get(table._id!, row._id!) - expect(getResp.body.user1[0]._id).toEqual(user1._id) - expect(getResp.body.user2[0]._id).toEqual(user2._id) + expect(getResp.user1[0]._id).toEqual(user1._id) + expect(getResp.user2[0]._id).toEqual(user2._id) let stepResp = await setup.runStep(setup.actions.UPDATE_ROW.stepId, { rowId: row._id, @@ -103,8 +103,8 @@ describe("test the update row action", () => { expect(stepResp.success).toEqual(true) getResp = await config.api.row.get(table._id!, row._id!) - expect(getResp.body.user1[0]._id).toEqual(user2._id) - expect(getResp.body.user2[0]._id).toEqual(user2._id) + expect(getResp.user1[0]._id).toEqual(user2._id) + expect(getResp.user2[0]._id).toEqual(user2._id) }) it("should overwrite links if those links are not set and we ask it do", async () => { @@ -140,8 +140,8 @@ describe("test the update row action", () => { }) let getResp = await config.api.row.get(table._id!, row._id!) - expect(getResp.body.user1[0]._id).toEqual(user1._id) - expect(getResp.body.user2[0]._id).toEqual(user2._id) + expect(getResp.user1[0]._id).toEqual(user1._id) + expect(getResp.user2[0]._id).toEqual(user2._id) let stepResp = await setup.runStep(setup.actions.UPDATE_ROW.stepId, { rowId: row._id, @@ -163,7 +163,7 @@ describe("test the update row action", () => { expect(stepResp.success).toEqual(true) getResp = await config.api.row.get(table._id!, row._id!) 
- expect(getResp.body.user1[0]._id).toEqual(user2._id) - expect(getResp.body.user2).toBeUndefined() + expect(getResp.user1[0]._id).toEqual(user2._id) + expect(getResp.user2).toBeUndefined() }) }) diff --git a/packages/server/src/constants/layouts.ts b/packages/server/src/constants/layouts.ts index 835a5d2e15..f4eb337c2d 100644 --- a/packages/server/src/constants/layouts.ts +++ b/packages/server/src/constants/layouts.ts @@ -1,9 +1,11 @@ +import { Layout } from "@budibase/types" + export const BASE_LAYOUT_PROP_IDS = { PRIVATE: "layout_private_master", PUBLIC: "layout_public_master", } -export const EMPTY_LAYOUT = { +export const EMPTY_LAYOUT: Layout = { componentLibraries: ["@budibase/standard-components"], title: "{{ name }}", favicon: "./_shared/favicon.png", diff --git a/packages/server/src/constants/screens.ts b/packages/server/src/constants/screens.ts index 6c88b0f957..1107289ea0 100644 --- a/packages/server/src/constants/screens.ts +++ b/packages/server/src/constants/screens.ts @@ -1,5 +1,6 @@ import { roles } from "@budibase/backend-core" import { BASE_LAYOUT_PROP_IDS } from "./layouts" +import { Screen } from "@budibase/types" export function createHomeScreen( config: { @@ -9,10 +10,8 @@ export function createHomeScreen( roleId: roles.BUILTIN_ROLE_IDS.BASIC, route: "/", } -) { +): Screen { return { - description: "", - url: "", layoutId: BASE_LAYOUT_PROP_IDS.PRIVATE, props: { _id: "d834fea2-1b3e-4320-ab34-f9009f5ecc59", diff --git a/packages/server/src/db/defaultData/datasource_bb_default.ts b/packages/server/src/db/defaultData/datasource_bb_default.ts index ac540cd8fc..03aed3c118 100644 --- a/packages/server/src/db/defaultData/datasource_bb_default.ts +++ b/packages/server/src/db/defaultData/datasource_bb_default.ts @@ -1,8 +1,8 @@ import { DEFAULT_BB_DATASOURCE_ID, - DEFAULT_INVENTORY_TABLE_ID, DEFAULT_EMPLOYEE_TABLE_ID, DEFAULT_EXPENSES_TABLE_ID, + DEFAULT_INVENTORY_TABLE_ID, DEFAULT_JOBS_TABLE_ID, } from "../../constants" import { importToRows } from "../../api/controllers/table/utils" @@ -15,19 +15,21 @@ import { expensesImport } from "./expensesImport" import { db as dbCore } from "@budibase/backend-core" import { AutoFieldSubType, + Datasource, FieldType, RelationshipType, Row, + SourceName, Table, TableSchema, TableSourceType, } from "@budibase/types" -const defaultDatasource = { +const defaultDatasource: Datasource = { _id: DEFAULT_BB_DATASOURCE_ID, type: dbCore.BUDIBASE_DATASOURCE_TYPE, name: "Sample Data", - source: "BUDIBASE", + source: SourceName.BUDIBASE, config: {}, } diff --git a/packages/server/src/db/tests/linkController.spec.ts b/packages/server/src/db/tests/linkController.spec.ts index ae1922db27..4f41fd3838 100644 --- a/packages/server/src/db/tests/linkController.spec.ts +++ b/packages/server/src/db/tests/linkController.spec.ts @@ -100,7 +100,7 @@ describe("test the link controller", () => { const { _id } = await config.createRow( basicLinkedRow(t1._id!, row._id!, linkField) ) - return config.getRow(t1._id!, _id!) + return config.api.row.get(t1._id!, _id!) 
} it("should be able to confirm if two table schemas are equal", async () => { diff --git a/packages/server/src/db/utils.ts b/packages/server/src/db/utils.ts index 35d9b69e96..983cbf423c 100644 --- a/packages/server/src/db/utils.ts +++ b/packages/server/src/db/utils.ts @@ -1,13 +1,15 @@ import newid from "./newid" import { db as dbCore } from "@budibase/backend-core" import { - FieldType, + DatabaseQueryOpts, + Datasource, DocumentType, FieldSchema, - RelationshipFieldMetadata, - VirtualDocumentType, + FieldType, INTERNAL_TABLE_SOURCE_ID, - DatabaseQueryOpts, + RelationshipFieldMetadata, + SourceName, + VirtualDocumentType, } from "@budibase/types" export { DocumentType, VirtualDocumentType } from "@budibase/types" @@ -20,11 +22,11 @@ export const enum AppStatus { DEPLOYED = "published", } -export const BudibaseInternalDB = { +export const BudibaseInternalDB: Datasource = { _id: INTERNAL_TABLE_SOURCE_ID, type: dbCore.BUDIBASE_DATASOURCE_TYPE, name: "Budibase DB", - source: "BUDIBASE", + source: SourceName.BUDIBASE, config: {}, } diff --git a/packages/server/src/environment.ts b/packages/server/src/environment.ts index d0b7e91401..a7c6df29ea 100644 --- a/packages/server/src/environment.ts +++ b/packages/server/src/environment.ts @@ -76,13 +76,16 @@ const environment = { DEFAULTS.AUTOMATION_THREAD_TIMEOUT > QUERY_THREAD_TIMEOUT ? DEFAULTS.AUTOMATION_THREAD_TIMEOUT : QUERY_THREAD_TIMEOUT, - SQL_MAX_ROWS: process.env.SQL_MAX_ROWS, BB_ADMIN_USER_EMAIL: process.env.BB_ADMIN_USER_EMAIL, BB_ADMIN_USER_PASSWORD: process.env.BB_ADMIN_USER_PASSWORD, PLUGINS_DIR: process.env.PLUGINS_DIR || DEFAULTS.PLUGINS_DIR, OPENAI_API_KEY: process.env.OPENAI_API_KEY, MAX_IMPORT_SIZE_MB: process.env.MAX_IMPORT_SIZE_MB, SESSION_EXPIRY_SECONDS: process.env.SESSION_EXPIRY_SECONDS, + // SQL + SQL_MAX_ROWS: process.env.SQL_MAX_ROWS, + SQL_LOGGING_ENABLE: process.env.SQL_LOGGING_ENABLE, + SQL_ALIASING_DISABLE: process.env.SQL_ALIASING_DISABLE, // flags ALLOW_DEV_AUTOMATIONS: process.env.ALLOW_DEV_AUTOMATIONS, DISABLE_THREADING: process.env.DISABLE_THREADING, diff --git a/packages/server/src/integration-test/mysql.spec.ts b/packages/server/src/integration-test/mysql.spec.ts new file mode 100644 index 0000000000..fac2bfcfeb --- /dev/null +++ b/packages/server/src/integration-test/mysql.spec.ts @@ -0,0 +1,363 @@ +import fetch from "node-fetch" +import { + generateMakeRequest, + MakeRequestResponse, +} from "../api/routes/public/tests/utils" +import { v4 as uuidv4 } from "uuid" +import * as setup from "../api/routes/tests/utilities" +import { + Datasource, + FieldType, + Table, + TableRequest, + TableSourceType, +} from "@budibase/types" +import _ from "lodash" +import { databaseTestProviders } from "../integrations/tests/utils" +import mysql from "mysql2/promise" +import { builderSocket } from "../websockets" +// @ts-ignore +fetch.mockSearch() + +const config = setup.getConfig()! 
+ +jest.unmock("mysql2/promise") +jest.mock("../websockets", () => ({ + clientAppSocket: jest.fn(), + gridAppSocket: jest.fn(), + initialise: jest.fn(), + builderSocket: { + emitTableUpdate: jest.fn(), + emitTableDeletion: jest.fn(), + emitDatasourceUpdate: jest.fn(), + emitDatasourceDeletion: jest.fn(), + emitScreenUpdate: jest.fn(), + emitAppMetadataUpdate: jest.fn(), + emitAppPublish: jest.fn(), + }, +})) + +describe("mysql integrations", () => { + let makeRequest: MakeRequestResponse, + mysqlDatasource: Datasource, + primaryMySqlTable: Table + + beforeAll(async () => { + await config.init() + const apiKey = await config.generateApiKey() + + makeRequest = generateMakeRequest(apiKey, true) + + mysqlDatasource = await config.api.datasource.create( + await databaseTestProviders.mysql.datasource() + ) + }) + + afterAll(async () => { + await databaseTestProviders.mysql.stop() + }) + + beforeEach(async () => { + primaryMySqlTable = await config.createTable({ + name: uuidv4(), + type: "table", + primary: ["id"], + schema: { + id: { + name: "id", + type: FieldType.AUTO, + autocolumn: true, + }, + name: { + name: "name", + type: FieldType.STRING, + }, + description: { + name: "description", + type: FieldType.STRING, + }, + value: { + name: "value", + type: FieldType.NUMBER, + }, + }, + sourceId: mysqlDatasource._id, + sourceType: TableSourceType.EXTERNAL, + }) + }) + + afterAll(config.end) + + it("validate table schema", async () => { + const res = await makeRequest( + "get", + `/api/datasources/${mysqlDatasource._id}` + ) + + expect(res.status).toBe(200) + expect(res.body).toEqual({ + config: { + database: "mysql", + host: mysqlDatasource.config!.host, + password: "--secret-value--", + port: mysqlDatasource.config!.port, + user: "root", + }, + plus: true, + source: "MYSQL", + type: "datasource_plus", + _id: expect.any(String), + _rev: expect.any(String), + createdAt: expect.any(String), + updatedAt: expect.any(String), + entities: expect.any(Object), + }) + }) + + describe("POST /api/datasources/verify", () => { + it("should be able to verify the connection", async () => { + await config.api.datasource.verify( + { + datasource: await databaseTestProviders.mysql.datasource(), + }, + { + body: { + connected: true, + }, + } + ) + }) + + it("should state an invalid datasource cannot connect", async () => { + const dbConfig = await databaseTestProviders.mysql.datasource() + await config.api.datasource.verify( + { + datasource: { + ...dbConfig, + config: { + ...dbConfig.config, + password: "wrongpassword", + }, + }, + }, + { + body: { + connected: false, + error: + "Access denied for the specified user. User does not have the necessary privileges or the provided credentials are incorrect. 
Please verify the credentials, and ensure that the user has appropriate permissions.", + }, + } + ) + }) + }) + + describe("POST /api/datasources/info", () => { + it("should fetch information about mysql datasource", async () => { + const primaryName = primaryMySqlTable.name + const response = await makeRequest("post", "/api/datasources/info", { + datasource: mysqlDatasource, + }) + expect(response.status).toBe(200) + expect(response.body.tableNames).toBeDefined() + expect(response.body.tableNames.indexOf(primaryName)).not.toBe(-1) + }) + }) + + describe("Integration compatibility with mysql search_path", () => { + let client: mysql.Connection, pathDatasource: Datasource + const database = "test1" + const database2 = "test-2" + + beforeAll(async () => { + const dsConfig = await databaseTestProviders.mysql.datasource() + const dbConfig = dsConfig.config! + + client = await mysql.createConnection(dbConfig) + await client.query(`CREATE DATABASE \`${database}\`;`) + await client.query(`CREATE DATABASE \`${database2}\`;`) + + const pathConfig: any = { + ...dsConfig, + config: { + ...dbConfig, + database, + }, + } + pathDatasource = await config.api.datasource.create(pathConfig) + }) + + afterAll(async () => { + await client.query(`DROP DATABASE \`${database}\`;`) + await client.query(`DROP DATABASE \`${database2}\`;`) + await client.end() + }) + + it("discovers tables from any schema in search path", async () => { + await client.query( + `CREATE TABLE \`${database}\`.table1 (id1 SERIAL PRIMARY KEY);` + ) + const response = await makeRequest("post", "/api/datasources/info", { + datasource: pathDatasource, + }) + expect(response.status).toBe(200) + expect(response.body.tableNames).toBeDefined() + expect(response.body.tableNames).toEqual( + expect.arrayContaining(["table1"]) + ) + }) + + it("does not mix columns from different tables", async () => { + const repeated_table_name = "table_same_name" + await client.query( + `CREATE TABLE \`${database}\`.${repeated_table_name} (id SERIAL PRIMARY KEY, val1 TEXT);` + ) + await client.query( + `CREATE TABLE \`${database2}\`.${repeated_table_name} (id2 SERIAL PRIMARY KEY, val2 TEXT);` + ) + const response = await makeRequest( + "post", + `/api/datasources/${pathDatasource._id}/schema`, + { + tablesFilter: [repeated_table_name], + } + ) + expect(response.status).toBe(200) + expect( + response.body.datasource.entities[repeated_table_name].schema + ).toBeDefined() + const schema = + response.body.datasource.entities[repeated_table_name].schema + expect(Object.keys(schema).sort()).toEqual(["id", "val1"]) + }) + }) + + describe("POST /api/tables/", () => { + let client: mysql.Connection + const emitDatasourceUpdateMock = jest.fn() + + beforeEach(async () => { + client = await mysql.createConnection( + ( + await databaseTestProviders.mysql.datasource() + ).config! 
+ ) + mysqlDatasource = await config.api.datasource.create( + await databaseTestProviders.mysql.datasource() + ) + }) + + afterEach(async () => { + await client.end() + }) + + it("will emit the datasource entity schema with externalType to the front-end when adding a new column", async () => { + const addColumnToTable: TableRequest = { + type: "table", + sourceType: TableSourceType.EXTERNAL, + name: "table", + sourceId: mysqlDatasource._id!, + primary: ["id"], + schema: { + id: { + type: FieldType.AUTO, + name: "id", + autocolumn: true, + }, + new_column: { + type: FieldType.NUMBER, + name: "new_column", + }, + }, + _add: { + name: "new_column", + }, + } + + jest + .spyOn(builderSocket!, "emitDatasourceUpdate") + .mockImplementation(emitDatasourceUpdateMock) + + await makeRequest("post", "/api/tables/", addColumnToTable) + + const expectedTable: TableRequest = { + ...addColumnToTable, + schema: { + id: { + type: FieldType.NUMBER, + name: "id", + autocolumn: true, + constraints: { + presence: false, + }, + externalType: "int unsigned", + }, + new_column: { + type: FieldType.NUMBER, + name: "new_column", + autocolumn: false, + constraints: { + presence: false, + }, + externalType: "float(8,2)", + }, + }, + created: true, + _id: `${mysqlDatasource._id}__table`, + } + delete expectedTable._add + + expect(emitDatasourceUpdateMock).toBeCalledTimes(1) + const emittedDatasource: Datasource = + emitDatasourceUpdateMock.mock.calls[0][1] + expect(emittedDatasource.entities!["table"]).toEqual(expectedTable) + }) + + it("will rename a column", async () => { + await makeRequest("post", "/api/tables/", primaryMySqlTable) + + let renameColumnOnTable: TableRequest = { + ...primaryMySqlTable, + schema: { + id: { + name: "id", + type: FieldType.AUTO, + autocolumn: true, + externalType: "unsigned integer", + }, + name: { + name: "name", + type: FieldType.STRING, + externalType: "text", + }, + description: { + name: "description", + type: FieldType.STRING, + externalType: "text", + }, + age: { + name: "age", + type: FieldType.NUMBER, + externalType: "float(8,2)", + }, + }, + } + + const response = await makeRequest( + "post", + "/api/tables/", + renameColumnOnTable + ) + mysqlDatasource = ( + await makeRequest( + "post", + `/api/datasources/${mysqlDatasource._id}/schema` + ) + ).body.datasource + + expect(response.status).toEqual(200) + expect( + Object.keys(mysqlDatasource.entities![primaryMySqlTable.name].schema) + ).toEqual(["id", "name", "description", "age"]) + }) + }) +}) diff --git a/packages/server/src/integration-test/postgres.spec.ts b/packages/server/src/integration-test/postgres.spec.ts index 0031fe1136..7c14bc2b69 100644 --- a/packages/server/src/integration-test/postgres.spec.ts +++ b/packages/server/src/integration-test/postgres.spec.ts @@ -398,7 +398,7 @@ describe("postgres integrations", () => { expect(res.status).toBe(200) expect(res.body).toEqual(updatedRow) - const persistedRow = await config.getRow( + const persistedRow = await config.api.row.get( primaryPostgresTable._id!, row.id ) @@ -1040,28 +1040,37 @@ describe("postgres integrations", () => { describe("POST /api/datasources/verify", () => { it("should be able to verify the connection", async () => { - const response = await config.api.datasource.verify({ - datasource: await databaseTestProviders.postgres.datasource(), - }) - expect(response.status).toBe(200) - expect(response.body.connected).toBe(true) + await config.api.datasource.verify( + { + datasource: await databaseTestProviders.postgres.datasource(), + }, + { + body: { + 
connected: true, + }, + } + ) }) it("should state an invalid datasource cannot connect", async () => { const dbConfig = await databaseTestProviders.postgres.datasource() - const response = await config.api.datasource.verify({ - datasource: { - ...dbConfig, - config: { - ...dbConfig.config, - password: "wrongpassword", + await config.api.datasource.verify( + { + datasource: { + ...dbConfig, + config: { + ...dbConfig.config, + password: "wrongpassword", + }, }, }, - }) - - expect(response.status).toBe(200) - expect(response.body.connected).toBe(false) - expect(response.body.error).toBeDefined() + { + body: { + connected: false, + error: 'password authentication failed for user "postgres"', + }, + } + ) }) }) diff --git a/packages/server/src/integrations/base/query.ts b/packages/server/src/integrations/base/query.ts index 4f31e37744..b906ecbb1b 100644 --- a/packages/server/src/integrations/base/query.ts +++ b/packages/server/src/integrations/base/query.ts @@ -1,11 +1,15 @@ -import { QueryJson, Datasource } from "@budibase/types" +import { + QueryJson, + Datasource, + DatasourcePlusQueryResponse, +} from "@budibase/types" import { getIntegration } from "../index" import sdk from "../../sdk" export async function makeExternalQuery( datasource: Datasource, json: QueryJson -) { +): DatasourcePlusQueryResponse { datasource = await sdk.datasources.enrich(datasource) const Integration = await getIntegration(datasource.source) // query is the opinionated function diff --git a/packages/server/src/integrations/base/sql.ts b/packages/server/src/integrations/base/sql.ts index 29c8416b34..be1883c8ec 100644 --- a/packages/server/src/integrations/base/sql.ts +++ b/packages/server/src/integrations/base/sql.ts @@ -12,12 +12,13 @@ import { } from "@budibase/types" import environment from "../../environment" +type QueryFunction = (query: Knex.SqlNative, operation: Operation) => any + const envLimit = environment.SQL_MAX_ROWS ? 
parseInt(environment.SQL_MAX_ROWS)
   : null
 const BASE_LIMIT = envLimit || 5000
 
-type KnexQuery = Knex.QueryBuilder | Knex
 // these are invalid dates sent by the client, need to convert them to a real max date
 const MIN_ISO_DATE = "0000-00-00T00:00:00.000Z"
 const MAX_ISO_DATE = "9999-00-00T00:00:00.000Z"
@@ -127,10 +128,15 @@ class InternalBuilder {
 
   // right now we only do filters on the specific table being queried
   addFilters(
-    query: KnexQuery,
+    query: Knex.QueryBuilder,
     filters: SearchFilters | undefined,
-    opts: { relationship?: boolean; tableName?: string }
-  ): KnexQuery {
+    tableName: string,
+    opts: { aliases?: Record<string, string>; relationship?: boolean }
+  ): Knex.QueryBuilder {
+    function getTableName(name: string) {
+      const alias = opts.aliases?.[name]
+      return alias || name
+    }
     function iterate(
       structure: { [key: string]: any },
       fn: (key: string, value: any) => void
@@ -139,10 +145,11 @@
       const updatedKey = dbCore.removeKeyNumbering(key)
       const isRelationshipField = updatedKey.includes(".")
       if (!opts.relationship && !isRelationshipField) {
-        fn(`${opts.tableName}.${updatedKey}`, value)
+        fn(`${getTableName(tableName)}.${updatedKey}`, value)
       }
       if (opts.relationship && isRelationshipField) {
-        fn(updatedKey, value)
+        const [filterTableName, property] = updatedKey.split(".")
+        fn(`${getTableName(filterTableName)}.${property}`, value)
       }
     }
   }
@@ -314,32 +321,47 @@
     return query
   }
 
-  addSorting(query: KnexQuery, json: QueryJson): KnexQuery {
+  addSorting(query: Knex.QueryBuilder, json: QueryJson): Knex.QueryBuilder {
     let { sort, paginate } = json
     const table = json.meta?.table
+    const aliases = json.tableAliases
+    const aliased =
+      table?.name && aliases?.[table.name] ? aliases[table.name] : table?.name
     if (sort && Object.keys(sort || {}).length > 0) {
       for (let [key, value] of Object.entries(sort)) {
         const direction =
           value.direction === SortDirection.ASCENDING ? "asc" : "desc"
-        query = query.orderBy(`${table?.name}.${key}`, direction)
+        query = query.orderBy(`${aliased}.${key}`, direction)
       }
     } else if (this.client === SqlClient.MS_SQL && paginate?.limit) {
       // @ts-ignore
-      query = query.orderBy(`${table?.name}.${table?.primary[0]}`)
+      query = query.orderBy(`${aliased}.${table?.primary[0]}`)
     }
     return query
   }
 
+  tableNameWithSchema(
+    tableName: string,
+    opts?: { alias?: string; schema?: string }
+  ) {
+    let withSchema = opts?.schema ? `${opts.schema}.${tableName}` : tableName
+    if (opts?.alias) {
+      withSchema += ` as ${opts.alias}`
+    }
+    return withSchema
+  }
+
   addRelationships(
-    query: KnexQuery,
+    query: Knex.QueryBuilder,
     fromTable: string,
     relationships: RelationshipsJson[] | undefined,
-    schema: string | undefined
-  ): KnexQuery {
+    schema: string | undefined,
+    aliases?: Record<string, string>
+  ): Knex.QueryBuilder {
     if (!relationships) {
       return query
     }
-    const tableSets: Record<string, [RelationshipsJson]> = {}
+    const tableSets: Record<string, RelationshipsJson[]> = {}
     // aggregate into table sets (all the same to tables)
     for (let relationship of relationships) {
       const keyObj: { toTable: string; throughTable: string | undefined } = {
@@ -358,10 +380,17 @@
     }
     for (let [key, relationships] of Object.entries(tableSets)) {
       const { toTable, throughTable } = JSON.parse(key)
-      const toTableWithSchema = schema ? `${schema}.${toTable}` : toTable
-      const throughTableWithSchema = schema
-        ? `${schema}.${throughTable}`
-        : throughTable
+      const toAlias = aliases?.[toTable] || toTable,
+        throughAlias = aliases?.[throughTable] || throughTable,
+        fromAlias = aliases?.[fromTable] || fromTable
+      let toTableWithSchema = this.tableNameWithSchema(toTable, {
+        alias: toAlias,
+        schema,
+      })
+      let throughTableWithSchema = this.tableNameWithSchema(throughTable, {
+        alias: throughAlias,
+        schema,
+      })
       if (!throughTable) {
         // @ts-ignore
         query = query.leftJoin(toTableWithSchema, function () {
           const from = relationship.from,
             to = relationship.to
           // @ts-ignore
-          this.orOn(`${fromTable}.${from}`, "=", `${toTable}.${to}`)
+          this.orOn(`${fromAlias}.${from}`, "=", `${toAlias}.${to}`)
         }
       })
       } else {
@@ -381,9 +410,9 @@
             const from = relationship.from
             // @ts-ignore
             this.orOn(
-              `${fromTable}.${fromPrimary}`,
+              `${fromAlias}.${fromPrimary}`,
               "=",
-              `${throughTable}.${from}`
+              `${throughAlias}.${from}`
             )
           }
         })
@@ -392,7 +421,7 @@
             const toPrimary = relationship.toPrimary
             const to = relationship.to
             // @ts-ignore
-            this.orOn(`${toTable}.${toPrimary}`, `${throughTable}.${to}`)
+            this.orOn(`${toAlias}.${toPrimary}`, `${throughAlias}.${to}`)
           }
         })
       }
@@ -400,12 +429,27 @@
     return query.limit(BASE_LIMIT)
   }
 
-  create(knex: Knex, json: QueryJson, opts: QueryOptions): KnexQuery {
-    const { endpoint, body } = json
-    let query: KnexQuery = knex(endpoint.entityId)
+  knexWithAlias(
+    knex: Knex,
+    endpoint: QueryJson["endpoint"],
+    aliases?: QueryJson["tableAliases"]
+  ): Knex.QueryBuilder {
+    const tableName = endpoint.entityId
+    const tableAlias = aliases?.[tableName]
+    let table: string | Record<string, string> = tableName
+    if (tableAlias) {
+      table = { [tableAlias]: tableName }
+    }
+    let query = knex(table)
     if (endpoint.schema) {
       query = query.withSchema(endpoint.schema)
     }
+    return query
+  }
+
+  create(knex: Knex, json: QueryJson, opts: QueryOptions): Knex.QueryBuilder {
+    const { endpoint, body } = json
+    let query = this.knexWithAlias(knex, endpoint)
     const parsedBody = parseBody(body)
     // make sure no null values in body for creation
     for (let [key, value] of Object.entries(parsedBody)) {
@@ -422,12 +466,9 @@
     }
   }
 
-  bulkCreate(knex: Knex, json: QueryJson): KnexQuery {
+  bulkCreate(knex: Knex, json: QueryJson): Knex.QueryBuilder {
     const { endpoint, body } = json
-    let query: KnexQuery = knex(endpoint.entityId)
-    if (endpoint.schema) {
-      query = query.withSchema(endpoint.schema)
-    }
+    let query = this.knexWithAlias(knex, endpoint)
     if (!Array.isArray(body)) {
       return query
     }
@@ -435,8 +476,10 @@
     return query.insert(parsedBody)
   }
 
-  read(knex: Knex, json: QueryJson, limit: number): KnexQuery {
-    let { endpoint, resource, filters, paginate, relationships } = json
+  read(knex: Knex, json: QueryJson, limit: number): Knex.QueryBuilder {
+    let { endpoint, resource, filters, paginate, relationships, tableAliases } =
+      json
+
     const tableName = endpoint.entityId
     // select all if not specified
     if (!resource) {
@@ -462,21 +505,20 @@
       foundLimit = paginate.limit
     }
     // start building the query
-    let query: KnexQuery = knex(tableName).limit(foundLimit)
-    if (endpoint.schema) {
-      query = query.withSchema(endpoint.schema)
-    }
+    let query = this.knexWithAlias(knex, endpoint, tableAliases)
+    query = query.limit(foundLimit)
     if (foundOffset) {
       query = query.offset(foundOffset)
     }
-    query = this.addFilters(query, filters, { tableName })
+    query = this.addFilters(query, filters, 
tableName, { + aliases: tableAliases, + }) // add sorting to pre-query query = this.addSorting(query, json) - // @ts-ignore - let preQuery: KnexQuery = knex({ - // @ts-ignore - [tableName]: query, - }).select(selectStatement) + const alias = tableAliases?.[tableName] || tableName + let preQuery = knex({ + [alias]: query, + } as any).select(selectStatement) as any // have to add after as well (this breaks MS-SQL) if (this.client !== SqlClient.MS_SQL) { preQuery = this.addSorting(preQuery, json) @@ -486,19 +528,22 @@ class InternalBuilder { preQuery, tableName, relationships, - endpoint.schema + endpoint.schema, + tableAliases ) - return this.addFilters(query, filters, { relationship: true }) + return this.addFilters(query, filters, tableName, { + relationship: true, + aliases: tableAliases, + }) } - update(knex: Knex, json: QueryJson, opts: QueryOptions): KnexQuery { - const { endpoint, body, filters } = json - let query: KnexQuery = knex(endpoint.entityId) - if (endpoint.schema) { - query = query.withSchema(endpoint.schema) - } + update(knex: Knex, json: QueryJson, opts: QueryOptions): Knex.QueryBuilder { + const { endpoint, body, filters, tableAliases } = json + let query = this.knexWithAlias(knex, endpoint, tableAliases) const parsedBody = parseBody(body) - query = this.addFilters(query, filters, { tableName: endpoint.entityId }) + query = this.addFilters(query, filters, endpoint.entityId, { + aliases: tableAliases, + }) // mysql can't use returning if (opts.disableReturning) { return query.update(parsedBody) @@ -507,13 +552,12 @@ class InternalBuilder { } } - delete(knex: Knex, json: QueryJson, opts: QueryOptions): KnexQuery { - const { endpoint, filters } = json - let query: KnexQuery = knex(endpoint.entityId) - if (endpoint.schema) { - query = query.withSchema(endpoint.schema) - } - query = this.addFilters(query, filters, { tableName: endpoint.entityId }) + delete(knex: Knex, json: QueryJson, opts: QueryOptions): Knex.QueryBuilder { + const { endpoint, filters, tableAliases } = json + let query = this.knexWithAlias(knex, endpoint, tableAliases) + query = this.addFilters(query, filters, endpoint.entityId, { + aliases: tableAliases, + }) // mysql can't use returning if (opts.disableReturning) { return query.delete() @@ -537,10 +581,10 @@ class SqlQueryBuilder extends SqlTableQueryBuilder { * which for the sake of mySQL stops adding the returning statement to inserts, updates and deletes. * @return the query ready to be passed to the driver. 
*/ - _query(json: QueryJson, opts: QueryOptions = {}) { + _query(json: QueryJson, opts: QueryOptions = {}): Knex.SqlNative | Knex.Sql { const sqlClient = this.getSqlClient() const client = knex({ client: sqlClient }) - let query + let query: Knex.QueryBuilder const builder = new InternalBuilder(sqlClient) switch (this._operation(json)) { case Operation.CREATE: @@ -565,12 +609,10 @@ class SqlQueryBuilder extends SqlTableQueryBuilder { default: throw `Operation type is not supported by SQL query builder` } - - // @ts-ignore return query.toSQL().toNative() } - async getReturningRow(queryFn: Function, json: QueryJson) { + async getReturningRow(queryFn: QueryFunction, json: QueryJson) { if (!json.extra || !json.extra.idFilter) { return {} } @@ -582,7 +624,7 @@ class SqlQueryBuilder extends SqlTableQueryBuilder { resource: { fields: [], }, - filters: json.extra.idFilter, + filters: json.extra?.idFilter, paginate: { limit: 1, }, @@ -611,7 +653,7 @@ class SqlQueryBuilder extends SqlTableQueryBuilder { // this function recreates the returning functionality of postgres async queryWithReturning( json: QueryJson, - queryFn: Function, + queryFn: QueryFunction, processFn: Function = (result: any) => result ) { const sqlClient = this.getSqlClient() @@ -648,6 +690,18 @@ class SqlQueryBuilder extends SqlTableQueryBuilder { } return results.length ? results : [{ [operation.toLowerCase()]: true }] } + + log(query: string, values?: any[]) { + if (!environment.SQL_LOGGING_ENABLE) { + return + } + const sqlClient = this.getSqlClient() + let string = `[SQL] [${sqlClient.toUpperCase()}] query="${query}"` + if (values) { + string += ` values="${values.join(", ")}"` + } + console.log(string) + } } export default SqlQueryBuilder diff --git a/packages/server/src/integrations/base/sqlTable.ts b/packages/server/src/integrations/base/sqlTable.ts index f553690b23..f3560791e6 100644 --- a/packages/server/src/integrations/base/sqlTable.ts +++ b/packages/server/src/integrations/base/sqlTable.ts @@ -9,7 +9,7 @@ import { Table, FieldType, } from "@budibase/types" -import { breakExternalTableId } from "../utils" +import { breakExternalTableId, SqlClient } from "../utils" import SchemaBuilder = Knex.SchemaBuilder import CreateTableBuilder = Knex.CreateTableBuilder import { utils } from "@budibase/shared-core" @@ -135,7 +135,8 @@ function generateSchema( // need to check if any columns have been deleted if (oldTable) { const deletedColumns = Object.entries(oldTable.schema).filter( - ([key, column]) => isIgnoredType(column.type) && table.schema[key] == null + ([key, column]) => + !isIgnoredType(column.type) && table.schema[key] == null ) deletedColumns.forEach(([key, column]) => { if (renamed?.old === key || isIgnoredType(column.type)) { @@ -197,13 +198,14 @@ class SqlTableQueryBuilder { return json.endpoint.operation } - _tableQuery(json: QueryJson): any { + _tableQuery(json: QueryJson): Knex.Sql | Knex.SqlNative { let client = knex({ client: this.sqlClient }).schema - if (json?.endpoint?.schema) { - client = client.withSchema(json.endpoint.schema) + let schemaName = json?.endpoint?.schema + if (schemaName) { + client = client.withSchema(schemaName) } - let query + let query: Knex.SchemaBuilder if (!json.table || !json.meta || !json.meta.tables) { throw "Cannot execute without table being specified" } @@ -215,6 +217,18 @@ class SqlTableQueryBuilder { if (!json.meta || !json.meta.table) { throw "Must specify old table for update" } + // renameColumn does not work for MySQL, so return a raw query + if (this.sqlClient === 
SqlClient.MY_SQL && json.meta.renamed) {
+      const updatedColumn = json.meta.renamed.updated
+      const tableName = schemaName
+        ? `\`${schemaName}\`.\`${json.table.name}\``
+        : `\`${json.table.name}\``
+      const externalType = json.table.schema[updatedColumn].externalType!
+      return {
+        sql: `alter table ${tableName} change column \`${json.meta.renamed.old}\` \`${updatedColumn}\` ${externalType};`,
+        bindings: [],
+      }
+    }
     query = buildUpdateTable(
       client,
       json.table,
diff --git a/packages/server/src/integrations/googlesheets.ts b/packages/server/src/integrations/googlesheets.ts
index 58c867ea0b..32398bde41 100644
--- a/packages/server/src/integrations/googlesheets.ts
+++ b/packages/server/src/integrations/googlesheets.ts
@@ -16,6 +16,7 @@ import {
   Table,
   TableRequest,
   TableSourceType,
+  DatasourcePlusQueryResponse,
 } from "@budibase/types"
 import { OAuth2Client } from "google-auth-library"
 import {
@@ -334,7 +335,7 @@ class GoogleSheetsIntegration implements DatasourcePlus {
     return { tables: externalTables, errors }
   }
 
-  async query(json: QueryJson) {
+  async query(json: QueryJson): DatasourcePlusQueryResponse {
     const sheet = json.endpoint.entityId
     switch (json.endpoint.operation) {
       case Operation.CREATE:
@@ -384,7 +385,7 @@
     }
     try {
       await this.connect()
-      return await this.client.addSheet({ title: name, headerValues: [name] })
+      await this.client.addSheet({ title: name, headerValues: [name] })
     } catch (err) {
       console.error("Error creating new table in google sheets", err)
       throw err
@@ -450,7 +451,7 @@
     try {
       await this.connect()
       const sheetToDelete = this.client.sheetsByTitle[sheet]
-      return await sheetToDelete.delete()
+      await sheetToDelete.delete()
     } catch (err) {
       console.error("Error deleting table in google sheets", err)
       throw err
diff --git a/packages/server/src/integrations/index.ts b/packages/server/src/integrations/index.ts
index 49761bac85..8cbc29251b 100644
--- a/packages/server/src/integrations/index.ts
+++ b/packages/server/src/integrations/index.ts
@@ -14,7 +14,12 @@ import firebase from "./firebase"
 import redis from "./redis"
 import snowflake from "./snowflake"
 import oracle from "./oracle"
-import { SourceName, Integration, PluginType } from "@budibase/types"
+import {
+  SourceName,
+  Integration,
+  PluginType,
+  IntegrationBase,
+} from "@budibase/types"
 import { getDatasourcePlugin } from "../utilities/fileSystem"
 import env from "../environment"
 import cloneDeep from "lodash/cloneDeep"
@@ -37,26 +42,31 @@ const DEFINITIONS: Record<SourceName, Integration | undefined> = {
   [SourceName.REDIS]: redis.schema,
   [SourceName.SNOWFLAKE]: snowflake.schema,
   [SourceName.ORACLE]: undefined,
+  [SourceName.BUDIBASE]: undefined,
 }
 
-const INTEGRATIONS: Record<SourceName, any> = {
-  [SourceName.POSTGRES]: postgres.integration,
-  [SourceName.DYNAMODB]: dynamodb.integration,
-  [SourceName.MONGODB]: mongodb.integration,
-  [SourceName.ELASTICSEARCH]: elasticsearch.integration,
-  [SourceName.COUCHDB]: couchdb.integration,
-  [SourceName.SQL_SERVER]: sqlServer.integration,
-  [SourceName.S3]: s3.integration,
-  [SourceName.AIRTABLE]: airtable.integration,
-  [SourceName.MYSQL]: mysql.integration,
-  [SourceName.ARANGODB]: arangodb.integration,
-  [SourceName.REST]: rest.integration,
-  [SourceName.FIRESTORE]: firebase.integration,
-  [SourceName.GOOGLE_SHEETS]: googlesheets.integration,
-  [SourceName.REDIS]: redis.integration,
-  [SourceName.SNOWFLAKE]: snowflake.integration,
-  [SourceName.ORACLE]: undefined,
-}
+type IntegrationBaseConstructor = new (...args: any[]) => IntegrationBase
+
+const INTEGRATIONS: Record<SourceName, IntegrationBaseConstructor | undefined> =
+  {
+    [SourceName.POSTGRES]: postgres.integration,
+    [SourceName.DYNAMODB]: dynamodb.integration,
+    [SourceName.MONGODB]: mongodb.integration,
+    [SourceName.ELASTICSEARCH]: elasticsearch.integration,
+    [SourceName.COUCHDB]: couchdb.integration,
+    [SourceName.SQL_SERVER]: sqlServer.integration,
+    [SourceName.S3]: s3.integration,
+    [SourceName.AIRTABLE]: airtable.integration,
+    [SourceName.MYSQL]: mysql.integration,
+    [SourceName.ARANGODB]: arangodb.integration,
+    [SourceName.REST]: rest.integration,
+    [SourceName.FIRESTORE]: firebase.integration,
+    [SourceName.GOOGLE_SHEETS]: googlesheets.integration,
+    [SourceName.REDIS]: redis.integration,
+    [SourceName.SNOWFLAKE]: snowflake.integration,
+    [SourceName.ORACLE]: undefined,
+    [SourceName.BUDIBASE]: undefined,
+  }
 
 // optionally add oracle integration if the oracle binary can be installed
 if (
diff --git a/packages/server/src/integrations/microsoftSqlServer.ts b/packages/server/src/integrations/microsoftSqlServer.ts
index d0a06d4476..f87e248ac0 100644
--- a/packages/server/src/integrations/microsoftSqlServer.ts
+++ b/packages/server/src/integrations/microsoftSqlServer.ts
@@ -13,6 +13,7 @@ import {
   SourceName,
   Schema,
   TableSourceType,
+  DatasourcePlusQueryResponse,
 } from "@budibase/types"
 import {
   getSqlQuery,
@@ -329,6 +330,7 @@
         operation === Operation.CREATE
           ? `${query.sql}; SELECT SCOPE_IDENTITY() AS id;`
           : query.sql
+      this.log(sql, query.bindings)
       return await request.query(sql)
     } catch (err: any) {
       let readableMessage = getReadableErrorMessage(
@@ -492,7 +494,7 @@
     return response.recordset || [{ deleted: true }]
   }
 
-  async query(json: QueryJson) {
+  async query(json: QueryJson): DatasourcePlusQueryResponse {
     const schema = this.config.schema
     await this.connect()
     if (schema && schema !== DEFAULT_SCHEMA && json?.endpoint) {
diff --git a/packages/server/src/integrations/mysql.ts b/packages/server/src/integrations/mysql.ts
index 8ec73307f4..f629381807 100644
--- a/packages/server/src/integrations/mysql.ts
+++ b/packages/server/src/integrations/mysql.ts
@@ -12,7 +12,7 @@ import {
   SourceName,
   Schema,
   TableSourceType,
-  FieldType,
+  DatasourcePlusQueryResponse,
 } from "@budibase/types"
 import {
   getSqlQuery,
@@ -261,6 +261,7 @@
     const bindings = opts?.disableCoercion
       ? baseBindings
       : bindingTypeCoerce(baseBindings)
+    this.log(query.sql, bindings)
     // Node MySQL is callback based, so we must wrap our call in a promise
     const response = await this.client!.query(query.sql, bindings)
     return response[0]
@@ -380,7 +381,7 @@
     return results.length ? 
results : [{ deleted: true }] } - async query(json: QueryJson) { + async query(json: QueryJson): DatasourcePlusQueryResponse { await this.connect() try { const queryFn = (query: any) => diff --git a/packages/server/src/integrations/oracle.ts b/packages/server/src/integrations/oracle.ts index cf76622581..08f3058d63 100644 --- a/packages/server/src/integrations/oracle.ts +++ b/packages/server/src/integrations/oracle.ts @@ -12,6 +12,8 @@ import { ConnectionInfo, Schema, TableSourceType, + Row, + DatasourcePlusQueryResponse, } from "@budibase/types" import { buildExternalTableId, @@ -368,6 +370,7 @@ class OracleIntegration extends Sql implements DatasourcePlus { const options: ExecuteOptions = { autoCommit: true } const bindings: BindParameters = query.bindings || [] + this.log(query.sql, bindings) return await connection.execute(query.sql, bindings, options) } finally { if (connection) { @@ -419,9 +422,9 @@ class OracleIntegration extends Sql implements DatasourcePlus { : [{ deleted: true }] } - async query(json: QueryJson) { + async query(json: QueryJson): DatasourcePlusQueryResponse { const operation = this._operation(json) - const input = this._query(json, { disableReturning: true }) + const input = this._query(json, { disableReturning: true }) as SqlQuery if (Array.isArray(input)) { const responses = [] for (let query of input) { @@ -443,7 +446,7 @@ class OracleIntegration extends Sql implements DatasourcePlus { if (deletedRows?.rows?.length) { return deletedRows.rows } else if (response.rows?.length) { - return response.rows + return response.rows as Row[] } else { // get the last row that was updated if ( @@ -454,7 +457,7 @@ class OracleIntegration extends Sql implements DatasourcePlus { const lastRow = await this.internalQuery({ sql: `SELECT * FROM \"${json.endpoint.entityId}\" WHERE ROWID = '${response.lastRowid}'`, }) - return lastRow.rows + return lastRow.rows as Row[] } else { return [{ [operation.toLowerCase()]: true }] } diff --git a/packages/server/src/integrations/postgres.ts b/packages/server/src/integrations/postgres.ts index 9949dee6bb..635d834761 100644 --- a/packages/server/src/integrations/postgres.ts +++ b/packages/server/src/integrations/postgres.ts @@ -12,6 +12,7 @@ import { SourceName, Schema, TableSourceType, + DatasourcePlusQueryResponse, } from "@budibase/types" import { getSqlQuery, @@ -268,7 +269,9 @@ class PostgresIntegration extends Sql implements DatasourcePlus { } } try { - return await client.query(query.sql, query.bindings || []) + const bindings = query.bindings || [] + this.log(query.sql, bindings) + return await client.query(query.sql, bindings) } catch (err: any) { await this.closeConnection() let readableMessage = getReadableErrorMessage( @@ -417,9 +420,9 @@ class PostgresIntegration extends Sql implements DatasourcePlus { return response.rows.length ? 
response.rows : [{ deleted: true }] } - async query(json: QueryJson) { + async query(json: QueryJson): DatasourcePlusQueryResponse { const operation = this._operation(json).toLowerCase() - const input = this._query(json) + const input = this._query(json) as SqlQuery if (Array.isArray(input)) { const responses = [] for (let query of input) { diff --git a/packages/server/src/integrations/tests/sql.spec.ts b/packages/server/src/integrations/tests/sql.spec.ts index 5cc4849d03..f4eaf2859c 100644 --- a/packages/server/src/integrations/tests/sql.spec.ts +++ b/packages/server/src/integrations/tests/sql.spec.ts @@ -1,5 +1,12 @@ -const Sql = require("../base/sql").default -const { SqlClient } = require("../utils") +import { SqlClient } from "../utils" +import Sql from "../base/sql" +import { + Operation, + QueryJson, + TableSourceType, + Table, + FieldType, +} from "@budibase/types" const TABLE_NAME = "test" @@ -17,7 +24,7 @@ function generateReadJson({ filters, sort, paginate, -}: any = {}) { +}: any = {}): QueryJson { return { endpoint: endpoint(table || TABLE_NAME, "READ"), resource: { @@ -28,41 +35,51 @@ function generateReadJson({ paginate: paginate || {}, meta: { table: { + type: "table", + sourceType: TableSourceType.EXTERNAL, + sourceId: "SOURCE_ID", + schema: {}, name: table || TABLE_NAME, primary: ["id"], - }, + } as any, }, } } -function generateCreateJson(table = TABLE_NAME, body = {}) { +function generateCreateJson(table = TABLE_NAME, body = {}): QueryJson { return { endpoint: endpoint(table, "CREATE"), body, } } -function generateUpdateJson(table = TABLE_NAME, body = {}, filters = {}) { +function generateUpdateJson({ + table = TABLE_NAME, + body = {}, + filters = {}, + meta = {}, +}): QueryJson { return { endpoint: endpoint(table, "UPDATE"), filters, body, + meta, } } -function generateDeleteJson(table = TABLE_NAME, filters = {}) { +function generateDeleteJson(table = TABLE_NAME, filters = {}): QueryJson { return { endpoint: endpoint(table, "DELETE"), filters, } } -function generateRelationshipJson(config: { schema?: string } = {}) { +function generateRelationshipJson(config: { schema?: string } = {}): QueryJson { return { endpoint: { datasourceId: "Postgres", entityId: "brands", - operation: "READ", + operation: Operation.READ, schema: config.schema, }, resource: { @@ -76,7 +93,6 @@ function generateRelationshipJson(config: { schema?: string } = {}) { }, filters: {}, sort: {}, - paginate: {}, relationships: [ { from: "brand_id", @@ -240,17 +256,17 @@ describe("SQL query builder", () => { it("should test an update statement", () => { const query = sql._query( - generateUpdateJson( - TABLE_NAME, - { + generateUpdateJson({ + table: TABLE_NAME, + body: { name: "John", }, - { + filters: { equal: { id: 1001, }, - } - ) + }, + }) ) expect(query).toEqual({ bindings: ["John", 1001], @@ -502,7 +518,7 @@ describe("SQL query builder", () => { const query = sql._query(generateRelationshipJson({ schema: "production" })) expect(query).toEqual({ bindings: [500, 5000], - sql: `select "brands"."brand_id" as "brands.brand_id", "brands"."brand_name" as "brands.brand_name", "products"."product_id" as "products.product_id", "products"."product_name" as "products.product_name", "products"."brand_id" as "products.brand_id" from (select * from "production"."brands" limit $1) as "brands" left join "production"."products" on "brands"."brand_id" = "products"."brand_id" limit $2`, + sql: `select "brands"."brand_id" as "brands.brand_id", "brands"."brand_name" as "brands.brand_name", "products"."product_id" as 
"products.product_id", "products"."product_name" as "products.product_name", "products"."brand_id" as "products.brand_id" from (select * from "production"."brands" limit $1) as "brands" left join "production"."products" as "products" on "brands"."brand_id" = "products"."brand_id" limit $2`, }) }) @@ -510,7 +526,7 @@ describe("SQL query builder", () => { const query = sql._query(generateRelationshipJson()) expect(query).toEqual({ bindings: [500, 5000], - sql: `select "brands"."brand_id" as "brands.brand_id", "brands"."brand_name" as "brands.brand_name", "products"."product_id" as "products.product_id", "products"."product_name" as "products.product_name", "products"."brand_id" as "products.brand_id" from (select * from "brands" limit $1) as "brands" left join "products" on "brands"."brand_id" = "products"."brand_id" limit $2`, + sql: `select "brands"."brand_id" as "brands.brand_id", "brands"."brand_name" as "brands.brand_name", "products"."product_id" as "products.product_id", "products"."product_name" as "products.product_name", "products"."brand_id" as "products.brand_id" from (select * from "brands" limit $1) as "brands" left join "products" as "products" on "brands"."brand_id" = "products"."brand_id" limit $2`, }) }) @@ -520,7 +536,7 @@ describe("SQL query builder", () => { ) expect(query).toEqual({ bindings: [500, 5000], - sql: `select "stores"."store_id" as "stores.store_id", "stores"."store_name" as "stores.store_name", "products"."product_id" as "products.product_id", "products"."product_name" as "products.product_name" from (select * from "production"."stores" limit $1) as "stores" left join "production"."stocks" on "stores"."store_id" = "stocks"."store_id" left join "production"."products" on "products"."product_id" = "stocks"."product_id" limit $2`, + sql: `select "stores"."store_id" as "stores.store_id", "stores"."store_name" as "stores.store_name", "products"."product_id" as "products.product_id", "products"."product_name" as "products.product_name" from (select * from "production"."stores" limit $1) as "stores" left join "production"."stocks" as "stocks" on "stores"."store_id" = "stocks"."store_id" left join "production"."products" as "products" on "products"."product_id" = "stocks"."product_id" limit $2`, }) }) @@ -682,4 +698,99 @@ describe("SQL query builder", () => { sql: `insert into \"test\" (\"name\") values ($1) returning *`, }) }) + + it("should be able to rename column for MySQL", () => { + const table: Table = { + type: "table", + sourceType: TableSourceType.EXTERNAL, + name: TABLE_NAME, + schema: { + first_name: { + type: FieldType.STRING, + name: "first_name", + externalType: "varchar(45)", + }, + }, + sourceId: "SOURCE_ID", + } + const oldTable: Table = { + ...table, + schema: { + name: { + type: FieldType.STRING, + name: "name", + externalType: "varchar(45)", + }, + }, + } + const query = new Sql(SqlClient.MY_SQL, limit)._query({ + table, + endpoint: { + datasourceId: "MySQL", + operation: Operation.UPDATE_TABLE, + entityId: TABLE_NAME, + }, + meta: { + table: oldTable, + tables: { [oldTable.name]: oldTable }, + renamed: { + old: "name", + updated: "first_name", + }, + }, + }) + expect(query).toEqual({ + bindings: [], + sql: `alter table \`${TABLE_NAME}\` change column \`name\` \`first_name\` varchar(45);`, + }) + }) + + it("should be able to delete a column", () => { + const table: Table = { + type: "table", + sourceType: TableSourceType.EXTERNAL, + name: TABLE_NAME, + schema: { + first_name: { + type: FieldType.STRING, + name: "first_name", + externalType: 
"varchar(45)", + }, + }, + sourceId: "SOURCE_ID", + } + const oldTable: Table = { + ...table, + schema: { + first_name: { + type: FieldType.STRING, + name: "first_name", + externalType: "varchar(45)", + }, + last_name: { + type: FieldType.STRING, + name: "last_name", + externalType: "varchar(45)", + }, + }, + } + const query = sql._query({ + table, + endpoint: { + datasourceId: "Postgres", + operation: Operation.UPDATE_TABLE, + entityId: TABLE_NAME, + }, + meta: { + table: oldTable, + tables: [oldTable], + }, + }) + expect(query).toEqual([ + { + bindings: [], + sql: `alter table "${TABLE_NAME}" drop column "last_name"`, + }, + ]) + }) }) diff --git a/packages/server/src/integrations/tests/sqlAlias.spec.ts b/packages/server/src/integrations/tests/sqlAlias.spec.ts new file mode 100644 index 0000000000..dd82dadac0 --- /dev/null +++ b/packages/server/src/integrations/tests/sqlAlias.spec.ts @@ -0,0 +1,335 @@ +import { Datasource, Operation, QueryJson, SourceName } from "@budibase/types" +import { join } from "path" +import Sql from "../base/sql" +import { SqlClient } from "../utils" +import AliasTables from "../../api/controllers/row/alias" +import { generator } from "@budibase/backend-core/tests" +import { Knex } from "knex" + +function multiline(sql: string) { + return sql.replace(/\n/g, "").replace(/ +/g, " ") +} + +describe("Captures of real examples", () => { + const limit = 5000 + const relationshipLimit = 100 + + function getJson(name: string): QueryJson { + return require(join(__dirname, "sqlQueryJson", name)) as QueryJson + } + + describe("create", () => { + it("should create a row with relationships", () => { + const queryJson = getJson("createWithRelationships.json") + let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson) + expect(query).toEqual({ + bindings: ["A Street", 34, "London", "A", "B", "designer", 1990], + sql: multiline(`insert into "persons" ("address", "age", "city", "firstname", "lastname", "type", "year") + values ($1, $2, $3, $4, $5, $6, $7) returning *`), + }) + }) + }) + + describe("read", () => { + it("should handle basic retrieval with relationships", () => { + const queryJson = getJson("basicFetchWithRelationships.json") + let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson) + expect(query).toEqual({ + bindings: [relationshipLimit, limit], + sql: multiline(`select "a"."year" as "a.year", "a"."firstname" as "a.firstname", "a"."personid" as "a.personid", + "a"."address" as "a.address", "a"."age" as "a.age", "a"."type" as "a.type", "a"."city" as "a.city", + "a"."lastname" as "a.lastname", "b"."executorid" as "b.executorid", "b"."taskname" as "b.taskname", + "b"."taskid" as "b.taskid", "b"."completed" as "b.completed", "b"."qaid" as "b.qaid", + "b"."executorid" as "b.executorid", "b"."taskname" as "b.taskname", "b"."taskid" as "b.taskid", + "b"."completed" as "b.completed", "b"."qaid" as "b.qaid" + from (select * from "persons" as "a" order by "a"."firstname" asc limit $1) as "a" + left join "tasks" as "b" on "a"."personid" = "b"."qaid" or "a"."personid" = "b"."executorid" + order by "a"."firstname" asc limit $2`), + }) + }) + + it("should handle filtering by relationship", () => { + const queryJson = getJson("filterByRelationship.json") + let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson) + expect(query).toEqual({ + bindings: [relationshipLimit, "assembling", limit], + sql: multiline(`select "a"."productname" as "a.productname", "a"."productid" as "a.productid", + "b"."executorid" as "b.executorid", "b"."taskname" as 
"b.taskname", "b"."taskid" as "b.taskid", + "b"."completed" as "b.completed", "b"."qaid" as "b.qaid" + from (select * from "products" as "a" order by "a"."productname" asc limit $1) as "a" + left join "products_tasks" as "c" on "a"."productid" = "c"."productid" + left join "tasks" as "b" on "b"."taskid" = "c"."taskid" where "b"."taskname" = $2 + order by "a"."productname" asc limit $3`), + }) + }) + + it("should handle fetching many to many relationships", () => { + const queryJson = getJson("fetchManyToMany.json") + let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson) + expect(query).toEqual({ + bindings: [relationshipLimit, limit], + sql: multiline(`select "a"."productname" as "a.productname", "a"."productid" as "a.productid", + "b"."executorid" as "b.executorid", "b"."taskname" as "b.taskname", "b"."taskid" as "b.taskid", + "b"."completed" as "b.completed", "b"."qaid" as "b.qaid" + from (select * from "products" as "a" order by "a"."productname" asc limit $1) as "a" + left join "products_tasks" as "c" on "a"."productid" = "c"."productid" + left join "tasks" as "b" on "b"."taskid" = "c"."taskid" + order by "a"."productname" asc limit $2`), + }) + }) + + it("should handle enrichment of rows", () => { + const queryJson = getJson("enrichRelationship.json") + const filters = queryJson.filters?.oneOf?.taskid as number[] + let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson) + expect(query).toEqual({ + bindings: [...filters, limit, limit], + sql: multiline(`select "a"."executorid" as "a.executorid", "a"."taskname" as "a.taskname", + "a"."taskid" as "a.taskid", "a"."completed" as "a.completed", "a"."qaid" as "a.qaid", + "b"."productname" as "b.productname", "b"."productid" as "b.productid" + from (select * from "tasks" as "a" where "a"."taskid" in ($1, $2) limit $3) as "a" + left join "products_tasks" as "c" on "a"."taskid" = "c"."taskid" + left join "products" as "b" on "b"."productid" = "c"."productid" limit $4`), + }) + }) + + it("should manage query with many relationship filters", () => { + const queryJson = getJson("manyRelationshipFilters.json") + let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson) + const filters = queryJson.filters + const notEqualsValue = Object.values(filters?.notEqual!)[0] + const rangeValue = Object.values(filters?.range!)[0] + const equalValue = Object.values(filters?.equal!)[0] + + expect(query).toEqual({ + bindings: [ + notEqualsValue, + relationshipLimit, + rangeValue.low, + rangeValue.high, + equalValue, + limit, + ], + sql: multiline(`select "a"."executorid" as "a.executorid", "a"."taskname" as "a.taskname", "a"."taskid" as "a.taskid", + "a"."completed" as "a.completed", "a"."qaid" as "a.qaid", "b"."productname" as "b.productname", + "b"."productid" as "b.productid", "c"."year" as "c.year", "c"."firstname" as "c.firstname", + "c"."personid" as "c.personid", "c"."address" as "c.address", "c"."age" as "c.age", "c"."type" as "c.type", + "c"."city" as "c.city", "c"."lastname" as "c.lastname", "c"."year" as "c.year", "c"."firstname" as "c.firstname", + "c"."personid" as "c.personid", "c"."address" as "c.address", "c"."age" as "c.age", "c"."type" as "c.type", + "c"."city" as "c.city", "c"."lastname" as "c.lastname" + from (select * from "tasks" as "a" where not "a"."completed" = $1 + order by "a"."taskname" asc limit $2) as "a" + left join "products_tasks" as "d" on "a"."taskid" = "d"."taskid" + left join "products" as "b" on "b"."productid" = "d"."productid" + left join "persons" as "c" on "a"."executorid" = "c"."personid" or 
"a"."qaid" = "c"."personid" + where "c"."year" between $3 and $4 and "b"."productname" = $5 order by "a"."taskname" asc limit $6`), + }) + }) + }) + + describe("update", () => { + it("should handle performing a simple update", () => { + const queryJson = getJson("updateSimple.json") + let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson) + expect(query).toEqual({ + bindings: [1990, "C", "A Street", 34, "designer", "London", "B", 5], + sql: multiline(`update "persons" as "a" set "year" = $1, "firstname" = $2, "address" = $3, "age" = $4, + "type" = $5, "city" = $6, "lastname" = $7 where "a"."personid" = $8 returning *`), + }) + }) + + it("should handle performing an update of relationships", () => { + const queryJson = getJson("updateRelationship.json") + let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson) + expect(query).toEqual({ + bindings: [1990, "C", "A Street", 34, "designer", "London", "B", 5], + sql: multiline(`update "persons" as "a" set "year" = $1, "firstname" = $2, "address" = $3, "age" = $4, + "type" = $5, "city" = $6, "lastname" = $7 where "a"."personid" = $8 returning *`), + }) + }) + }) + + describe("delete", () => { + it("should handle deleting with relationships", () => { + const queryJson = getJson("deleteSimple.json") + let query = new Sql(SqlClient.POSTGRES, limit)._query(queryJson) + expect(query).toEqual({ + bindings: ["ddd", ""], + sql: multiline(`delete from "compositetable" as "a" where "a"."keypartone" = $1 and "a"."keyparttwo" = $2 + returning "a"."keyparttwo" as "a.keyparttwo", "a"."keypartone" as "a.keypartone", "a"."name" as "a.name"`), + }) + }) + }) + + describe("returning (everything bar Postgres)", () => { + it("should be able to handle row returning", () => { + const queryJson = getJson("createSimple.json") + const SQL = new Sql(SqlClient.MS_SQL, limit) + let query = SQL._query(queryJson, { disableReturning: true }) + expect(query).toEqual({ + sql: "insert into [people] ([age], [name]) values (@p0, @p1)", + bindings: [22, "Test"], + }) + + // now check returning + let returningQuery: Knex.SqlNative = { sql: "", bindings: [] } + SQL.getReturningRow((input: Knex.SqlNative) => { + returningQuery = input + }, queryJson) + expect(returningQuery).toEqual({ + sql: "select * from (select top (@p0) * from [people] where [people].[name] = @p1 and [people].[age] = @p2 order by [people].[name] asc) as [people]", + bindings: [1, "Test", 22], + }) + }) + }) + + describe("check max character aliasing", () => { + it("should handle over 'z' max character alias", () => { + const tableNames = [] + for (let i = 0; i < 100; i++) { + tableNames.push(generator.guid()) + } + const aliasing = new AliasTables(tableNames) + let alias: string = "" + for (let table of tableNames) { + alias = aliasing.getAlias(table) + } + expect(alias).toEqual("cv") + }) + }) + + describe("check aliasing is disabled/enabled", () => { + const tables = ["tableA", "tableB"] + + function getDatasource(source: SourceName): Datasource { + return { + source, + type: "datasource", + isSQL: true, + } + } + + function getQuery(op: Operation, fields: string[] = ["a"]): QueryJson { + return { + endpoint: { datasourceId: "", entityId: "", operation: op }, + resource: { + fields, + }, + } + } + + it("should check for Postgres aliased status", () => { + const aliasing = new AliasTables(tables) + const datasource = getDatasource(SourceName.POSTGRES) + expect( + aliasing.isAliasingEnabled(getQuery(Operation.CREATE), datasource) + ).toEqual(true) + expect( + 
aliasing.isAliasingEnabled(getQuery(Operation.READ), datasource)
+      ).toEqual(true)
+      expect(
+        aliasing.isAliasingEnabled(getQuery(Operation.UPDATE), datasource)
+      ).toEqual(true)
+      expect(
+        aliasing.isAliasingEnabled(getQuery(Operation.DELETE), datasource)
+      ).toEqual(true)
+    })
+
+    it("should check for MS-SQL aliased status", () => {
+      const aliasing = new AliasTables(tables)
+      const datasource = getDatasource(SourceName.SQL_SERVER)
+      expect(
+        aliasing.isAliasingEnabled(getQuery(Operation.CREATE), datasource)
+      ).toEqual(false)
+      expect(
+        aliasing.isAliasingEnabled(getQuery(Operation.READ), datasource)
+      ).toEqual(true)
+      expect(
+        aliasing.isAliasingEnabled(getQuery(Operation.UPDATE), datasource)
+      ).toEqual(false)
+      expect(
+        aliasing.isAliasingEnabled(getQuery(Operation.DELETE), datasource)
+      ).toEqual(false)
+    })
+
+    it("should check for MySQL aliased status", () => {
+      const aliasing = new AliasTables(tables)
+      const datasource = getDatasource(SourceName.MYSQL)
+      expect(
+        aliasing.isAliasingEnabled(getQuery(Operation.CREATE), datasource)
+      ).toEqual(false)
+      expect(
+        aliasing.isAliasingEnabled(getQuery(Operation.READ), datasource)
+      ).toEqual(true)
+      expect(
+        aliasing.isAliasingEnabled(getQuery(Operation.UPDATE), datasource)
+      ).toEqual(false)
+      expect(
+        aliasing.isAliasingEnabled(getQuery(Operation.DELETE), datasource)
+      ).toEqual(false)
+    })
+
+    it("should check for Oracle aliased status", () => {
+      const aliasing = new AliasTables(tables)
+      const datasource = getDatasource(SourceName.ORACLE)
+      expect(
+        aliasing.isAliasingEnabled(getQuery(Operation.CREATE), datasource)
+      ).toEqual(false)
+      expect(
+        aliasing.isAliasingEnabled(getQuery(Operation.READ), datasource)
+      ).toEqual(true)
+      expect(
+        aliasing.isAliasingEnabled(getQuery(Operation.UPDATE), datasource)
+      ).toEqual(false)
+      expect(
+        aliasing.isAliasingEnabled(getQuery(Operation.DELETE), datasource)
+      ).toEqual(false)
+    })
+
+    it("should disable aliasing for non-SQL datasources", () => {
+      const aliasing = new AliasTables(tables)
+      expect(
+        aliasing.isAliasingEnabled(getQuery(Operation.READ), {
+          source: SourceName.GOOGLE_SHEETS,
+          type: "datasource",
+          isSQL: false,
+        })
+      ).toEqual(false)
+    })
+
+    it("should disable when no fields", () => {
+      const aliasing = new AliasTables(tables)
+      const datasource = getDatasource(SourceName.POSTGRES)
+      expect(
+        aliasing.isAliasingEnabled(getQuery(Operation.READ, []), datasource)
+      ).toEqual(false)
+    })
+  })
+
+  describe("check some edge cases", () => {
+    const tableNames = ["hello", "world"]
+
+    it("should handle quoted table names", () => {
+      const aliasing = new AliasTables(tableNames)
+      const aliased = aliasing.aliasField(`"hello"."field"`)
+      expect(aliased).toEqual(`"a"."field"`)
+    })
+
+    it("should handle quoted table names with graves", () => {
+      const aliasing = new AliasTables(tableNames)
+      const aliased = aliasing.aliasField("`hello`.`world`")
+      expect(aliased).toEqual("`a`.`world`")
+    })
+
+    it("should handle table names in table names correctly", () => {
+      const tableNames = ["he", "hell", "hello"]
+      const aliasing = new AliasTables(tableNames)
+      const aliased1 = aliasing.aliasField("`he`.`world`")
+      const aliased2 = aliasing.aliasField("`hell`.`world`")
+      const aliased3 = aliasing.aliasField("`hello`.`world`")
+      expect(aliased1).toEqual("`a`.`world`")
+      expect(aliased2).toEqual("`b`.`world`")
+      expect(aliased3).toEqual("`c`.`world`")
+    })
+  })
+})
diff --git a/packages/server/src/integrations/tests/sqlQueryJson/basicFetchWithRelationships.json
b/packages/server/src/integrations/tests/sqlQueryJson/basicFetchWithRelationships.json new file mode 100644 index 0000000000..ba7fa4ef9b --- /dev/null +++ b/packages/server/src/integrations/tests/sqlQueryJson/basicFetchWithRelationships.json @@ -0,0 +1,183 @@ +{ + "endpoint": { + "datasourceId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7", + "entityId": "persons", + "operation": "READ" + }, + "resource": { + "fields": [ + "a.year", + "a.firstname", + "a.personid", + "a.address", + "a.age", + "a.type", + "a.city", + "a.lastname", + "b.executorid", + "b.taskname", + "b.taskid", + "b.completed", + "b.qaid", + "b.executorid", + "b.taskname", + "b.taskid", + "b.completed", + "b.qaid" + ] + }, + "filters": {}, + "sort": { + "firstname": { + "direction": "ASCENDING" + } + }, + "paginate": { + "limit": 100, + "page": 1 + }, + "relationships": [ + { + "tableName": "tasks", + "column": "QA", + "from": "personid", + "to": "qaid", + "aliases": { + "tasks": "b", + "persons": "a" + } + }, + { + "tableName": "tasks", + "column": "executor", + "from": "personid", + "to": "executorid", + "aliases": { + "tasks": "b", + "persons": "a" + } + } + ], + "extra": { + "idFilter": {} + }, + "meta": { + "table": { + "type": "table", + "_id": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__persons", + "primary": [ + "personid" + ], + "name": "persons", + "schema": { + "year": { + "type": "number", + "externalType": "integer", + "autocolumn": false, + "name": "year", + "constraints": { + "presence": false + } + }, + "firstname": { + "type": "string", + "externalType": "character varying", + "autocolumn": false, + "name": "firstname", + "constraints": { + "presence": false + } + }, + "personid": { + "type": "number", + "externalType": "integer", + "autocolumn": true, + "name": "personid", + "constraints": { + "presence": false + } + }, + "address": { + "type": "string", + "externalType": "character varying", + "autocolumn": false, + "name": "address", + "constraints": { + "presence": false + } + }, + "age": { + "type": "number", + "externalType": "integer", + "autocolumn": false, + "name": "age", + "constraints": { + "presence": false + } + }, + "type": { + "type": "options", + "externalType": "USER-DEFINED", + "autocolumn": false, + "name": "type", + "constraints": { + "presence": false, + "inclusion": [ + "support", + "designer", + "programmer", + "qa" + ] + } + }, + "city": { + "type": "string", + "externalType": "character varying", + "autocolumn": false, + "name": "city", + "constraints": { + "presence": false + } + }, + "lastname": { + "type": "string", + "externalType": "character varying", + "autocolumn": false, + "name": "lastname", + "constraints": { + "presence": false + } + }, + "QA": { + "tableId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__tasks", + "name": "QA", + "relationshipType": "many-to-one", + "fieldName": "qaid", + "type": "link", + "main": true, + "_id": "ccb68481c80c34217a4540a2c6c27fe46", + "foreignKey": "personid" + }, + "executor": { + "tableId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__tasks", + "name": "executor", + "relationshipType": "many-to-one", + "fieldName": "executorid", + "type": "link", + "main": true, + "_id": "c89530b9770d94bec851e062b5cff3001", + "foreignKey": "personid", + "tableName": "persons" + } + }, + "sourceId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7", + "sourceType": "external", + "primaryDisplay": "firstname", + "views": {} + } + }, + "tableAliases": { + "persons": "a", + "tasks": "b" + } +} \ No newline at end of file diff --git 
a/packages/server/src/integrations/tests/sqlQueryJson/createSimple.json b/packages/server/src/integrations/tests/sqlQueryJson/createSimple.json new file mode 100644 index 0000000000..33a88d30e1 --- /dev/null +++ b/packages/server/src/integrations/tests/sqlQueryJson/createSimple.json @@ -0,0 +1,64 @@ +{ + "endpoint": { + "datasourceId": "datasource_plus_0ed5835e5552496285df546030f7c4ae", + "entityId": "people", + "operation": "CREATE" + }, + "resource": { + "fields": [ + "a.name", + "a.age" + ] + }, + "filters": {}, + "relationships": [], + "body": { + "name": "Test", + "age": 22 + }, + "extra": { + "idFilter": { + "equal": { + "name": "Test", + "age": 22 + } + } + }, + "meta": { + "table": { + "_id": "datasource_plus_0ed5835e5552496285df546030f7c4ae__people", + "type": "table", + "sourceId": "datasource_plus_0ed5835e5552496285df546030f7c4ae", + "sourceType": "external", + "primary": [ + "name", + "age" + ], + "name": "people", + "schema": { + "name": { + "type": "string", + "externalType": "varchar", + "autocolumn": false, + "name": "name", + "constraints": { + "presence": true + } + }, + "age": { + "type": "number", + "externalType": "int", + "autocolumn": false, + "name": "age", + "constraints": { + "presence": false + } + } + }, + "primaryDisplay": "name" + } + }, + "tableAliases": { + "people": "a" + } +} \ No newline at end of file diff --git a/packages/server/src/integrations/tests/sqlQueryJson/createWithRelationships.json b/packages/server/src/integrations/tests/sqlQueryJson/createWithRelationships.json new file mode 100644 index 0000000000..82d85c417b --- /dev/null +++ b/packages/server/src/integrations/tests/sqlQueryJson/createWithRelationships.json @@ -0,0 +1,173 @@ +{ + "endpoint": { + "datasourceId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7", + "entityId": "persons", + "operation": "CREATE" + }, + "resource": { + "fields": [ + "a.year", + "a.firstname", + "a.personid", + "a.address", + "a.age", + "a.type", + "a.city", + "a.lastname" + ] + }, + "filters": {}, + "relationships": [ + { + "tableName": "tasks", + "column": "QA", + "from": "personid", + "to": "qaid", + "aliases": { + "tasks": "b", + "persons": "a" + } + }, + { + "tableName": "tasks", + "column": "executor", + "from": "personid", + "to": "executorid", + "aliases": { + "tasks": "b", + "persons": "a" + } + } + ], + "body": { + "year": 1990, + "firstname": "A", + "address": "A Street", + "age": 34, + "type": "designer", + "city": "London", + "lastname": "B" + }, + "extra": { + "idFilter": {} + }, + "meta": { + "table": { + "type": "table", + "_id": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__persons", + "primary": [ + "personid" + ], + "name": "persons", + "schema": { + "year": { + "type": "number", + "externalType": "integer", + "autocolumn": false, + "name": "year", + "constraints": { + "presence": false + } + }, + "firstname": { + "type": "string", + "externalType": "character varying", + "autocolumn": false, + "name": "firstname", + "constraints": { + "presence": false + } + }, + "personid": { + "type": "number", + "externalType": "integer", + "autocolumn": true, + "name": "personid", + "constraints": { + "presence": false + } + }, + "address": { + "type": "string", + "externalType": "character varying", + "autocolumn": false, + "name": "address", + "constraints": { + "presence": false + } + }, + "age": { + "type": "number", + "externalType": "integer", + "autocolumn": false, + "name": "age", + "constraints": { + "presence": false + } + }, + "type": { + "type": "options", + "externalType": 
"USER-DEFINED", + "autocolumn": false, + "name": "type", + "constraints": { + "presence": false, + "inclusion": [ + "support", + "designer", + "programmer", + "qa" + ] + } + }, + "city": { + "type": "string", + "externalType": "character varying", + "autocolumn": false, + "name": "city", + "constraints": { + "presence": false + } + }, + "lastname": { + "type": "string", + "externalType": "character varying", + "autocolumn": false, + "name": "lastname", + "constraints": { + "presence": false + } + }, + "QA": { + "tableId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__tasks", + "name": "QA", + "relationshipType": "many-to-one", + "fieldName": "qaid", + "type": "link", + "main": true, + "_id": "ccb68481c80c34217a4540a2c6c27fe46", + "foreignKey": "personid" + }, + "executor": { + "tableId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__tasks", + "name": "executor", + "relationshipType": "many-to-one", + "fieldName": "executorid", + "type": "link", + "main": true, + "_id": "c89530b9770d94bec851e062b5cff3001", + "foreignKey": "personid", + "tableName": "persons" + } + }, + "sourceId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7", + "sourceType": "external", + "primaryDisplay": "firstname", + "views": {} + } + }, + "tableAliases": { + "persons": "a", + "tasks": "b" + } +} \ No newline at end of file diff --git a/packages/server/src/integrations/tests/sqlQueryJson/deleteSimple.json b/packages/server/src/integrations/tests/sqlQueryJson/deleteSimple.json new file mode 100644 index 0000000000..d6e099c4b6 --- /dev/null +++ b/packages/server/src/integrations/tests/sqlQueryJson/deleteSimple.json @@ -0,0 +1,75 @@ +{ + "endpoint": { + "datasourceId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7", + "entityId": "compositetable", + "operation": "DELETE" + }, + "resource": { + "fields": [ + "a.keyparttwo", + "a.keypartone", + "a.name" + ] + }, + "filters": { + "equal": { + "keypartone": "ddd", + "keyparttwo": "" + } + }, + "relationships": [], + "extra": { + "idFilter": { + "equal": { + "keypartone": "ddd", + "keyparttwo": "" + } + } + }, + "meta": { + "table": { + "type": "table", + "_id": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__compositetable", + "primary": [ + "keypartone", + "keyparttwo" + ], + "name": "compositetable", + "schema": { + "keyparttwo": { + "type": "string", + "externalType": "character varying", + "autocolumn": false, + "name": "keyparttwo", + "constraints": { + "presence": true + } + }, + "keypartone": { + "type": "string", + "externalType": "character varying", + "autocolumn": false, + "name": "keypartone", + "constraints": { + "presence": true + } + }, + "name": { + "type": "string", + "externalType": "character varying", + "autocolumn": false, + "name": "name", + "constraints": { + "presence": false + } + } + }, + "sourceId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7", + "sourceType": "external", + "primaryDisplay": "keypartone" + } + }, + "tableAliases": { + "compositetable": "a" + } +} \ No newline at end of file diff --git a/packages/server/src/integrations/tests/sqlQueryJson/enrichRelationship.json b/packages/server/src/integrations/tests/sqlQueryJson/enrichRelationship.json new file mode 100644 index 0000000000..d71f0552c6 --- /dev/null +++ b/packages/server/src/integrations/tests/sqlQueryJson/enrichRelationship.json @@ -0,0 +1,123 @@ +{ + "endpoint": { + "datasourceId": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81", + "entityId": "tasks", + "operation": "READ" + }, + "resource": { + "fields": [ + "a.executorid", + "a.taskname", + 
"a.taskid", + "a.completed", + "a.qaid", + "b.productname", + "b.productid" + ] + }, + "filters": { + "oneOf": { + "taskid": [ + 1, + 2 + ] + } + }, + "relationships": [ + { + "tableName": "products", + "column": "products", + "through": "products_tasks", + "from": "taskid", + "to": "productid", + "fromPrimary": "taskid", + "toPrimary": "productid", + "aliases": { + "products_tasks": "c", + "products": "b", + "tasks": "a" + } + } + ], + "extra": { + "idFilter": {} + }, + "meta": { + "table": { + "type": "table", + "_id": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81__tasks", + "primary": [ + "taskid" + ], + "name": "tasks", + "schema": { + "executorid": { + "type": "number", + "externalType": "integer", + "autocolumn": false, + "name": "executorid", + "constraints": { + "presence": false + } + }, + "taskname": { + "type": "string", + "externalType": "character varying", + "autocolumn": false, + "name": "taskname", + "constraints": { + "presence": false + } + }, + "taskid": { + "type": "number", + "externalType": "integer", + "autocolumn": true, + "name": "taskid", + "constraints": { + "presence": false + } + }, + "completed": { + "type": "boolean", + "externalType": "boolean", + "autocolumn": false, + "name": "completed", + "constraints": { + "presence": false + } + }, + "qaid": { + "type": "number", + "externalType": "integer", + "autocolumn": false, + "name": "qaid", + "constraints": { + "presence": false + } + }, + "products": { + "tableId": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81__products", + "name": "products", + "relationshipType": "many-to-many", + "through": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81__products_tasks", + "type": "link", + "_id": "c3b91d00cd36c4cc1a347794725b9adbd", + "fieldName": "productid", + "throughFrom": "productid", + "throughTo": "taskid" + } + }, + "sourceId": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81", + "sourceType": "external", + "primaryDisplay": "taskname", + "sql": true, + "views": {} + } + }, + "tableAliases": { + "tasks": "a", + "products": "b", + "products_tasks": "c" + } +} \ No newline at end of file diff --git a/packages/server/src/integrations/tests/sqlQueryJson/fetchManyToMany.json b/packages/server/src/integrations/tests/sqlQueryJson/fetchManyToMany.json new file mode 100644 index 0000000000..cec2fdb025 --- /dev/null +++ b/packages/server/src/integrations/tests/sqlQueryJson/fetchManyToMany.json @@ -0,0 +1,109 @@ +{ + "endpoint": { + "datasourceId": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81", + "entityId": "products", + "operation": "READ" + }, + "resource": { + "fields": [ + "a.productname", + "a.productid", + "b.executorid", + "b.taskname", + "b.taskid", + "b.completed", + "b.qaid" + ] + }, + "filters": { + "string": {}, + "fuzzy": {}, + "range": {}, + "equal": {}, + "notEqual": {}, + "empty": {}, + "notEmpty": {}, + "contains": {}, + "notContains": {}, + "oneOf": {}, + "containsAny": {} + }, + "sort": { + "productname": { + "direction": "ASCENDING" + } + }, + "paginate": { + "limit": 100, + "page": 1 + }, + "relationships": [ + { + "tableName": "tasks", + "column": "tasks", + "through": "products_tasks", + "from": "productid", + "to": "taskid", + "fromPrimary": "productid", + "toPrimary": "taskid", + "aliases": { + "products_tasks": "c", + "tasks": "b", + "products": "a" + } + } + ], + "extra": { + "idFilter": {} + }, + "meta": { + "table": { + "type": "table", + "_id": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81__products", + "primary": [ + "productid" + ], + "name": "products", + "schema": { + 
"productname": { + "type": "string", + "externalType": "character varying", + "autocolumn": false, + "name": "productname", + "constraints": { + "presence": false + } + }, + "productid": { + "type": "number", + "externalType": "integer", + "autocolumn": true, + "name": "productid", + "constraints": { + "presence": false + } + }, + "tasks": { + "tableId": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81__tasks", + "name": "tasks", + "relationshipType": "many-to-many", + "fieldName": "taskid", + "through": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81__products_tasks", + "throughFrom": "taskid", + "throughTo": "productid", + "type": "link", + "main": true, + "_id": "c3b91d00cd36c4cc1a347794725b9adbd" + } + }, + "sourceId": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81", + "sourceType": "external", + "primaryDisplay": "productname" + } + }, + "tableAliases": { + "products": "a", + "tasks": "b", + "products_tasks": "c" + } +} \ No newline at end of file diff --git a/packages/server/src/integrations/tests/sqlQueryJson/filterByRelationship.json b/packages/server/src/integrations/tests/sqlQueryJson/filterByRelationship.json new file mode 100644 index 0000000000..399cb0f4d2 --- /dev/null +++ b/packages/server/src/integrations/tests/sqlQueryJson/filterByRelationship.json @@ -0,0 +1,94 @@ +{ + "endpoint": { + "datasourceId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7", + "entityId": "products", + "operation": "READ" + }, + "resource": { + "fields": [ + "a.productname", + "a.productid", + "b.executorid", + "b.taskname", + "b.taskid", + "b.completed", + "b.qaid" + ] + }, + "filters": { + "equal": { + "1:tasks.taskname": "assembling" + }, + "onEmptyFilter": "all" + }, + "sort": { + "productname": { + "direction": "ASCENDING" + } + }, + "paginate": { + "limit": 100, + "page": 1 + }, + "relationships": [ + { + "tableName": "tasks", + "column": "tasks", + "through": "products_tasks", + "from": "productid", + "to": "taskid", + "fromPrimary": "productid", + "toPrimary": "taskid" + } + ], + "tableAliases": { + "products_tasks": "c", + "tasks": "b", + "products": "a" + }, + "meta": { + "table": { + "type": "table", + "_id": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__products", + "primary": [ + "productid" + ], + "name": "products", + "schema": { + "productname": { + "type": "string", + "externalType": "character varying", + "autocolumn": false, + "name": "productname", + "constraints": { + "presence": false + } + }, + "productid": { + "type": "number", + "externalType": "integer", + "autocolumn": true, + "name": "productid", + "constraints": { + "presence": false + } + }, + "tasks": { + "tableId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__tasks", + "name": "tasks", + "relationshipType": "many-to-many", + "fieldName": "taskid", + "through": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__products_tasks", + "throughFrom": "taskid", + "throughTo": "productid", + "type": "link", + "main": true, + "_id": "ca6862d9ba09146dd8a68e3b5b7055a09" + } + }, + "sourceId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7", + "sourceType": "external", + "primaryDisplay": "productname" + } + } +} \ No newline at end of file diff --git a/packages/server/src/integrations/tests/sqlQueryJson/manyRelationshipFilters.json b/packages/server/src/integrations/tests/sqlQueryJson/manyRelationshipFilters.json new file mode 100644 index 0000000000..2b5d156546 --- /dev/null +++ b/packages/server/src/integrations/tests/sqlQueryJson/manyRelationshipFilters.json @@ -0,0 +1,202 @@ +{ + "endpoint": { + 
"datasourceId": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81", + "entityId": "tasks", + "operation": "READ" + }, + "resource": { + "fields": [ + "a.executorid", + "a.taskname", + "a.taskid", + "a.completed", + "a.qaid", + "b.productname", + "b.productid", + "c.year", + "c.firstname", + "c.personid", + "c.address", + "c.age", + "c.type", + "c.city", + "c.lastname", + "c.year", + "c.firstname", + "c.personid", + "c.address", + "c.age", + "c.type", + "c.city", + "c.lastname" + ] + }, + "filters": { + "string": {}, + "fuzzy": {}, + "range": { + "1:persons.year": { + "low": 1990, + "high": 2147483647 + } + }, + "equal": { + "2:products.productname": "Computers" + }, + "notEqual": { + "3:completed": true + }, + "empty": {}, + "notEmpty": {}, + "contains": {}, + "notContains": {}, + "oneOf": {}, + "containsAny": {}, + "onEmptyFilter": "all" + }, + "sort": { + "taskname": { + "direction": "ASCENDING" + } + }, + "paginate": { + "limit": 100, + "page": 1 + }, + "relationships": [ + { + "tableName": "products", + "column": "products", + "through": "products_tasks", + "from": "taskid", + "to": "productid", + "fromPrimary": "taskid", + "toPrimary": "productid", + "aliases": { + "products_tasks": "d", + "products": "b", + "tasks": "a" + } + }, + { + "tableName": "persons", + "column": "tasksToExecute", + "from": "executorid", + "to": "personid", + "aliases": { + "persons": "c", + "tasks": "a" + } + }, + { + "tableName": "persons", + "column": "tasksToQA", + "from": "qaid", + "to": "personid", + "aliases": { + "persons": "c", + "tasks": "a" + } + } + ], + "extra": { + "idFilter": {} + }, + "meta": { + "table": { + "type": "table", + "_id": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81__tasks", + "primary": [ + "taskid" + ], + "name": "tasks", + "schema": { + "executorid": { + "type": "number", + "externalType": "integer", + "name": "executorid", + "constraints": { + "presence": false + }, + "autocolumn": true, + "autoReason": "foreign_key" + }, + "taskname": { + "type": "string", + "externalType": "character varying", + "autocolumn": false, + "name": "taskname", + "constraints": { + "presence": false + } + }, + "taskid": { + "type": "number", + "externalType": "integer", + "autocolumn": true, + "name": "taskid", + "constraints": { + "presence": false + } + }, + "completed": { + "type": "boolean", + "externalType": "boolean", + "autocolumn": false, + "name": "completed", + "constraints": { + "presence": false + } + }, + "qaid": { + "type": "number", + "externalType": "integer", + "name": "qaid", + "constraints": { + "presence": false + } + }, + "products": { + "tableId": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81__products", + "name": "products", + "relationshipType": "many-to-many", + "through": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81__products_tasks", + "type": "link", + "_id": "c3b91d00cd36c4cc1a347794725b9adbd", + "fieldName": "productid", + "throughFrom": "productid", + "throughTo": "taskid" + }, + "tasksToExecute": { + "tableId": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81__persons", + "name": "tasksToExecute", + "relationshipType": "one-to-many", + "type": "link", + "_id": "c0f440590bda04f28846242156c1dd60b", + "foreignKey": "executorid", + "fieldName": "personid" + }, + "tasksToQA": { + "tableId": "datasource_plus_44a967caf37a435f84fe01cd6dfe8f81__persons", + "name": "tasksToQA", + "relationshipType": "one-to-many", + "type": "link", + "_id": "c5fdf453a0ba743d58e29491d174c974b", + "foreignKey": "qaid", + "fieldName": "personid" + } + }, + "sourceId": 
"datasource_plus_44a967caf37a435f84fe01cd6dfe8f81", + "sourceType": "external", + "primaryDisplay": "taskname", + "sql": true, + "views": {} + } + }, + "tableAliases": { + "tasks": "a", + "products": "b", + "persons": "c", + "products_tasks": "d" + } +} \ No newline at end of file diff --git a/packages/server/src/integrations/tests/sqlQueryJson/updateRelationship.json b/packages/server/src/integrations/tests/sqlQueryJson/updateRelationship.json new file mode 100644 index 0000000000..42c2a44335 --- /dev/null +++ b/packages/server/src/integrations/tests/sqlQueryJson/updateRelationship.json @@ -0,0 +1,181 @@ +{ + "endpoint": { + "datasourceId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7", + "entityId": "persons", + "operation": "UPDATE" + }, + "resource": { + "fields": [ + "a.year", + "a.firstname", + "a.personid", + "a.address", + "a.age", + "a.type", + "a.city", + "a.lastname" + ] + }, + "filters": { + "equal": { + "personid": 5 + } + }, + "relationships": [ + { + "tableName": "tasks", + "column": "QA", + "from": "personid", + "to": "qaid", + "aliases": { + "tasks": "b", + "persons": "a" + } + }, + { + "tableName": "tasks", + "column": "executor", + "from": "personid", + "to": "executorid", + "aliases": { + "tasks": "b", + "persons": "a" + } + } + ], + "body": { + "year": 1990, + "firstname": "C", + "address": "A Street", + "age": 34, + "type": "designer", + "city": "London", + "lastname": "B" + }, + "extra": { + "idFilter": { + "equal": { + "personid": 5 + } + } + }, + "meta": { + "table": { + "type": "table", + "_id": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__persons", + "primary": [ + "personid" + ], + "name": "persons", + "schema": { + "year": { + "type": "number", + "externalType": "integer", + "autocolumn": false, + "name": "year", + "constraints": { + "presence": false + } + }, + "firstname": { + "type": "string", + "externalType": "character varying", + "autocolumn": false, + "name": "firstname", + "constraints": { + "presence": false + } + }, + "personid": { + "type": "number", + "externalType": "integer", + "autocolumn": true, + "name": "personid", + "constraints": { + "presence": false + } + }, + "address": { + "type": "string", + "externalType": "character varying", + "autocolumn": false, + "name": "address", + "constraints": { + "presence": false + } + }, + "age": { + "type": "number", + "externalType": "integer", + "autocolumn": false, + "name": "age", + "constraints": { + "presence": false + } + }, + "type": { + "type": "options", + "externalType": "USER-DEFINED", + "autocolumn": false, + "name": "type", + "constraints": { + "presence": false, + "inclusion": [ + "support", + "designer", + "programmer", + "qa" + ] + } + }, + "city": { + "type": "string", + "externalType": "character varying", + "autocolumn": false, + "name": "city", + "constraints": { + "presence": false + } + }, + "lastname": { + "type": "string", + "externalType": "character varying", + "autocolumn": false, + "name": "lastname", + "constraints": { + "presence": false + } + }, + "QA": { + "tableId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__tasks", + "name": "QA", + "relationshipType": "many-to-one", + "fieldName": "qaid", + "type": "link", + "main": true, + "_id": "ccb68481c80c34217a4540a2c6c27fe46", + "foreignKey": "personid" + }, + "executor": { + "tableId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__tasks", + "name": "executor", + "relationshipType": "many-to-one", + "fieldName": "executorid", + "type": "link", + "main": true, + "_id": "c89530b9770d94bec851e062b5cff3001", 
+ "foreignKey": "personid", + "tableName": "persons" + } + }, + "sourceId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7", + "sourceType": "external", + "primaryDisplay": "firstname", + "views": {} + } + }, + "tableAliases": { + "persons": "a", + "tasks": "b" + } +} \ No newline at end of file diff --git a/packages/server/src/integrations/tests/sqlQueryJson/updateSimple.json b/packages/server/src/integrations/tests/sqlQueryJson/updateSimple.json new file mode 100644 index 0000000000..42c2a44335 --- /dev/null +++ b/packages/server/src/integrations/tests/sqlQueryJson/updateSimple.json @@ -0,0 +1,181 @@ +{ + "endpoint": { + "datasourceId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7", + "entityId": "persons", + "operation": "UPDATE" + }, + "resource": { + "fields": [ + "a.year", + "a.firstname", + "a.personid", + "a.address", + "a.age", + "a.type", + "a.city", + "a.lastname" + ] + }, + "filters": { + "equal": { + "personid": 5 + } + }, + "relationships": [ + { + "tableName": "tasks", + "column": "QA", + "from": "personid", + "to": "qaid", + "aliases": { + "tasks": "b", + "persons": "a" + } + }, + { + "tableName": "tasks", + "column": "executor", + "from": "personid", + "to": "executorid", + "aliases": { + "tasks": "b", + "persons": "a" + } + } + ], + "body": { + "year": 1990, + "firstname": "C", + "address": "A Street", + "age": 34, + "type": "designer", + "city": "London", + "lastname": "B" + }, + "extra": { + "idFilter": { + "equal": { + "personid": 5 + } + } + }, + "meta": { + "table": { + "type": "table", + "_id": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__persons", + "primary": [ + "personid" + ], + "name": "persons", + "schema": { + "year": { + "type": "number", + "externalType": "integer", + "autocolumn": false, + "name": "year", + "constraints": { + "presence": false + } + }, + "firstname": { + "type": "string", + "externalType": "character varying", + "autocolumn": false, + "name": "firstname", + "constraints": { + "presence": false + } + }, + "personid": { + "type": "number", + "externalType": "integer", + "autocolumn": true, + "name": "personid", + "constraints": { + "presence": false + } + }, + "address": { + "type": "string", + "externalType": "character varying", + "autocolumn": false, + "name": "address", + "constraints": { + "presence": false + } + }, + "age": { + "type": "number", + "externalType": "integer", + "autocolumn": false, + "name": "age", + "constraints": { + "presence": false + } + }, + "type": { + "type": "options", + "externalType": "USER-DEFINED", + "autocolumn": false, + "name": "type", + "constraints": { + "presence": false, + "inclusion": [ + "support", + "designer", + "programmer", + "qa" + ] + } + }, + "city": { + "type": "string", + "externalType": "character varying", + "autocolumn": false, + "name": "city", + "constraints": { + "presence": false + } + }, + "lastname": { + "type": "string", + "externalType": "character varying", + "autocolumn": false, + "name": "lastname", + "constraints": { + "presence": false + } + }, + "QA": { + "tableId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__tasks", + "name": "QA", + "relationshipType": "many-to-one", + "fieldName": "qaid", + "type": "link", + "main": true, + "_id": "ccb68481c80c34217a4540a2c6c27fe46", + "foreignKey": "personid" + }, + "executor": { + "tableId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7__tasks", + "name": "executor", + "relationshipType": "many-to-one", + "fieldName": "executorid", + "type": "link", + "main": true, + "_id": "c89530b9770d94bec851e062b5cff3001", 
+ "foreignKey": "personid", + "tableName": "persons" + } + }, + "sourceId": "datasource_plus_8066e56456784eb2a00129d31be5c3e7", + "sourceType": "external", + "primaryDisplay": "firstname", + "views": {} + } + }, + "tableAliases": { + "persons": "a", + "tasks": "b" + } +} \ No newline at end of file diff --git a/packages/server/src/migrations/functions/usageQuotas/tests/syncApps.spec.ts b/packages/server/src/migrations/functions/usageQuotas/tests/syncApps.spec.ts index d0d50395b2..1d4d4d0f71 100644 --- a/packages/server/src/migrations/functions/usageQuotas/tests/syncApps.spec.ts +++ b/packages/server/src/migrations/functions/usageQuotas/tests/syncApps.spec.ts @@ -13,7 +13,7 @@ describe("syncApps", () => { afterAll(config.end) it("runs successfully", async () => { - return config.doInContext(null, async () => { + return config.doInContext(undefined, async () => { // create the usage quota doc and mock usages await quotas.getQuotaUsage() await quotas.setUsage(3, StaticQuotaName.APPS, QuotaUsageType.STATIC) diff --git a/packages/server/src/migrations/functions/usageQuotas/tests/syncCreators.spec.ts b/packages/server/src/migrations/functions/usageQuotas/tests/syncCreators.spec.ts index 75fa9f217e..93b7d4949b 100644 --- a/packages/server/src/migrations/functions/usageQuotas/tests/syncCreators.spec.ts +++ b/packages/server/src/migrations/functions/usageQuotas/tests/syncCreators.spec.ts @@ -12,8 +12,8 @@ describe("syncCreators", () => { afterAll(config.end) it("syncs creators", async () => { - return config.doInContext(null, async () => { - await config.createUser({ admin: true }) + return config.doInContext(undefined, async () => { + await config.createUser({ admin: { global: true } }) await syncCreators.run() diff --git a/packages/server/src/migrations/functions/usageQuotas/tests/syncRows.spec.ts b/packages/server/src/migrations/functions/usageQuotas/tests/syncRows.spec.ts index e644d605b6..730278683c 100644 --- a/packages/server/src/migrations/functions/usageQuotas/tests/syncRows.spec.ts +++ b/packages/server/src/migrations/functions/usageQuotas/tests/syncRows.spec.ts @@ -14,7 +14,7 @@ describe("syncRows", () => { afterAll(config.end) it("runs successfully", async () => { - return config.doInContext(null, async () => { + return config.doInContext(undefined, async () => { // create the usage quota doc and mock usages await quotas.getQuotaUsage() await quotas.setUsage(300, StaticQuotaName.ROWS, QuotaUsageType.STATIC) diff --git a/packages/server/src/migrations/functions/usageQuotas/tests/syncUsers.spec.ts b/packages/server/src/migrations/functions/usageQuotas/tests/syncUsers.spec.ts index f7500c8b4d..2731cc041d 100644 --- a/packages/server/src/migrations/functions/usageQuotas/tests/syncUsers.spec.ts +++ b/packages/server/src/migrations/functions/usageQuotas/tests/syncUsers.spec.ts @@ -12,7 +12,7 @@ describe("syncUsers", () => { afterAll(config.end) it("syncs users", async () => { - return config.doInContext(null, async () => { + return config.doInContext(undefined, async () => { await config.createUser() await syncUsers.run() diff --git a/packages/server/src/migrations/tests/index.spec.ts b/packages/server/src/migrations/tests/index.spec.ts index c01040593a..236776cd3f 100644 --- a/packages/server/src/migrations/tests/index.spec.ts +++ b/packages/server/src/migrations/tests/index.spec.ts @@ -40,7 +40,7 @@ describe("migrations", () => { describe("backfill", () => { it("runs app db migration", async () => { - await config.doInContext(null, async () => { + await config.doInContext(undefined, async 
() => { await clearMigrations() await config.createAutomation() await config.createAutomation(structures.newAutomation()) @@ -93,18 +93,18 @@ describe("migrations", () => { }) it("runs global db migration", async () => { - await config.doInContext(null, async () => { + await config.doInContext(undefined, async () => { await clearMigrations() - const appId = config.prodAppId + const appId = config.getProdAppId() const roles = { [appId]: "role_12345" } await config.createUser({ - builder: false, - admin: true, + builder: { global: false }, + admin: { global: true }, roles, }) // admin only await config.createUser({ - builder: false, - admin: false, + builder: { global: false }, + admin: { global: false }, roles, }) // non admin non builder await config.createTable() diff --git a/packages/server/src/sdk/app/applications/import.ts b/packages/server/src/sdk/app/applications/import.ts index c3415bdb36..f712548fcb 100644 --- a/packages/server/src/sdk/app/applications/import.ts +++ b/packages/server/src/sdk/app/applications/import.ts @@ -85,7 +85,9 @@ async function getImportableDocuments(db: Database) { const docPromises = [] for (let docType of DocumentTypesToImport) { docPromises.push( - db.allDocs(dbCore.getDocParams(docType, null, { include_docs: true })) + db.allDocs( + dbCore.getDocParams(docType, null, { include_docs: true }) + ) ) } // map the responses to the document itself diff --git a/packages/server/src/sdk/app/applications/tests/sync.spec.ts b/packages/server/src/sdk/app/applications/tests/sync.spec.ts index 1d28ed977c..a53bdb0bd7 100644 --- a/packages/server/src/sdk/app/applications/tests/sync.spec.ts +++ b/packages/server/src/sdk/app/applications/tests/sync.spec.ts @@ -43,8 +43,8 @@ async function createUser(email: string, roles: UserRoles, builder?: boolean) { const user = await config.createUser({ email, roles, - builder: builder || false, - admin: false, + builder: { global: builder || false }, + admin: { global: false }, }) await context.doInContext(config.appId!, async () => { await events.user.created(user) @@ -55,10 +55,10 @@ async function createUser(email: string, roles: UserRoles, builder?: boolean) { async function removeUserRole(user: User) { const final = await config.globalUser({ ...user, - id: user._id, + _id: user._id, roles: {}, - builder: false, - admin: false, + builder: { global: false }, + admin: { global: false }, }) await context.doInContext(config.appId!, async () => { await events.user.updated(final) @@ -69,8 +69,8 @@ async function createGroupAndUser(email: string) { groupUser = await config.createUser({ email, roles: {}, - builder: false, - admin: false, + builder: { global: false }, + admin: { global: false }, }) group = await config.createGroup() await config.addUserToGroup(group._id!, groupUser._id!) 
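The test updates in the hunks above all make the same substitution: the `builder` and `admin` flags passed to `config.createUser`/`config.globalUser` move from plain booleans to scoped objects. A minimal sketch of the shape those calls now assume (the `UserFlags` name is illustrative, not taken from the codebase):

  interface UserFlags {
    builder?: { global: boolean }
    admin?: { global: boolean }
  }

  // the old `admin: true, builder: false` expressed in the new shape
  const flags: UserFlags = {
    admin: { global: true },
    builder: { global: false },
  }

Expressing the flags as objects leaves room for scopes other than `global` later, which a bare boolean could not carry.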
diff --git a/packages/server/src/sdk/app/datasources/datasources.ts b/packages/server/src/sdk/app/datasources/datasources.ts
index c71c3f1b31..fd0d291d91 100644
--- a/packages/server/src/sdk/app/datasources/datasources.ts
+++ b/packages/server/src/sdk/app/datasources/datasources.ts
@@ -229,7 +229,7 @@ export async function removeSecretSingle(datasource: Datasource) {
 }

 export function mergeConfigs(update: Datasource, old: Datasource) {
-  if (!update.config) {
+  if (!update.config || !old.config) {
     return update
   }
   // specific to REST datasources, fix the auth configs again if required
diff --git a/packages/server/src/sdk/app/datasources/plus.ts b/packages/server/src/sdk/app/datasources/plus.ts
index 04cd508863..31ec51c728 100644
--- a/packages/server/src/sdk/app/datasources/plus.ts
+++ b/packages/server/src/sdk/app/datasources/plus.ts
@@ -3,12 +3,33 @@ import {
   DatasourcePlus,
   IntegrationBase,
   Schema,
+  Table,
 } from "@budibase/types"
 import * as datasources from "./datasources"
 import tableSdk from "../tables"
 import { getIntegration } from "../../../integrations"
 import { context } from "@budibase/backend-core"

+function checkForSchemaErrors(schema: Record<string, Table>) {
+  const errors: Record<string, string> = {}
+  for (let [tableName, table] of Object.entries(schema)) {
+    if (tableName.includes(".")) {
+      errors[tableName] = "Table names containing dots are not supported."
+    } else {
+      const columnNames = Object.keys(table.schema)
+      const invalidColumnName = columnNames.find(columnName =>
+        columnName.includes(".")
+      )
+      if (invalidColumnName) {
+        errors[
+          tableName
+        ] = `Column '${invalidColumnName}' is not supported as it contains a dot.`
+      }
+    }
+  }
+  return errors
+}
+
 export async function buildFilteredSchema(
   datasource: Datasource,
   filter?: string[]
@@ -30,16 +51,19 @@ export async function buildFilteredSchema(
       filteredSchema.errors[key] = schema.errors[key]
     }
   }
-  return filteredSchema
+
+  return {
+    ...filteredSchema,
+    errors: {
+      ...filteredSchema.errors,
+      ...checkForSchemaErrors(filteredSchema.tables),
+    },
+  }
 }

 async function buildSchemaHelper(datasource: Datasource): Promise<Schema> {
   const connector = (await getConnector(datasource)) as DatasourcePlus
-  const externalSchema = await connector.buildSchema(
-    datasource._id!,
-    datasource.entities!
-  )
-  return externalSchema
+  return await connector.buildSchema(datasource._id!, datasource.entities!)
} export async function getConnector( diff --git a/packages/server/src/sdk/app/rows/search.ts b/packages/server/src/sdk/app/rows/search.ts index 4b71179839..8b24f9bc5f 100644 --- a/packages/server/src/sdk/app/rows/search.ts +++ b/packages/server/src/sdk/app/rows/search.ts @@ -36,11 +36,13 @@ export async function search(options: SearchParams): Promise<{ export interface ExportRowsParams { tableId: string format: Format + delimiter?: string rowIds?: string[] columns?: string[] query?: SearchFilters sort?: string sortOrder?: SortOrder + customHeaders?: { [key: string]: string } } export interface ExportRowsResult { diff --git a/packages/server/src/sdk/app/rows/search/external.ts b/packages/server/src/sdk/app/rows/search/external.ts index 8465f997e3..e95b904767 100644 --- a/packages/server/src/sdk/app/rows/search/external.ts +++ b/packages/server/src/sdk/app/rows/search/external.ts @@ -11,7 +11,10 @@ import { import * as exporters from "../../../../api/controllers/view/exporters" import sdk from "../../../../sdk" import { handleRequest } from "../../../../api/controllers/row/external" -import { breakExternalTableId } from "../../../../integrations/utils" +import { + breakExternalTableId, + breakRowIdField, +} from "../../../../integrations/utils" import { cleanExportRows } from "../utils" import { utils } from "@budibase/shared-core" import { ExportRowsParams, ExportRowsResult } from "../search" @@ -52,6 +55,15 @@ export async function search(options: SearchParams) { } } + // Make sure oneOf _id queries decode the Row IDs + if (query?.oneOf?._id) { + const rowIds = query.oneOf._id + query.oneOf._id = rowIds.map((row: string) => { + const ids = breakRowIdField(row) + return ids[0] + }) + } + try { const table = await sdk.tables.getTable(tableId) options = searchInputMapping(table, options) @@ -101,7 +113,17 @@ export async function search(options: SearchParams) { export async function exportRows( options: ExportRowsParams ): Promise { - const { tableId, format, columns, rowIds, query, sort, sortOrder } = options + const { + tableId, + format, + columns, + rowIds, + query, + sort, + sortOrder, + delimiter, + customHeaders, + } = options const { datasourceId, tableName } = breakExternalTableId(tableId) let requestQuery: SearchFilters = {} @@ -109,9 +131,7 @@ export async function exportRows( requestQuery = { oneOf: { _id: rowIds.map((row: string) => { - const ids = JSON.parse( - decodeURI(row).replace(/'/g, `"`).replace(/%2C/g, ",") - ) + const ids = breakRowIdField(row) if (ids.length > 1) { throw new HTTPError( "Export data does not support composite keys.", @@ -153,12 +173,17 @@ export async function exportRows( rows = result.rows } - let exportRows = cleanExportRows(rows, schema, format, columns) + let exportRows = cleanExportRows(rows, schema, format, columns, customHeaders) let content: string switch (format) { case exporters.Format.CSV: - content = exporters.csv(headers ?? Object.keys(schema), exportRows) + content = exporters.csv( + headers ?? 
Object.keys(schema), + exportRows, + delimiter, + customHeaders + ) break case exporters.Format.JSON: content = exporters.json(exportRows) diff --git a/packages/server/src/sdk/app/rows/search/internal.ts b/packages/server/src/sdk/app/rows/search/internal.ts index 22cb3985b7..2d3c32e02e 100644 --- a/packages/server/src/sdk/app/rows/search/internal.ts +++ b/packages/server/src/sdk/app/rows/search/internal.ts @@ -84,7 +84,17 @@ export async function search(options: SearchParams) { export async function exportRows( options: ExportRowsParams ): Promise { - const { tableId, format, rowIds, columns, query, sort, sortOrder } = options + const { + tableId, + format, + rowIds, + columns, + query, + sort, + sortOrder, + delimiter, + customHeaders, + } = options const db = context.getAppDB() const table = await sdk.tables.getTable(tableId) @@ -124,11 +134,16 @@ export async function exportRows( rows = result } - let exportRows = cleanExportRows(rows, schema, format, columns) + let exportRows = cleanExportRows(rows, schema, format, columns, customHeaders) if (format === Format.CSV) { return { fileName: "export.csv", - content: csv(headers ?? Object.keys(rows[0]), exportRows), + content: csv( + headers ?? Object.keys(rows[0]), + exportRows, + delimiter, + customHeaders + ), } } else if (format === Format.JSON) { return { diff --git a/packages/server/src/sdk/app/rows/search/tests/external.spec.ts b/packages/server/src/sdk/app/rows/search/tests/external.spec.ts index 1aaea8e258..bae84592ca 100644 --- a/packages/server/src/sdk/app/rows/search/tests/external.spec.ts +++ b/packages/server/src/sdk/app/rows/search/tests/external.spec.ts @@ -21,10 +21,11 @@ jest.unmock("mysql2/promise") jest.setTimeout(30000) -describe.skip("external", () => { +describe("external search", () => { const config = new TestConfiguration() let externalDatasource: Datasource, tableData: Table + const rows: Row[] = [] beforeAll(async () => { const container = await new GenericContainer("mysql") @@ -89,67 +90,81 @@ describe.skip("external", () => { }, }, } + + const table = await config.createExternalTable({ + ...tableData, + sourceId: externalDatasource._id, + }) + for (let i = 0; i < 10; i++) { + rows.push( + await config.createRow({ + tableId: table._id, + name: generator.first(), + surname: generator.last(), + age: generator.age(), + address: generator.address(), + }) + ) + } }) - describe("search", () => { - const rows: Row[] = [] - beforeAll(async () => { - const table = await config.createExternalTable({ - ...tableData, - sourceId: externalDatasource._id, - }) - for (let i = 0; i < 10; i++) { - rows.push( - await config.createRow({ - tableId: table._id, - name: generator.first(), - surname: generator.last(), - age: generator.age(), - address: generator.address(), - }) - ) + it("default search returns all the data", async () => { + await config.doInContext(config.appId, async () => { + const tableId = config.table!._id! + + const searchParams: SearchParams = { + tableId, + query: {}, } + const result = await search(searchParams) + + expect(result.rows).toHaveLength(10) + expect(result.rows).toEqual( + expect.arrayContaining(rows.map(r => expect.objectContaining(r))) + ) }) + }) - it("default search returns all the data", async () => { - await config.doInContext(config.appId, async () => { - const tableId = config.table!._id! + it("querying by fields will always return data attribute columns", async () => { + await config.doInContext(config.appId, async () => { + const tableId = config.table!._id! 
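// Sketch of how the widened ExportRowsParams above is meant to be driven
// (option names come from the interface in search.ts; the concrete values are
// illustrative only):
//
//   await exportRows({
//     tableId,
//     format: Format.CSV,
//     delimiter: ";",
//     customHeaders: { name: "Full name" },
//   })
//
// `delimiter` is passed straight through to exporters.csv, while
// `customHeaders` renames CSV headers and, via cleanExportRows, JSON keys.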
- const searchParams: SearchParams = { - tableId, - query: {}, - } - const result = await search(searchParams) + const searchParams: SearchParams = { + tableId, + query: {}, + fields: ["name", "age"], + } + const result = await search(searchParams) - expect(result.rows).toHaveLength(10) - expect(result.rows).toEqual( - expect.arrayContaining(rows.map(r => expect.objectContaining(r))) + expect(result.rows).toHaveLength(10) + expect(result.rows).toEqual( + expect.arrayContaining( + rows.map(r => ({ + ...expectAnyExternalColsAttributes, + name: r.name, + age: r.age, + })) ) - }) + ) }) + }) - it("querying by fields will always return data attribute columns", async () => { - await config.doInContext(config.appId, async () => { - const tableId = config.table!._id! + it("will decode _id in oneOf query", async () => { + await config.doInContext(config.appId, async () => { + const tableId = config.table!._id! - const searchParams: SearchParams = { - tableId, - query: {}, - fields: ["name", "age"], - } - const result = await search(searchParams) + const searchParams: SearchParams = { + tableId, + query: { + oneOf: { + _id: ["%5B1%5D", "%5B4%5D", "%5B8%5D"], + }, + }, + } + const result = await search(searchParams) - expect(result.rows).toHaveLength(10) - expect(result.rows).toEqual( - expect.arrayContaining( - rows.map(r => ({ - ...expectAnyExternalColsAttributes, - name: r.name, - age: r.age, - })) - ) - ) - }) + expect(result.rows).toHaveLength(3) + expect(result.rows.map(row => row.id)).toEqual([1, 4, 8]) }) }) }) diff --git a/packages/server/src/sdk/app/rows/search/utils.ts b/packages/server/src/sdk/app/rows/search/utils.ts index 4eee3cea41..5d93dcaca2 100644 --- a/packages/server/src/sdk/app/rows/search/utils.ts +++ b/packages/server/src/sdk/app/rows/search/utils.ts @@ -1,6 +1,5 @@ import { FieldType, - FieldTypeSubtypes, SearchParams, Table, DocumentType, diff --git a/packages/server/src/sdk/app/rows/tests/internal.spec.ts b/packages/server/src/sdk/app/rows/tests/internal.spec.ts index dda41d5720..877bd1e6dc 100644 --- a/packages/server/src/sdk/app/rows/tests/internal.spec.ts +++ b/packages/server/src/sdk/app/rows/tests/internal.spec.ts @@ -81,7 +81,7 @@ describe("sdk >> rows >> internal", () => { const response = await internalSdk.save( table._id!, row, - config.user._id + config.getUser()._id ) expect(response).toEqual({ @@ -98,7 +98,10 @@ describe("sdk >> rows >> internal", () => { }, }) - const persistedRow = await config.getRow(table._id!, response.row._id!) + const persistedRow = await config.api.row.get( + table._id!, + response.row._id! + ) expect(persistedRow).toEqual({ ...row, type: "row", @@ -129,7 +132,7 @@ describe("sdk >> rows >> internal", () => { const response = await internalSdk.save( table._id!, row, - config.user._id + config.getUser()._id ) expect(response).toEqual({ @@ -157,7 +160,10 @@ describe("sdk >> rows >> internal", () => { }, }) - const persistedRow = await config.getRow(table._id!, response.row._id!) + const persistedRow = await config.api.row.get( + table._id!, + response.row._id! 
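// The "%5B1%5D" style IDs in the oneOf test above are URL-encoded external row
// keys: decodeURIComponent("%5B1%5D") === "[1]". A sketch of the decode step
// that search() now applies before querying (this mirrors breakRowIdField, it
// is not the exact implementation):
//
//   const rowIds = ["%5B1%5D", "%5B4%5D", "%5B8%5D"]
//   const decoded = rowIds.map(id => JSON.parse(decodeURIComponent(id))[0])
//   // => [1, 4, 8], matching the expected result.rows ids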
+ ) expect(persistedRow).toEqual({ ...row, type: "row", @@ -190,15 +196,15 @@ describe("sdk >> rows >> internal", () => { await config.doInContext(config.appId, async () => { for (const row of makeRows(5)) { - await internalSdk.save(table._id!, row, config.user._id) + await internalSdk.save(table._id!, row, config.getUser()._id) } await Promise.all( makeRows(10).map(row => - internalSdk.save(table._id!, row, config.user._id) + internalSdk.save(table._id!, row, config.getUser()._id) ) ) for (const row of makeRows(5)) { - await internalSdk.save(table._id!, row, config.user._id) + await internalSdk.save(table._id!, row, config.getUser()._id) } }) diff --git a/packages/server/src/sdk/app/rows/utils.ts b/packages/server/src/sdk/app/rows/utils.ts index 14868a4013..6e3e25364e 100644 --- a/packages/server/src/sdk/app/rows/utils.ts +++ b/packages/server/src/sdk/app/rows/utils.ts @@ -1,12 +1,55 @@ import cloneDeep from "lodash/cloneDeep" import validateJs from "validate.js" -import { FieldType, Row, Table, TableSchema } from "@budibase/types" +import { + Datasource, + DatasourcePlusQueryResponse, + FieldType, + QueryJson, + Row, + SourceName, + Table, + TableSchema, +} from "@budibase/types" import { makeExternalQuery } from "../../../integrations/base/query" import { Format } from "../../../api/controllers/view/exporters" import sdk from "../.." import { isRelationshipColumn } from "../../../db/utils" +import { SqlClient } from "../../../integrations/utils" -export async function getDatasourceAndQuery(json: any) { +const SQL_CLIENT_SOURCE_MAP: Record = { + [SourceName.POSTGRES]: SqlClient.POSTGRES, + [SourceName.MYSQL]: SqlClient.MY_SQL, + [SourceName.SQL_SERVER]: SqlClient.MS_SQL, + [SourceName.ORACLE]: SqlClient.ORACLE, + [SourceName.DYNAMODB]: undefined, + [SourceName.MONGODB]: undefined, + [SourceName.ELASTICSEARCH]: undefined, + [SourceName.COUCHDB]: undefined, + [SourceName.S3]: undefined, + [SourceName.AIRTABLE]: undefined, + [SourceName.ARANGODB]: undefined, + [SourceName.REST]: undefined, + [SourceName.FIRESTORE]: undefined, + [SourceName.GOOGLE_SHEETS]: undefined, + [SourceName.REDIS]: undefined, + [SourceName.SNOWFLAKE]: undefined, + [SourceName.BUDIBASE]: undefined, +} + +export function getSQLClient(datasource: Datasource): SqlClient { + if (!datasource.isSQL) { + throw new Error("Cannot get SQL Client for non-SQL datasource") + } + const lookup = SQL_CLIENT_SOURCE_MAP[datasource.source] + if (lookup) { + return lookup + } + throw new Error("Unable to determine client for SQL datasource") +} + +export async function getDatasourceAndQuery( + json: QueryJson +): DatasourcePlusQueryResponse { const datasourceId = json.endpoint.datasourceId const datasource = await sdk.datasources.get(datasourceId) return makeExternalQuery(datasource, json) @@ -16,7 +59,8 @@ export function cleanExportRows( rows: any[], schema: TableSchema, format: string, - columns?: string[] + columns?: string[], + customHeaders: { [key: string]: string } = {} ) { let cleanRows = [...rows] @@ -44,11 +88,27 @@ export function cleanExportRows( } } } + } else if (format === Format.JSON) { + // Replace row keys with custom headers + for (let row of cleanRows) { + renameKeys(customHeaders, row) + } } return cleanRows } +function renameKeys(keysMap: { [key: string]: any }, row: any) { + for (const key in keysMap) { + Object.defineProperty( + row, + keysMap[key], + Object.getOwnPropertyDescriptor(row, key) || {} + ) + delete row[key] + } +} + function isForeignKey(key: string, table: Table) { const relationships = 
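// Notes on the helpers added above (a sketch of their behaviour): getSQLClient
// resolves only the SQL-capable sources and throws for everything else, e.g.
//
//   getSQLClient({ isSQL: true, source: SourceName.POSTGRES } as Datasource)
//   // => SqlClient.POSTGRES
//
// renameKeys copies each property descriptor to its new name before deleting
// the old key, so getters and setters survive the rename; a header that
// references a missing column ends up defined with an empty descriptor, which
// is a quirk worth knowing when supplying customHeaders.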
Object.values(table.schema).filter(isRelationshipColumn) return relationships.some( diff --git a/packages/server/src/sdk/app/tables/external/index.ts b/packages/server/src/sdk/app/tables/external/index.ts index 9a2bed0da2..0ace19d00e 100644 --- a/packages/server/src/sdk/app/tables/external/index.ts +++ b/packages/server/src/sdk/app/tables/external/index.ts @@ -3,6 +3,7 @@ import { Operation, RelationshipType, RenameColumn, + AddColumn, Table, TableRequest, ViewV2, @@ -32,7 +33,7 @@ import * as viewSdk from "../../views" export async function save( datasourceId: string, update: Table, - opts?: { tableId?: string; renaming?: RenameColumn } + opts?: { tableId?: string; renaming?: RenameColumn; adding?: AddColumn } ) { let tableToSave: TableRequest = { ...update, @@ -165,8 +166,17 @@ export async function save( // remove the rename prop delete tableToSave._rename + + // if adding a new column, we need to rebuild the schema for that table to get the 'externalType' of the column + if (opts?.adding) { + datasource.entities[tableToSave.name] = ( + await datasourceSdk.buildFilteredSchema(datasource, [tableToSave.name]) + ).tables[tableToSave.name] + } else { + datasource.entities[tableToSave.name] = tableToSave + } + // store it into couch now for budibase reference - datasource.entities[tableToSave.name] = tableToSave await db.put(populateExternalTableSchemas(datasource)) // Since tables are stored inside datasources, we need to notify clients diff --git a/packages/server/src/sdk/users/tests/utils.spec.ts b/packages/server/src/sdk/users/tests/utils.spec.ts index efe790d49b..6f1c5afd3d 100644 --- a/packages/server/src/sdk/users/tests/utils.spec.ts +++ b/packages/server/src/sdk/users/tests/utils.spec.ts @@ -22,15 +22,18 @@ describe("syncGlobalUsers", () => { expect(metadata).toHaveLength(1) expect(metadata).toEqual([ expect.objectContaining({ - _id: db.generateUserMetadataID(config.user._id), + _id: db.generateUserMetadataID(config.getUser()._id!), }), ]) }) }) it("admin and builders users are synced", async () => { - const user1 = await config.createUser({ admin: true }) - const user2 = await config.createUser({ admin: false, builder: true }) + const user1 = await config.createUser({ admin: { global: true } }) + const user2 = await config.createUser({ + admin: { global: false }, + builder: { global: true }, + }) await config.doInContext(config.appId, async () => { expect(await rawUserMetadata()).toHaveLength(1) await syncGlobalUsers() @@ -51,7 +54,10 @@ describe("syncGlobalUsers", () => { }) it("app users are not synced if not specified", async () => { - const user = await config.createUser({ admin: false, builder: false }) + const user = await config.createUser({ + admin: { global: false }, + builder: { global: false }, + }) await config.doInContext(config.appId, async () => { await syncGlobalUsers() @@ -68,8 +74,14 @@ describe("syncGlobalUsers", () => { it("app users are added when group is assigned to app", async () => { await config.doInTenant(async () => { const group = await proSdk.groups.save(structures.userGroups.userGroup()) - const user1 = await config.createUser({ admin: false, builder: false }) - const user2 = await config.createUser({ admin: false, builder: false }) + const user1 = await config.createUser({ + admin: { global: false }, + builder: { global: false }, + }) + const user2 = await config.createUser({ + admin: { global: false }, + builder: { global: false }, + }) await proSdk.groups.addUsers(group.id, [user1._id!, user2._id!]) await config.doInContext(config.appId, async () => 
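// (On the tables/external save hunk above: the `adding` branch exists because
// a freshly added column has no `externalType` until the schema is re-read
// from the database itself. A sketch of the call site, assuming AddColumn
// carries at least the new column's name:
//
//   await save(datasourceId, updatedTable, {
//     tableId: table._id,
//     adding: { name: "new_column" },
//   })
//
// which rebuilds the filtered schema for just that table before persisting.)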
{ @@ -103,8 +115,14 @@ describe("syncGlobalUsers", () => { it("app users are removed when app is removed from user group", async () => { await config.doInTenant(async () => { const group = await proSdk.groups.save(structures.userGroups.userGroup()) - const user1 = await config.createUser({ admin: false, builder: false }) - const user2 = await config.createUser({ admin: false, builder: false }) + const user1 = await config.createUser({ + admin: { global: false }, + builder: { global: false }, + }) + const user2 = await config.createUser({ + admin: { global: false }, + builder: { global: false }, + }) await proSdk.groups.updateGroupApps(group.id, { appsToAdd: [ { appId: config.prodAppId!, roleId: roles.BUILTIN_ROLE_IDS.BASIC }, diff --git a/packages/server/src/startup.ts b/packages/server/src/startup.ts index f9b5974eb2..abe931f503 100644 --- a/packages/server/src/startup.ts +++ b/packages/server/src/startup.ts @@ -38,6 +38,7 @@ async function initRoutes(app: Koa) { // api routes app.use(api.router.routes()) + app.use(api.router.allowedMethods()) } async function initPro() { diff --git a/packages/server/src/tests/jestEnv.ts b/packages/server/src/tests/jestEnv.ts index 4763208c54..7c58470e9b 100644 --- a/packages/server/src/tests/jestEnv.ts +++ b/packages/server/src/tests/jestEnv.ts @@ -10,3 +10,4 @@ process.env.MOCK_REDIS = "1" process.env.PLATFORM_URL = "http://localhost:10000" process.env.REDIS_PASSWORD = "budibase" process.env.BUDIBASE_VERSION = "0.0.0+jest" +process.env.WORKER_URL = "http://localhost:10000" diff --git a/packages/server/src/tests/utilities/TestConfiguration.ts b/packages/server/src/tests/utilities/TestConfiguration.ts index 1e238576e2..d17535a72f 100644 --- a/packages/server/src/tests/utilities/TestConfiguration.ts +++ b/packages/server/src/tests/utilities/TestConfiguration.ts @@ -49,25 +49,31 @@ import { AuthToken, Automation, CreateViewRequest, + Ctx, Datasource, FieldType, INTERNAL_TABLE_SOURCE_ID, + Layout, + Query, RelationshipFieldMetadata, RelationshipType, Row, + Screen, SearchParams, SourceName, Table, TableSourceType, User, - UserRoles, + UserCtx, View, + Webhook, WithRequired, } from "@budibase/types" import API from "./api" import { cloneDeep } from "lodash" import jwt, { Secret } from "jsonwebtoken" +import { Server } from "http" mocks.licenses.init(pro) @@ -82,27 +88,23 @@ export interface TableToBuild extends Omit { } export default class TestConfiguration { - server: any - request: supertest.SuperTest | undefined + server?: Server + request?: supertest.SuperTest started: boolean - appId: string | null - allApps: any[] + appId?: string + allApps: App[] app?: App - prodApp: any - prodAppId: any - user: any - userMetadataId: any + prodApp?: App + prodAppId?: string + user?: User + userMetadataId?: string table?: Table - automation: any + automation?: Automation datasource?: Datasource tenantId?: string api: API csrfToken?: string - private get globalUserId() { - return this.user._id - } - constructor(openServer = true) { if (openServer) { // use a random port because it doesn't matter @@ -114,7 +116,7 @@ export default class TestConfiguration { } else { this.started = false } - this.appId = null + this.appId = undefined this.allApps = [] this.api = new API(this) @@ -125,46 +127,86 @@ export default class TestConfiguration { } getApp() { + if (!this.app) { + throw new Error("app has not been initialised, call config.init() first") + } return this.app } getProdApp() { + if (!this.prodApp) { + throw new Error( + "prodApp has not been initialised, call 
config.init() first" + ) + } return this.prodApp } getAppId() { if (!this.appId) { - throw "appId has not been initialised properly" + throw new Error( + "appId has not been initialised, call config.init() first" + ) } - return this.appId } getProdAppId() { + if (!this.prodAppId) { + throw new Error( + "prodAppId has not been initialised, call config.init() first" + ) + } return this.prodAppId } + getUser(): User { + if (!this.user) { + throw new Error("User has not been initialised, call config.init() first") + } + return this.user + } + getUserDetails() { + const user = this.getUser() return { - globalId: this.globalUserId, - email: this.user.email, - firstName: this.user.firstName, - lastName: this.user.lastName, + globalId: user._id!, + email: user.email, + firstName: user.firstName, + lastName: user.lastName, } } + getAutomation() { + if (!this.automation) { + throw new Error( + "automation has not been initialised, call config.init() first" + ) + } + return this.automation + } + + getDatasource() { + if (!this.datasource) { + throw new Error( + "datasource has not been initialised, call config.init() first" + ) + } + return this.datasource + } + async doInContext( - appId: string | null, + appId: string | undefined, task: () => Promise ): Promise { - if (!appId) { - appId = this.appId - } - const tenant = this.getTenantId() return tenancy.doInTenant(tenant, () => { + if (!appId) { + appId = this.appId + } + // check if already in a context - if (context.getAppId() == null && appId !== null) { + if (context.getAppId() == null && appId) { return context.doInAppContext(appId, async () => { return task() }) @@ -257,9 +299,23 @@ export default class TestConfiguration { } } + async withUser(user: User, f: () => Promise) { + const oldUser = this.user + this.user = user + try { + return await f() + } finally { + this.user = oldUser + } + } + // UTILS - _req(body: any, params: any, controlFunc: any) { + _req | void, Res>( + handler: (ctx: UserCtx) => Promise, + body?: Req, + params?: Record + ): Promise { // create a fake request ctx const request: any = {} const appId = this.appId @@ -278,110 +334,66 @@ export default class TestConfiguration { throw new Error(`Error ${status} - ${message}`) } return this.doInContext(appId, async () => { - await controlFunc(request) + await handler(request) return request.body }) } // USER / AUTH - async globalUser( - config: { - id?: string - firstName?: string - lastName?: string - builder?: boolean - admin?: boolean - email?: string - roles?: any - } = {} - ): Promise { + async globalUser(config: Partial = {}): Promise { const { - id = `us_${newid()}`, + _id = `us_${newid()}`, firstName = generator.first(), lastName = generator.last(), - builder = true, - admin = false, - email = generator.email(), - roles, + builder = { global: true }, + admin = { global: false }, + email = generator.email({ domain: "example.com" }), + tenantId = this.getTenantId(), + roles = {}, } = config const db = tenancy.getTenantDB(this.getTenantId()) - let existing + let existing: Partial = {} try { - existing = await db.get(id) + existing = await db.get(_id) } catch (err) { - existing = { email } + // ignore } const user: User = { - _id: id, + _id, ...existing, - roles: roles || {}, - tenantId: this.getTenantId(), + ...config, + _rev: existing._rev, + email, + roles, + tenantId, firstName, lastName, + builder, + admin, } - await sessions.createASession(id, { - sessionId: "sessionid", + await sessions.createASession(_id, { + sessionId: this.sessionIdForUser(_id), tenantId: 
this.getTenantId(), csrfToken: this.csrfToken, }) - if (builder) { - user.builder = { global: true } - } else { - user.builder = { global: false } - } - if (admin) { - user.admin = { global: true } - } else { - user.admin = { global: false } - } const resp = await db.put(user) + await cache.user.invalidateUser(_id) return { _rev: resp.rev, ...user, } } - async createUser( - user: { - id?: string - firstName?: string - lastName?: string - email?: string - builder?: boolean - admin?: boolean - roles?: UserRoles - } = {} - ): Promise { - const { - id, - firstName = generator.first(), - lastName = generator.last(), - email = generator.email(), - builder = true, - admin, - roles, - } = user - - const globalId = !id ? `us_${Math.random()}` : `us_${id}` - const resp = await this.globalUser({ - id: globalId, - firstName, - lastName, - email, - builder, - admin, - roles: roles || {}, - }) - await cache.user.invalidateUser(globalId) - return resp + async createUser(user: Partial = {}): Promise { + return await this.globalUser(user) } async createGroup(roleId: string = roles.BUILTIN_ROLE_IDS.BASIC) { return context.doInTenant(this.tenantId!, async () => { const baseGroup = structures.userGroups.userGroup() baseGroup.roles = { - [this.prodAppId]: roleId, + [this.getProdAppId()]: roleId, } const { id, rev } = await pro.sdk.groups.save(baseGroup) return { @@ -404,8 +416,22 @@ export default class TestConfiguration { }) } - async login({ roleId, userId, builder, prodApp = false }: any = {}) { - const appId = prodApp ? this.prodAppId : this.appId + sessionIdForUser(userId: string): string { + return `sessionid-${userId}` + } + + async login({ + roleId, + userId, + builder, + prodApp, + }: { + roleId?: string + userId: string + builder: boolean + prodApp: boolean + }) { + const appId = prodApp ? this.getProdAppId() : this.getAppId() return context.doInAppContext(appId, async () => { userId = !userId ? 
`us_uuid1` : userId if (!this.request) { @@ -414,19 +440,19 @@ export default class TestConfiguration { // make sure the user exists in the global DB if (roleId !== roles.BUILTIN_ROLE_IDS.PUBLIC) { await this.globalUser({ - id: userId, - builder, - roles: { [this.prodAppId]: roleId }, + _id: userId, + builder: { global: builder }, + roles: { [appId]: roleId || roles.BUILTIN_ROLE_IDS.BASIC }, }) } await sessions.createASession(userId, { - sessionId: "sessionid", + sessionId: this.sessionIdForUser(userId), tenantId: this.getTenantId(), }) // have to fake this const authObj = { userId, - sessionId: "sessionid", + sessionId: this.sessionIdForUser(userId), tenantId: this.getTenantId(), } const authToken = jwt.sign(authObj, coreEnv.JWT_SECRET as Secret) @@ -445,9 +471,10 @@ export default class TestConfiguration { defaultHeaders(extras = {}, prodApp = false) { const tenantId = this.getTenantId() + const user = this.getUser() const authObj: AuthToken = { - userId: this.globalUserId, - sessionId: "sessionid", + userId: user._id!, + sessionId: this.sessionIdForUser(user._id!), tenantId, } const authToken = jwt.sign(authObj, coreEnv.JWT_SECRET as Secret) @@ -485,7 +512,7 @@ export default class TestConfiguration { async basicRoleHeaders() { return await this.roleHeaders({ - email: generator.email(), + email: generator.email({ domain: "example.com" }), builder: false, prodApp: true, roleId: roles.BUILTIN_ROLE_IDS.BASIC, @@ -493,12 +520,12 @@ export default class TestConfiguration { } async roleHeaders({ - email = generator.email(), + email = generator.email({ domain: "example.com" }), roleId = roles.BUILTIN_ROLE_IDS.ADMIN, builder = false, prodApp = true, } = {}) { - return this.login({ email, roleId, builder, prodApp }) + return this.login({ userId: email, roleId, builder, prodApp }) } // TENANCY @@ -521,18 +548,22 @@ export default class TestConfiguration { this.tenantId = structures.tenant.id() this.user = await this.globalUser() - this.userMetadataId = generateUserMetadataID(this.user._id) + this.userMetadataId = generateUserMetadataID(this.user._id!) return this.createApp(appName) } - doInTenant(task: any) { + doInTenant(task: () => T) { return context.doInTenant(this.getTenantId(), task) } // API - async generateApiKey(userId = this.user._id) { + async generateApiKey(userId?: string) { + const user = this.getUser() + if (!userId) { + userId = user._id! + } const db = tenancy.getTenantDB(this.getTenantId()) const id = dbCore.generateDevInfoID(userId) let devInfo: any @@ -552,29 +583,29 @@ export default class TestConfiguration { async createApp(appName: string, url?: string): Promise { // create dev app // clear any old app - this.appId = null - this.app = await context.doInTenant(this.tenantId!, async () => { - const app = await this._req( - { name: appName, url }, - null, - appController.create - ) - this.appId = app.appId! - return app - }) - return await context.doInAppContext(this.getAppId(), async () => { + this.appId = undefined + this.app = await context.doInTenant( + this.tenantId!, + async () => + (await this._req(appController.create, { + name: appName, + url, + })) as App + ) + this.appId = this.app.appId + return await context.doInAppContext(this.app.appId!, async () => { // create production app this.prodApp = await this.publish() this.allApps.push(this.prodApp) - this.allApps.push(this.app) + this.allApps.push(this.app!) return this.app! 
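// The _req refactor above reverses the old argument order: the controller
// handler now comes first, followed by the optional body and params, and the
// request/response types flow from the handler's UserCtx. The two shapes side
// by side, taken from the hunks in this file:
//
//   // before: this._req({ name: appName, url }, null, appController.create)
//   // after:  this._req(appController.create, { name: appName, url })
//   // params-only calls pass undefined for the body:
//   //         this._req(automationController.destroy, undefined, { id, rev })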
}) } async publish() { - await this._req(null, null, deployController.publishApp) + await this._req(deployController.publishApp) // @ts-ignore const prodAppId = this.getAppId().replace("_dev", "") this.prodAppId = prodAppId @@ -586,13 +617,11 @@ export default class TestConfiguration { } async unpublish() { - const response = await this._req( - null, - { appId: this.appId }, - appController.unpublish - ) - this.prodAppId = null - this.prodApp = null + const response = await this._req(appController.unpublish, { + appId: this.appId, + }) + this.prodAppId = undefined + this.prodApp = undefined return response } @@ -698,11 +727,6 @@ export default class TestConfiguration { return this.api.row.save(tableId, config) } - async getRow(tableId: string, rowId: string): Promise { - const res = await this.api.row.get(tableId, rowId) - return res.body - } - async getRows(tableId: string) { if (!tableId && this.table) { tableId = this.table._id! @@ -720,8 +744,7 @@ export default class TestConfiguration { // ROLE async createRole(config?: any) { - config = config || basicRole() - return this._req(config, null, roleController.save) + return this._req(roleController.save, config || basicRole()) } // VIEW @@ -734,7 +757,7 @@ export default class TestConfiguration { tableId: this.table!._id, name: generator.guid(), } - return this._req(view, null, viewController.v1.save) + return this._req(viewController.v1.save, view) } async createView( @@ -758,40 +781,38 @@ export default class TestConfiguration { // AUTOMATION - async createAutomation(config?: any) { + async createAutomation(config?: Automation) { config = config || basicAutomation() if (config._rev) { delete config._rev } - this.automation = ( - await this._req(config, null, automationController.create) - ).automation + const res = await this._req(automationController.create, config) + this.automation = res.automation return this.automation } async getAllAutomations() { - return this._req(null, null, automationController.fetch) + return this._req(automationController.fetch) } - async deleteAutomation(automation?: any) { + async deleteAutomation(automation?: Automation) { automation = automation || this.automation if (!automation) { return } - return this._req( - null, - { id: automation._id, rev: automation._rev }, - automationController.destroy - ) + return this._req(automationController.destroy, undefined, { + id: automation._id, + rev: automation._rev, + }) } - async createWebhook(config?: any) { + async createWebhook(config?: Webhook) { if (!this.automation) { throw "Must create an automation before creating webhook." } - config = config || basicWebhook(this.automation._id) + config = config || basicWebhook(this.automation._id!) - return (await this._req(config, null, webhookController.save)).webhook + return (await this._req(webhookController.save, config)).webhook } // DATASOURCE @@ -813,7 +834,7 @@ export default class TestConfiguration { return { ...this.datasource, _id: this.datasource!._id! } } - async restDatasource(cfg?: any) { + async restDatasource(cfg?: Record) { return this.createDatasource({ datasource: { ...basicDatasource().datasource, @@ -870,26 +891,25 @@ export default class TestConfiguration { // QUERY - async createQuery(config?: any) { - if (!this.datasource && !config) { - throw "No datasource created for query." - } - config = config || basicQuery(this.datasource!._id!) 
- return this._req(config, null, queryController.save) + async createQuery(config?: Query) { + return this._req( + queryController.save, + config || basicQuery(this.getDatasource()._id!) + ) } // SCREEN - async createScreen(config?: any) { + async createScreen(config?: Screen) { config = config || basicScreen() - return this._req(config, null, screenController.save) + return this._req(screenController.save, config) } // LAYOUT - async createLayout(config?: any) { + async createLayout(config?: Layout) { config = config || basicLayout() - return await this._req(config, null, layoutController.save) + return await this._req(layoutController.save, config) } } diff --git a/packages/server/src/tests/utilities/api/application.ts b/packages/server/src/tests/utilities/api/application.ts index 9c784bade1..d28df223b5 100644 --- a/packages/server/src/tests/utilities/api/application.ts +++ b/packages/server/src/tests/utilities/api/application.ts @@ -1,24 +1,150 @@ -import { Response } from "supertest" -import { App } from "@budibase/types" -import TestConfiguration from "../TestConfiguration" -import { TestAPI } from "./base" +import { + App, + PublishResponse, + type CreateAppRequest, + type FetchAppDefinitionResponse, + type FetchAppPackageResponse, + DuplicateAppResponse, +} from "@budibase/types" +import { Expectations, TestAPI } from "./base" +import { AppStatus } from "../../../db/utils" +import { constants } from "@budibase/backend-core" export class ApplicationAPI extends TestAPI { - constructor(config: TestConfiguration) { - super(config) + create = async ( + app: CreateAppRequest, + expectations?: Expectations + ): Promise => { + const files = app.templateFile ? { templateFile: app.templateFile } : {} + delete app.templateFile + return await this._post("/api/applications", { + fields: app, + files, + expectations, + }) } - getRaw = async (appId: string): Promise => { - const result = await this.request - .get(`/api/applications/${appId}/appPackage`) - .set(this.config.defaultHeaders()) - .expect("Content-Type", /json/) - .expect(200) - return result + delete = async ( + appId: string, + expectations?: Expectations + ): Promise => { + await this._delete(`/api/applications/${appId}`, { expectations }) } - get = async (appId: string): Promise => { - const result = await this.getRaw(appId) - return result.body.application as App + publish = async (appId: string): Promise => { + return await this._post( + `/api/applications/${appId}/publish`, + { + // While the publish endpoint does take an :appId parameter, it doesn't + // use it. It uses the appId from the context. + headers: { + [constants.Header.APP_ID]: appId, + }, + } + ) + } + + unpublish = async (appId: string): Promise => { + await this._post(`/api/applications/${appId}/unpublish`, { + expectations: { status: 204 }, + }) + } + + sync = async ( + appId: string, + expectations?: Expectations + ): Promise<{ message: string }> => { + return await this._post<{ message: string }>( + `/api/applications/${appId}/sync`, + { expectations } + ) + } + + get = async (appId: string, expectations?: Expectations): Promise => { + return await this._get(`/api/applications/${appId}`, { + // While the get endpoint does take an :appId parameter, it doesn't use + // it. It uses the appId from the context. 
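// This header override is the same pattern used by publish, updateClient and
// revertClient in this file: the route has an :appId segment, but the handler
// reads the app from context, so the APP_ID header (x-budibase-app-id) must be
// set explicitly. Sketch of a call that relies on it:
//
//   const app = await config.api.application.get(appId)
//   // equivalent to GET /api/applications/:appId with the APP_ID header set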
+ headers: { + [constants.Header.APP_ID]: appId, + }, + expectations, + }) + } + + duplicateApp = async ( + appId: string, + fields: object, + expectations?: Expectations + ): Promise => { + let headers = { + ...this.config.defaultHeaders(), + [constants.Header.APP_ID]: appId, + } + return this._post(`/api/applications/${appId}/duplicate`, { + headers, + fields, + expectations, + }) + } + + getDefinition = async ( + appId: string, + expectations?: Expectations + ): Promise => { + return await this._get( + `/api/applications/${appId}/definition`, + { expectations } + ) + } + + getAppPackage = async ( + appId: string, + expectations?: Expectations + ): Promise => { + return await this._get( + `/api/applications/${appId}/appPackage`, + { expectations } + ) + } + + update = async ( + appId: string, + app: { name?: string; url?: string }, + expectations?: Expectations + ): Promise => { + return await this._put(`/api/applications/${appId}`, { + fields: app, + expectations, + }) + } + + updateClient = async ( + appId: string, + expectations?: Expectations + ): Promise => { + await this._post(`/api/applications/${appId}/client/update`, { + // While the updateClient endpoint does take an :appId parameter, it doesn't + // use it. It uses the appId from the context. + headers: { + [constants.Header.APP_ID]: appId, + }, + expectations, + }) + } + + revertClient = async (appId: string): Promise => { + await this._post(`/api/applications/${appId}/client/revert`, { + // While the revertClient endpoint does take an :appId parameter, it doesn't + // use it. It uses the appId from the context. + headers: { + [constants.Header.APP_ID]: appId, + }, + }) + } + + fetch = async ({ status }: { status?: AppStatus } = {}): Promise => { + return await this._get("/api/applications", { + query: { status }, + }) } } diff --git a/packages/server/src/tests/utilities/api/attachment.ts b/packages/server/src/tests/utilities/api/attachment.ts index a466f1a67e..bb33ef04bb 100644 --- a/packages/server/src/tests/utilities/api/attachment.ts +++ b/packages/server/src/tests/utilities/api/attachment.ts @@ -1,35 +1,16 @@ -import { - APIError, - Datasource, - ProcessAttachmentResponse, -} from "@budibase/types" -import TestConfiguration from "../TestConfiguration" -import { TestAPI } from "./base" +import { ProcessAttachmentResponse } from "@budibase/types" +import { Expectations, TestAPI } from "./base" import fs from "fs" export class AttachmentAPI extends TestAPI { - constructor(config: TestConfiguration) { - super(config) - } - process = async ( name: string, file: Buffer | fs.ReadStream | string, - { expectStatus } = { expectStatus: 200 } + expectations?: Expectations ): Promise => { - const result = await this.request - .post(`/api/attachments/process`) - .attach("file", file, name) - .set(this.config.defaultHeaders()) - - if (result.statusCode !== expectStatus) { - throw new Error( - `Expected status ${expectStatus} but got ${ - result.statusCode - }, body: ${JSON.stringify(result.body)}` - ) - } - - return result.body + return await this._post(`/api/attachments/process`, { + files: { file: { name, file } }, + expectations, + }) } } diff --git a/packages/server/src/tests/utilities/api/backup.ts b/packages/server/src/tests/utilities/api/backup.ts index 8cd1e58a29..7c01b57108 100644 --- a/packages/server/src/tests/utilities/api/backup.ts +++ b/packages/server/src/tests/utilities/api/backup.ts @@ -2,42 +2,38 @@ import { CreateAppBackupResponse, ImportAppBackupResponse, } from "@budibase/types" -import TestConfiguration from 
"../TestConfiguration" -import { TestAPI } from "./base" +import { Expectations, TestAPI } from "./base" export class BackupAPI extends TestAPI { - constructor(config: TestConfiguration) { - super(config) - } - - exportBasicBackup = async (appId: string) => { - const result = await this.request - .post(`/api/backups/export?appId=${appId}`) - .set(this.config.defaultHeaders()) - .expect("Content-Type", /application\/gzip/) - .expect(200) - return { - body: result.body as Buffer, - headers: result.headers, + exportBasicBackup = async (appId: string, expectations?: Expectations) => { + const exp = { + ...expectations, + headers: { + ...expectations?.headers, + "Content-Type": "application/gzip", + }, } + return await this._post(`/api/backups/export`, { + query: { appId }, + expectations: exp, + }) } - createBackup = async (appId: string) => { - const result = await this.request - .post(`/api/apps/${appId}/backups`) - .set(this.config.defaultHeaders()) - .expect("Content-Type", /json/) - .expect(200) - return result.body as CreateAppBackupResponse + createBackup = async (appId: string, expectations?: Expectations) => { + return await this._post( + `/api/apps/${appId}/backups`, + { expectations } + ) } waitForBackupToComplete = async (appId: string, backupId: string) => { for (let i = 0; i < 10; i++) { await new Promise(resolve => setTimeout(resolve, 1000)) - const result = await this.request - .get(`/api/apps/${appId}/backups/${backupId}/file`) - .set(this.config.defaultHeaders()) - if (result.status === 200) { + const response = await this._requestRaw( + "get", + `/api/apps/${appId}/backups/${backupId}/file` + ) + if (response.status === 200) { return } } @@ -46,13 +42,12 @@ export class BackupAPI extends TestAPI { importBackup = async ( appId: string, - backupId: string + backupId: string, + expectations?: Expectations ): Promise => { - const result = await this.request - .post(`/api/apps/${appId}/backups/${backupId}/import`) - .set(this.config.defaultHeaders()) - .expect("Content-Type", /json/) - .expect(200) - return result.body as ImportAppBackupResponse + return await this._post( + `/api/apps/${appId}/backups/${backupId}/import`, + { expectations } + ) } } diff --git a/packages/server/src/tests/utilities/api/base.ts b/packages/server/src/tests/utilities/api/base.ts index 34120277c0..3f534fba86 100644 --- a/packages/server/src/tests/utilities/api/base.ts +++ b/packages/server/src/tests/utilities/api/base.ts @@ -1,17 +1,196 @@ import TestConfiguration from "../TestConfiguration" -import { SuperTest, Test } from "supertest" +import { SuperTest, Test, Response } from "supertest" +import { ReadStream } from "fs" -export interface TestAPIOpts { - headers?: any +type Headers = Record +type Method = "get" | "post" | "put" | "patch" | "delete" + +export interface AttachedFile { + name: string + file: Buffer | ReadStream | string +} + +function isAttachedFile(file: any): file is AttachedFile { + if (file === undefined) { + return false + } + const attachedFile = file as AttachedFile + return ( + Object.hasOwnProperty.call(attachedFile, "file") && + Object.hasOwnProperty.call(attachedFile, "name") + ) +} + +export interface Expectations { status?: number + headers?: Record + headersNotPresent?: string[] + body?: Record +} + +export interface RequestOpts { + headers?: Headers + query?: Record + body?: Record + fields?: Record + files?: Record< + string, + Buffer | ReadStream | string | AttachedFile | undefined + > + expectations?: Expectations + publicUser?: boolean } export abstract class TestAPI { 
config: TestConfiguration request: SuperTest - protected constructor(config: TestConfiguration) { + constructor(config: TestConfiguration) { this.config = config this.request = config.request! } + + protected _get = async (url: string, opts?: RequestOpts): Promise => { + return await this._request("get", url, opts) + } + + protected _post = async (url: string, opts?: RequestOpts): Promise => { + return await this._request("post", url, opts) + } + + protected _put = async (url: string, opts?: RequestOpts): Promise => { + return await this._request("put", url, opts) + } + + protected _patch = async (url: string, opts?: RequestOpts): Promise => { + return await this._request("patch", url, opts) + } + + protected _delete = async ( + url: string, + opts?: RequestOpts + ): Promise => { + return await this._request("delete", url, opts) + } + + protected _requestRaw = async ( + method: "get" | "post" | "put" | "patch" | "delete", + url: string, + opts?: RequestOpts + ): Promise => { + const { + headers = {}, + query = {}, + body, + fields = {}, + files = {}, + expectations, + publicUser = false, + } = opts || {} + const { status = 200 } = expectations || {} + const expectHeaders = expectations?.headers || {} + + if (status !== 204 && !expectHeaders["Content-Type"]) { + expectHeaders["Content-Type"] = /^application\/json/ + } + + let queryParams = [] + for (const [key, value] of Object.entries(query)) { + if (value) { + queryParams.push(`${key}=${value}`) + } + } + if (queryParams.length) { + url += `?${queryParams.join("&")}` + } + + const headersFn = publicUser + ? this.config.publicHeaders.bind(this.config) + : this.config.defaultHeaders.bind(this.config) + let request = this.request[method](url).set( + headersFn({ + "x-budibase-include-stacktrace": "true", + }) + ) + if (headers) { + request = request.set(headers) + } + if (body) { + request = request.send(body) + } + for (const [key, value] of Object.entries(fields)) { + request = request.field(key, value) + } + + for (const [key, value] of Object.entries(files)) { + if (isAttachedFile(value)) { + request = request.attach(key, value.file, value.name) + } else { + request = request.attach(key, value as any) + } + } + if (expectations?.headers) { + for (const [key, value] of Object.entries(expectations.headers)) { + if (value === undefined) { + throw new Error( + `Got an undefined expected value for header "${key}", if you want to check for the absence of a header, use headersNotPresent` + ) + } + request = request.expect(key, value as any) + } + } + + return await request + } + + protected _request = async ( + method: Method, + url: string, + opts?: RequestOpts + ): Promise => { + const { expectations } = opts || {} + const { status = 200 } = expectations || {} + + const response = await this._requestRaw(method, url, opts) + + if (response.status !== status) { + let message = `Expected status ${status} but got ${response.status}` + + const stack = response.body.stack + delete response.body.stack + + if (response.body) { + message += `\n\nBody:` + const body = JSON.stringify(response.body, null, 2) + for (const line of body.split("\n")) { + message += `\n⏐ ${line}` + } + } + + if (stack) { + message += `\n\nStack from request handler:` + for (const line of stack.split("\n")) { + message += `\n⏐ ${line}` + } + } + + throw new Error(message) + } + + if (expectations?.headersNotPresent) { + for (const header of expectations.headersNotPresent) { + if (response.headers[header]) { + throw new Error( + `Expected header ${header} not to be present, 
found value "${response.headers[header]}"` + ) + } + } + } + + if (expectations?.body) { + expect(response.body).toMatchObject(expectations.body) + } + + return response.body + } } diff --git a/packages/server/src/tests/utilities/api/datasource.ts b/packages/server/src/tests/utilities/api/datasource.ts index bcd7a71089..06aa9b4e1e 100644 --- a/packages/server/src/tests/utilities/api/datasource.ts +++ b/packages/server/src/tests/utilities/api/datasource.ts @@ -1,63 +1,48 @@ import { - CreateDatasourceRequest, Datasource, VerifyDatasourceRequest, + CreateDatasourceResponse, + UpdateDatasourceResponse, + UpdateDatasourceRequest, } from "@budibase/types" -import TestConfiguration from "../TestConfiguration" -import { TestAPI } from "./base" -import supertest from "supertest" +import { Expectations, TestAPI } from "./base" export class DatasourceAPI extends TestAPI { - constructor(config: TestConfiguration) { - super(config) - } - - create = async ( + create = async ( config: Datasource, - { - expectStatus, - rawResponse, - }: { expectStatus?: number; rawResponse?: B } = {} - ): Promise => { - const body: CreateDatasourceRequest = { - datasource: config, - tablesFilter: [], - } - const result = await this.request - .post(`/api/datasources`) - .send(body) - .set(this.config.defaultHeaders()) - .expect("Content-Type", /json/) - .expect(expectStatus || 200) - if (rawResponse) { - return result as any - } - return result.body.datasource + expectations?: Expectations + ): Promise => { + const response = await this._post( + `/api/datasources`, + { + body: { + datasource: config, + tablesFilter: [], + }, + expectations, + } + ) + return response.datasource } update = async ( - datasource: Datasource, - { expectStatus } = { expectStatus: 200 } + datasource: UpdateDatasourceRequest, + expectations?: Expectations ): Promise => { - const result = await this.request - .put(`/api/datasources/${datasource._id}`) - .send(datasource) - .set(this.config.defaultHeaders()) - .expect("Content-Type", /json/) - .expect(expectStatus) - return result.body.datasource as Datasource + const response = await this._put( + `/api/datasources/${datasource._id}`, + { body: datasource, expectations } + ) + return response.datasource } verify = async ( data: VerifyDatasourceRequest, - { expectStatus } = { expectStatus: 200 } + expectations?: Expectations ) => { - const result = await this.request - .post(`/api/datasources/verify`) - .send(data) - .set(this.config.defaultHeaders()) - .expect("Content-Type", /json/) - .expect(expectStatus) - return result + return await this._post(`/api/datasources/verify`, { + body: data, + expectations, + }) } } diff --git a/packages/server/src/tests/utilities/api/index.ts b/packages/server/src/tests/utilities/api/index.ts index fdcec3098d..d66acd86fd 100644 --- a/packages/server/src/tests/utilities/api/index.ts +++ b/packages/server/src/tests/utilities/api/index.ts @@ -11,6 +11,7 @@ import { BackupAPI } from "./backup" import { AttachmentAPI } from "./attachment" import { UserAPI } from "./user" import { QueryAPI } from "./query" +import { RoleAPI } from "./role" export default class API { table: TableAPI @@ -25,6 +26,7 @@ export default class API { attachment: AttachmentAPI user: UserAPI query: QueryAPI + roles: RoleAPI constructor(config: TestConfiguration) { this.table = new TableAPI(config) @@ -39,5 +41,6 @@ export default class API { this.attachment = new AttachmentAPI(config) this.user = new UserAPI(config) this.query = new QueryAPI(config) + this.roles = new RoleAPI(config) } } diff 
--git a/packages/server/src/tests/utilities/api/legacyView.ts b/packages/server/src/tests/utilities/api/legacyView.ts index 63981cec5e..38ef70d62a 100644 --- a/packages/server/src/tests/utilities/api/legacyView.ts +++ b/packages/server/src/tests/utilities/api/legacyView.ts @@ -1,16 +1,8 @@ -import TestConfiguration from "../TestConfiguration" -import { TestAPI } from "./base" +import { Expectations, TestAPI } from "./base" +import { Row } from "@budibase/types" export class LegacyViewAPI extends TestAPI { - constructor(config: TestConfiguration) { - super(config) - } - - get = async (id: string, { expectStatus } = { expectStatus: 200 }) => { - return await this.request - .get(`/api/views/${id}`) - .set(this.config.defaultHeaders()) - .expect("Content-Type", /json/) - .expect(expectStatus) + get = async (id: string, expectations?: Expectations) => { + return await this._get(`/api/views/${id}`, { expectations }) } } diff --git a/packages/server/src/tests/utilities/api/permission.ts b/packages/server/src/tests/utilities/api/permission.ts index ffa89e88f9..986796d9a1 100644 --- a/packages/server/src/tests/utilities/api/permission.ts +++ b/packages/server/src/tests/utilities/api/permission.ts @@ -1,52 +1,39 @@ -import { AnyDocument, PermissionLevel } from "@budibase/types" -import TestConfiguration from "../TestConfiguration" -import { TestAPI } from "./base" +import { + AddPermissionRequest, + AddPermissionResponse, + GetResourcePermsResponse, + RemovePermissionRequest, + RemovePermissionResponse, +} from "@budibase/types" +import { Expectations, TestAPI } from "./base" export class PermissionAPI extends TestAPI { - constructor(config: TestConfiguration) { - super(config) + get = async (resourceId: string, expectations?: Expectations) => { + return await this._get( + `/api/permission/${resourceId}`, + { expectations } + ) } - get = async ( - resourceId: string, - { expectStatus } = { expectStatus: 200 } - ) => { - return this.request - .get(`/api/permission/${resourceId}`) - .set(this.config.defaultHeaders()) - .expect("Content-Type", /json/) - .expect(expectStatus) - } - - set = async ( - { - roleId, - resourceId, - level, - }: { roleId: string; resourceId: string; level: PermissionLevel }, - { expectStatus } = { expectStatus: 200 } - ): Promise => { - const res = await this.request - .post(`/api/permission/${roleId}/${resourceId}/${level}`) - .set(this.config.defaultHeaders()) - .expect("Content-Type", /json/) - .expect(expectStatus) - return res.body + add = async ( + request: AddPermissionRequest, + expectations?: Expectations + ): Promise => { + const { roleId, resourceId, level } = request + return await this._post( + `/api/permission/${roleId}/${resourceId}/${level}`, + { expectations } + ) } revoke = async ( - { - roleId, - resourceId, - level, - }: { roleId: string; resourceId: string; level: PermissionLevel }, - { expectStatus } = { expectStatus: 200 } + request: RemovePermissionRequest, + expectations?: Expectations ) => { - const res = await this.request - .delete(`/api/permission/${roleId}/${resourceId}/${level}`) - .set(this.config.defaultHeaders()) - .expect("Content-Type", /json/) - .expect(expectStatus) - return res + const { roleId, resourceId, level } = request + return await this._delete( + `/api/permission/${roleId}/${resourceId}/${level}`, + { expectations } + ) } } diff --git a/packages/server/src/tests/utilities/api/query.ts b/packages/server/src/tests/utilities/api/query.ts index b0eac5c8b7..32866314ff 100644 --- a/packages/server/src/tests/utilities/api/query.ts +++ 
b/packages/server/src/tests/utilities/api/query.ts
@@ -1,60 +1,32 @@
-import TestConfiguration from "../TestConfiguration"
 import {
   Query,
-  QueryPreview,
-  type ExecuteQueryRequest,
-  type ExecuteQueryResponse,
+  ExecuteQueryRequest,
+  ExecuteQueryResponse,
+  PreviewQueryRequest,
+  PreviewQueryResponse,
 } from "@budibase/types"
 import { TestAPI } from "./base"
 
 export class QueryAPI extends TestAPI {
-  constructor(config: TestConfiguration) {
-    super(config)
-  }
-
   create = async (body: Query): Promise<Query> => {
-    const res = await this.request
-      .post(`/api/queries`)
-      .set(this.config.defaultHeaders())
-      .send(body)
-      .expect("Content-Type", /json/)
-
-    if (res.status !== 200) {
-      throw new Error(JSON.stringify(res.body))
-    }
-
-    return res.body as Query
+    return await this._post<Query>(`/api/queries`, { body })
   }
 
   execute = async (
     queryId: string,
     body?: ExecuteQueryRequest
   ): Promise<ExecuteQueryResponse> => {
-    const res = await this.request
-      .post(`/api/v2/queries/${queryId}`)
-      .set(this.config.defaultHeaders())
-      .send(body)
-      .expect("Content-Type", /json/)
-
-    if (res.status !== 200) {
-      throw new Error(JSON.stringify(res.body))
-    }
-
-    return res.body
+    return await this._post<ExecuteQueryResponse>(
+      `/api/v2/queries/${queryId}`,
+      {
+        body,
+      }
+    )
   }
 
-  previewQuery = async (queryPreview: QueryPreview) => {
-    const res = await this.request
-      .post(`/api/queries/preview`)
-      .send(queryPreview)
-      .set(this.config.defaultHeaders())
-      .expect("Content-Type", /json/)
-      .expect(200)
-
-    if (res.status !== 200) {
-      throw new Error(JSON.stringify(res.body))
-    }
-
-    return res.body
+  previewQuery = async (queryPreview: PreviewQueryRequest) => {
+    return await this._post<PreviewQueryResponse>(`/api/queries/preview`, {
+      body: queryPreview,
+    })
  }
 }
diff --git a/packages/server/src/tests/utilities/api/role.ts b/packages/server/src/tests/utilities/api/role.ts
new file mode 100644
index 0000000000..4defbc1220
--- /dev/null
+++ b/packages/server/src/tests/utilities/api/role.ts
@@ -0,0 +1,41 @@
+import {
+  AccessibleRolesResponse,
+  FetchRolesResponse,
+  FindRoleResponse,
+  SaveRoleRequest,
+  SaveRoleResponse,
+} from "@budibase/types"
+import { Expectations, TestAPI } from "./base"
+
+export class RoleAPI extends TestAPI {
+  fetch = async (expectations?: Expectations) => {
+    return await this._get<FetchRolesResponse>(`/api/roles`, {
+      expectations,
+    })
+  }
+
+  find = async (roleId: string, expectations?: Expectations) => {
+    return await this._get<FindRoleResponse>(`/api/roles/${roleId}`, {
+      expectations,
+    })
+  }
+
+  save = async (body: SaveRoleRequest, expectations?: Expectations) => {
+    return await this._post<SaveRoleResponse>(`/api/roles`, {
+      body,
+      expectations,
+    })
+  }
+
+  destroy = async (roleId: string, expectations?: Expectations) => {
+    return await this._delete(`/api/roles/${roleId}`, {
+      expectations,
+    })
+  }
+
+  accessible = async (expectations?: Expectations) => {
+    return await this._get<AccessibleRolesResponse>(`/api/roles/accessible`, {
+      expectations,
+    })
+  }
+}
diff --git a/packages/server/src/tests/utilities/api/row.ts b/packages/server/src/tests/utilities/api/row.ts
index 936c906f9f..86664574cb 100644
--- a/packages/server/src/tests/utilities/api/row.ts
+++ b/packages/server/src/tests/utilities/api/row.ts
@@ -8,162 +8,140 @@ import {
   BulkImportResponse,
   SearchRowResponse,
   SearchParams,
+  DeleteRowRequest,
+  DeleteRows,
+  DeleteRow,
+  ExportRowsResponse,
 } from "@budibase/types"
-import TestConfiguration from "../TestConfiguration"
-import { TestAPI } from "./base"
+import { Expectations, TestAPI } from "./base"
 
 export class RowAPI extends TestAPI {
-  constructor(config: TestConfiguration) {
-    super(config)
-  }
-
   get = 
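// With RoleAPI registered on the API object (see the index.ts hunk earlier),
// role tests can drop raw supertest calls. A sketch, assuming SaveRoleRequest
// takes the usual name/inherits/permissionId trio and returns the saved role:
//
//   const role = await config.api.roles.save({
//     name: "Readonly sales",
//     inherits: roles.BUILTIN_ROLE_IDS.BASIC,
//     permissionId: "read_only",
//   })
//   await config.api.roles.destroy(role._id!)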
async ( sourceId: string, rowId: string, - { expectStatus } = { expectStatus: 200 } + expectations?: Expectations ) => { - const request = this.request - .get(`/api/${sourceId}/rows/${rowId}`) - .set(this.config.defaultHeaders()) - .expect(expectStatus) - if (expectStatus !== 404) { - request.expect("Content-Type", /json/) - } - return request + return await this._get(`/api/${sourceId}/rows/${rowId}`, { + expectations, + }) } getEnriched = async ( sourceId: string, rowId: string, - { expectStatus } = { expectStatus: 200 } + expectations?: Expectations ) => { - const request = this.request - .get(`/api/${sourceId}/${rowId}/enrich`) - .set(this.config.defaultHeaders()) - .expect(expectStatus) - if (expectStatus !== 404) { - request.expect("Content-Type", /json/) - } - return request + return await this._get(`/api/${sourceId}/${rowId}/enrich`, { + expectations, + }) } save = async ( tableId: string, row: SaveRowRequest, - { expectStatus } = { expectStatus: 200 } + expectations?: Expectations ): Promise => { - const resp = await this.request - .post(`/api/${tableId}/rows`) - .send(row) - .set(this.config.defaultHeaders()) - .expect("Content-Type", /json/) - if (resp.status !== expectStatus) { - throw new Error( - `Expected status ${expectStatus} but got ${ - resp.status - }, body: ${JSON.stringify(resp.body)}` - ) - } - return resp.body as Row + return await this._post(`/api/${tableId}/rows`, { + body: row, + expectations, + }) } validate = async ( sourceId: string, row: SaveRowRequest, - { expectStatus } = { expectStatus: 200 } + expectations?: Expectations ): Promise => { - const resp = await this.request - .post(`/api/${sourceId}/rows/validate`) - .send(row) - .set(this.config.defaultHeaders()) - .expect("Content-Type", /json/) - .expect(expectStatus) - return resp.body as ValidateResponse + return await this._post( + `/api/${sourceId}/rows/validate`, + { + body: row, + expectations, + } + ) } patch = async ( sourceId: string, row: PatchRowRequest, - { expectStatus } = { expectStatus: 200 } + expectations?: Expectations ): Promise => { - let resp = await this.request - .patch(`/api/${sourceId}/rows`) - .send(row) - .set(this.config.defaultHeaders()) - .expect("Content-Type", /json/) - if (resp.status !== expectStatus) { - throw new Error( - `Expected status ${expectStatus} but got ${ - resp.status - }, body: ${JSON.stringify(resp.body)}` - ) - } - return resp.body as Row + return await this._patch(`/api/${sourceId}/rows`, { + body: row, + expectations, + }) } delete = async ( sourceId: string, - rows: Row | string | (Row | string)[], - { expectStatus } = { expectStatus: 200 } + row: DeleteRow, + expectations?: Expectations ) => { - return this.request - .delete(`/api/${sourceId}/rows`) - .send(Array.isArray(rows) ? 
{ rows } : rows) - .set(this.config.defaultHeaders()) - .expect("Content-Type", /json/) - .expect(expectStatus) + return await this._delete(`/api/${sourceId}/rows`, { + body: row, + expectations, + }) + } + + bulkDelete = async ( + sourceId: string, + body: DeleteRows, + expectations?: Expectations + ) => { + return await this._delete(`/api/${sourceId}/rows`, { + body, + expectations, + }) } fetch = async ( sourceId: string, - { expectStatus } = { expectStatus: 200 } + expectations?: Expectations ): Promise => { - const request = this.request - .get(`/api/${sourceId}/rows`) - .set(this.config.defaultHeaders()) - .expect(expectStatus) - - return (await request).body + return await this._get(`/api/${sourceId}/rows`, { + expectations, + }) } exportRows = async ( tableId: string, body: ExportRowsRequest, - { expectStatus } = { expectStatus: 200 } + expectations?: Expectations ) => { - const request = this.request - .post(`/api/${tableId}/rows/exportRows?format=json`) - .set(this.config.defaultHeaders()) - .send(body) - .expect("Content-Type", /json/) - .expect(expectStatus) - return request + const response = await this._requestRaw( + "post", + `/api/${tableId}/rows/exportRows`, + { + body, + query: { format: "json" }, + expectations, + } + ) + return response.text } bulkImport = async ( tableId: string, body: BulkImportRequest, - { expectStatus } = { expectStatus: 200 } + expectations?: Expectations ): Promise => { - let request = this.request - .post(`/api/tables/${tableId}/import`) - .send(body) - .set(this.config.defaultHeaders()) - .expect(expectStatus) - return (await request).body + return await this._post( + `/api/tables/${tableId}/import`, + { + body, + expectations, + } + ) } search = async ( sourceId: string, params?: SearchParams, - { expectStatus } = { expectStatus: 200 } + expectations?: Expectations ): Promise => { - const request = this.request - .post(`/api/${sourceId}/search`) - .send(params) - .set(this.config.defaultHeaders()) - .expect(expectStatus) - - return (await request).body + return await this._post(`/api/${sourceId}/search`, { + body: params, + expectations, + }) } } diff --git a/packages/server/src/tests/utilities/api/screen.ts b/packages/server/src/tests/utilities/api/screen.ts index 9245ffe4ba..c8d3e647d8 100644 --- a/packages/server/src/tests/utilities/api/screen.ts +++ b/packages/server/src/tests/utilities/api/screen.ts @@ -1,18 +1,8 @@ -import TestConfiguration from "../TestConfiguration" import { Screen } from "@budibase/types" -import { TestAPI } from "./base" +import { Expectations, TestAPI } from "./base" export class ScreenAPI extends TestAPI { - constructor(config: TestConfiguration) { - super(config) - } - - list = async (): Promise => { - const res = await this.request - .get(`/api/screens`) - .set(this.config.defaultHeaders()) - .expect("Content-Type", /json/) - .expect(200) - return res.body as Screen[] + list = async (expectations?: Expectations): Promise => { + return await this._get(`/api/screens`, { expectations }) } } diff --git a/packages/server/src/tests/utilities/api/table.ts b/packages/server/src/tests/utilities/api/table.ts index 5a9654e3bc..49105a3883 100644 --- a/packages/server/src/tests/utilities/api/table.ts +++ b/packages/server/src/tests/utilities/api/table.ts @@ -5,74 +5,38 @@ import { SaveTableResponse, Table, } from "@budibase/types" -import TestConfiguration from "../TestConfiguration" -import { TestAPI } from "./base" +import { Expectations, TestAPI } from "./base" export class TableAPI extends TestAPI { - constructor(config: 
TestConfiguration) { - super(config) - } - save = async ( data: SaveTableRequest, - { expectStatus } = { expectStatus: 200 } + expectations?: Expectations ): Promise => { - const res = await this.request - .post(`/api/tables`) - .send(data) - .set(this.config.defaultHeaders()) - .expect("Content-Type", /json/) - - if (res.status !== expectStatus) { - throw new Error( - `Expected status ${expectStatus} but got ${ - res.status - } with body ${JSON.stringify(res.body)}` - ) - } - - return res.body + return await this._post("/api/tables", { + body: data, + expectations, + }) } - fetch = async ( - { expectStatus } = { expectStatus: 200 } - ): Promise => { - const res = await this.request - .get(`/api/tables`) - .set(this.config.defaultHeaders()) - .expect("Content-Type", /json/) - .expect(expectStatus) - return res.body + fetch = async (expectations?: Expectations): Promise => { + return await this._get("/api/tables", { expectations }) } get = async ( tableId: string, - { expectStatus } = { expectStatus: 200 } + expectations?: Expectations ): Promise
=> { - const res = await this.request - .get(`/api/tables/${tableId}`) - .set(this.config.defaultHeaders()) - .expect("Content-Type", /json/) - .expect(expectStatus) - return res.body + return await this._get
(`/api/tables/${tableId}`, { expectations }) } migrate = async ( tableId: string, data: MigrateRequest, - { expectStatus } = { expectStatus: 200 } + expectations?: Expectations ): Promise => { - const res = await this.request - .post(`/api/tables/${tableId}/migrate`) - .send(data) - .set(this.config.defaultHeaders()) - if (res.status !== expectStatus) { - throw new Error( - `Expected status ${expectStatus} but got ${ - res.status - } with body ${JSON.stringify(res.body)}` - ) - } - return res.body + return await this._post(`/api/tables/${tableId}/migrate`, { + body: data, + expectations, + }) } } diff --git a/packages/server/src/tests/utilities/api/user.ts b/packages/server/src/tests/utilities/api/user.ts index 2ed23c0461..bb3eae0542 100644 --- a/packages/server/src/tests/utilities/api/user.ts +++ b/packages/server/src/tests/utilities/api/user.ts @@ -4,154 +4,79 @@ import { Flags, UserMetadata, } from "@budibase/types" -import TestConfiguration from "../TestConfiguration" -import { TestAPI } from "./base" +import { Expectations, TestAPI } from "./base" import { DocumentInsertResponse } from "@budibase/nano" export class UserAPI extends TestAPI { - constructor(config: TestConfiguration) { - super(config) - } - fetch = async ( - { expectStatus } = { expectStatus: 200 } + expectations?: Expectations ): Promise => { - const res = await this.request - .get(`/api/users/metadata`) - .set(this.config.defaultHeaders()) - .expect("Content-Type", /json/) - - if (res.status !== expectStatus) { - throw new Error( - `Expected status ${expectStatus} but got ${ - res.status - } with body ${JSON.stringify(res.body)}` - ) - } - - return res.body + return await this._get("/api/users/metadata", { + expectations, + }) } find = async ( id: string, - { expectStatus } = { expectStatus: 200 } + expectations?: Expectations ): Promise => { - const res = await this.request - .get(`/api/users/metadata/${id}`) - .set(this.config.defaultHeaders()) - .expect("Content-Type", /json/) - - if (res.status !== expectStatus) { - throw new Error( - `Expected status ${expectStatus} but got ${ - res.status - } with body ${JSON.stringify(res.body)}` - ) - } - - return res.body + return await this._get( + `/api/users/metadata/${id}`, + { + expectations, + } + ) } update = async ( user: UserMetadata, - { expectStatus } = { expectStatus: 200 } + expectations?: Expectations ): Promise => { - const res = await this.request - .put(`/api/users/metadata`) - .set(this.config.defaultHeaders()) - .send(user) - .expect("Content-Type", /json/) - - if (res.status !== expectStatus) { - throw new Error( - `Expected status ${expectStatus} but got ${ - res.status - } with body ${JSON.stringify(res.body)}` - ) - } - - return res.body as DocumentInsertResponse + return await this._put("/api/users/metadata", { + body: user, + expectations, + }) } updateSelf = async ( user: UserMetadata, - { expectStatus } = { expectStatus: 200 } + expectations?: Expectations ): Promise => { - const res = await this.request - .post(`/api/users/metadata/self`) - .set(this.config.defaultHeaders()) - .send(user) - .expect("Content-Type", /json/) - - if (res.status !== expectStatus) { - throw new Error( - `Expected status ${expectStatus} but got ${ - res.status - } with body ${JSON.stringify(res.body)}` - ) - } - - return res.body as DocumentInsertResponse + return await this._post( + "/api/users/metadata/self", + { + body: user, + expectations, + } + ) } destroy = async ( id: string, - { expectStatus } = { expectStatus: 200 } + expectations?: Expectations ): Promise<{ 
message: string }> => { - const res = await this.request - .delete(`/api/users/metadata/${id}`) - .set(this.config.defaultHeaders()) - .expect("Content-Type", /json/) - - if (res.status !== expectStatus) { - throw new Error( - `Expected status ${expectStatus} but got ${ - res.status - } with body ${JSON.stringify(res.body)}` - ) - } - - return res.body as { message: string } + return await this._delete<{ message: string }>( + `/api/users/metadata/${id}`, + { + expectations, + } + ) } setFlag = async ( flag: string, value: any, - { expectStatus } = { expectStatus: 200 } + expectations?: Expectations ): Promise<{ message: string }> => { - const res = await this.request - .post(`/api/users/flags`) - .set(this.config.defaultHeaders()) - .send({ flag, value }) - .expect("Content-Type", /json/) - - if (res.status !== expectStatus) { - throw new Error( - `Expected status ${expectStatus} but got ${ - res.status - } with body ${JSON.stringify(res.body)}` - ) - } - - return res.body as { message: string } + return await this._post<{ message: string }>(`/api/users/flags`, { + body: { flag, value }, + expectations, + }) } - getFlags = async ( - { expectStatus } = { expectStatus: 200 } - ): Promise => { - const res = await this.request - .get(`/api/users/flags`) - .set(this.config.defaultHeaders()) - .expect("Content-Type", /json/) - - if (res.status !== expectStatus) { - throw new Error( - `Expected status ${expectStatus} but got ${ - res.status - } with body ${JSON.stringify(res.body)}` - ) - } - - return res.body as Flags + getFlags = async (expectations?: Expectations): Promise => { + return await this._get(`/api/users/flags`, { + expectations, + }) } } diff --git a/packages/server/src/tests/utilities/api/viewV2.ts b/packages/server/src/tests/utilities/api/viewV2.ts index 92a6d394bf..d4539e00b1 100644 --- a/packages/server/src/tests/utilities/api/viewV2.ts +++ b/packages/server/src/tests/utilities/api/viewV2.ts @@ -3,21 +3,16 @@ import { UpdateViewRequest, ViewV2, SearchViewRowRequest, + PaginatedSearchRowResponse, } from "@budibase/types" -import TestConfiguration from "../TestConfiguration" -import { TestAPI } from "./base" +import { Expectations, TestAPI } from "./base" import { generator } from "@budibase/backend-core/tests" -import { Response } from "superagent" import sdk from "../../../sdk" export class ViewV2API extends TestAPI { - constructor(config: TestConfiguration) { - super(config) - } - create = async ( viewData?: Partial, - { expectStatus } = { expectStatus: 201 } + expectations?: Expectations ): Promise => { let tableId = viewData?.tableId if (!tableId && !this.config.table) { @@ -30,43 +25,36 @@ export class ViewV2API extends TestAPI { name: generator.guid(), ...viewData, } - const result = await this.request - .post(`/api/v2/views`) - .send(view) - .set(this.config.defaultHeaders()) - .expect("Content-Type", /json/) - .expect(expectStatus) - return result.body.data as ViewV2 + + const exp: Expectations = { + status: 201, + ...expectations, + } + + const resp = await this._post<{ data: ViewV2 }>("/api/v2/views", { + body: view, + expectations: exp, + }) + return resp.data } update = async ( view: UpdateViewRequest, - { - expectStatus, - handleResponse, - }: { - expectStatus: number - handleResponse?: (response: Response) => void - } = { expectStatus: 200 } + expectations?: Expectations ): Promise => { - const result = await this.request - .put(`/api/v2/views/${view.id}`) - .send(view) - .set(this.config.defaultHeaders()) - .expect("Content-Type", /json/) - .expect(expectStatus) - 
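Taken together, the hunks above replace each helper's ad-hoc `{ expectStatus }` option and hand-rolled superagent chain with a single optional `Expectations` object resolved inside `TestAPI`. A minimal usage sketch in TypeScript (a jest test with a `TestConfiguration` named `config`; the table/view fixture names are illustrative, not part of this diff):

    const table = await config.api.table.save(basicTable())    // 200 is implied
    await config.api.table.save(invalidTable, { status: 400 }) // per-call override
    await config.api.viewV2.delete(view.id)                    // helper defaults this to 204

The per-call `status` override is what replaces the old `expectStatus`/`handleResponse` options visible in the removed lines.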
diff --git a/packages/server/src/tests/utilities/structures.ts b/packages/server/src/tests/utilities/structures.ts
index 2fecf15fd6..5b50bd1175 100644
--- a/packages/server/src/tests/utilities/structures.ts
+++ b/packages/server/src/tests/utilities/structures.ts
@@ -22,6 +22,8 @@ import {
   INTERNAL_TABLE_SOURCE_ID,
   TableSourceType,
   Query,
+  Webhook,
+  WebhookActionType,
 } from "@budibase/types"
 import { LoopInput, LoopStepType } from "../../definitions/automations"
 
@@ -407,12 +409,12 @@ export function basicLayout() {
   return cloneDeep(EMPTY_LAYOUT)
 }
 
-export function basicWebhook(automationId: string) {
+export function basicWebhook(automationId: string): Webhook {
   return {
     live: true,
     name: "webhook",
     action: {
-      type: "automation",
+      type: WebhookActionType.AUTOMATION,
       target: automationId,
     },
   }
diff --git a/packages/shared-core/src/filters.ts b/packages/shared-core/src/filters.ts
index 46d765a7b5..d9fe533c88 100644
--- a/packages/shared-core/src/filters.ts
+++ b/packages/shared-core/src/filters.ts
@@ -390,24 +390,41 @@ export const runLuceneQuery = (docs: any[], query?: SearchQuery) => {
     }
   )
 
-  // Match a document against all criteria
   const docMatch = (doc: any) => {
-    return (
-      stringMatch(doc) &&
-      fuzzyMatch(doc) &&
-      rangeMatch(doc) &&
-      equalMatch(doc) &&
-      notEqualMatch(doc) &&
-      emptyMatch(doc) &&
-      notEmptyMatch(doc) &&
-      oneOf(doc) &&
-      contains(doc) &&
-      containsAny(doc) &&
-      notContains(doc)
-    )
-  }
+    const filterFunctions: Record<SearchQueryOperators, (doc: any) => boolean> =
+      {
+        string: stringMatch,
+        fuzzy: fuzzyMatch,
+        range: rangeMatch,
+        equal: equalMatch,
+        notEqual: notEqualMatch,
+        empty: emptyMatch,
+        notEmpty: notEmptyMatch,
+        oneOf: oneOf,
+        contains: contains,
+        containsAny: containsAny,
+        notContains: notContains,
+      }
-
-  // Process all docs
+
+    const activeFilterKeys: SearchQueryOperators[] = Object.entries(query || {})
+      .filter(
+        ([key, value]: [string, any]) =>
+          !["allOr", "onEmptyFilter"].includes(key) &&
+          value &&
+          Object.keys(value as Record<string, any>).length > 0
+      )
+      .map(([key]) => key as any)
+
+    const results: boolean[] = activeFilterKeys.map(filterKey => {
+      return filterFunctions[filterKey]?.(doc) ?? false
+    })
+
+    if (query!.allOr) {
+      return results.some(result => result === true)
+    } else {
+      return results.every(result => result === true)
+    }
+  }
 
   return docs.filter(docMatch)
 }
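The rewrite above changes how operators combine: each operator block with at least one key contributes one boolean, and `allOr` flips the reducer from `every` to `some`. A small sketch of the resulting semantics (the `as any` casts stand in for a fully populated `SearchQuery`, whose remaining empty operator keys are omitted here):

    import { runLuceneQuery } from "../filters"

    const docs = [
      { order_id: 1, order_status: 4, label: "FRAGILE" },
      { order_id: 2, order_status: 9, label: "" },
    ]

    // Default (AND): every active operator must match, so no row passes.
    runLuceneQuery(docs, {
      equal: { order_status: 4 },
      oneOf: { label: ["MISSING"] },
    } as any) // -> []

    // allOr (OR): one matching operator is enough, so order 1 passes.
    runLuceneQuery(docs, {
      allOr: true,
      equal: { order_status: 4 },
      oneOf: { label: ["MISSING"] },
    } as any) // -> [{ order_id: 1, ... }]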
diff --git a/packages/shared-core/src/tests/filters.test.ts b/packages/shared-core/src/tests/filters.test.ts
index 8586d58777..de969562af 100644
--- a/packages/shared-core/src/tests/filters.test.ts
+++ b/packages/shared-core/src/tests/filters.test.ts
@@ -47,10 +47,7 @@ describe("runLuceneQuery", () => {
     },
   ]
 
-  function buildQuery(
-    filterKey: string,
-    value: { [key: string]: any }
-  ): SearchQuery {
+  function buildQuery(filters: { [filterKey: string]: any }): SearchQuery {
     const query: SearchQuery = {
       string: {},
       fuzzy: {},
@@ -63,8 +60,13 @@ describe("runLuceneQuery", () => {
       notContains: {},
       oneOf: {},
       containsAny: {},
+      allOr: false,
     }
-    query[filterKey as SearchQueryOperators] = value
+
+    for (const filterKey in filters) {
+      query[filterKey as SearchQueryOperators] = filters[filterKey]
+    }
+
     return query
   }
 
@@ -73,16 +75,17 @@ describe("runLuceneQuery", () => {
   })
 
   it("should return matching rows for equal filter", () => {
-    const query = buildQuery("equal", {
-      order_status: 4,
+    const query = buildQuery({
+      equal: { order_status: 4 },
     })
     expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual([1, 2])
   })
 
   it("should return matching row for notEqual filter", () => {
-    const query = buildQuery("notEqual", {
-      order_status: 4,
+    const query = buildQuery({
+      notEqual: { order_status: 4 },
     })
+
     expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual([3])
   })
 
@@ -90,48 +93,56 @@ describe("runLuceneQuery", () => {
     expect(
       runLuceneQuery(
         docs,
-        buildQuery("fuzzy", {
-          description: "sm",
+        buildQuery({
+          fuzzy: { description: "sm" },
         })
       ).map(row => row.description)
     ).toEqual(["Small box"])
     expect(
       runLuceneQuery(
         docs,
-        buildQuery("string", {
-          description: "SM",
+        buildQuery({
+          string: { description: "SM" },
         })
       ).map(row => row.description)
     ).toEqual(["Small box"])
   })
 
   it("should return rows within a range filter", () => {
-    const query = buildQuery("range", {
-      customer_id: {
-        low: 500,
-        high: 1000,
+    const query = buildQuery({
+      range: {
+        customer_id: {
+          low: 500,
+          high: 1000,
+        },
       },
     })
+
     expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual([3])
   })
 
   it("should return rows with numeric strings within a range filter", () => {
-    const query = buildQuery("range", {
-      customer_id: {
-        low: "500",
-        high: "1000",
+    const query = buildQuery({
+      range: {
+        customer_id: {
+          low: "500",
+          high: "1000",
+        },
       },
     })
     expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual([3])
   })
 
   it("should return rows with ISO date strings within a range filter", () => {
-    const query = buildQuery("range", {
-      order_date: {
-        low: "2016-01-04T00:00:00.000Z",
-        high: "2016-01-11T00:00:00.000Z",
+    const query = buildQuery({
+      range: {
+        order_date: {
+          low: "2016-01-04T00:00:00.000Z",
+          high: "2016-01-11T00:00:00.000Z",
+        },
       },
     })
+
     expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual([2])
   })
 
@@ -150,40 +161,88 @@ describe("runLuceneQuery", () => {
         label: "",
       },
     ]
-    const query = buildQuery("range", {
-      order_date: {
-        low: "2016-01-04T00:00:00.000Z",
-        high: "2016-01-11T00:00:00.000Z",
+
+    const query = buildQuery({
+      range: {
+        order_date: {
+          low: "2016-01-04T00:00:00.000Z",
+          high: "2016-01-11T00:00:00.000Z",
+        },
       },
     })
+
     expect(runLuceneQuery(docs, query)).toEqual(docs)
   })
 
   it("should return rows with matches on empty filter", () => {
-    const query = buildQuery("empty", {
-      label: null,
+    const query = buildQuery({
+      empty: {
+        label: null,
+      },
     })
+
     expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual([1])
   })
 
   it("should return rows with matches on notEmpty filter", () => {
-    const query = buildQuery("notEmpty", {
-      label: null,
+    const query = buildQuery({
+      notEmpty: {
+        label: null,
+      },
     })
+
     expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual([2, 3])
   })
 
   test.each([[523, 259], "523,259"])(
     "should return rows with matches on numeric oneOf filter",
     input => {
-      let query = buildQuery("oneOf", {
-        customer_id: input,
+      const query = buildQuery({
+        oneOf: {
+          customer_id: input,
+        },
       })
+
      expect(runLuceneQuery(docs, query).map(row => row.customer_id)).toEqual([
        259, 523,
      ])
    }
  )
+
+  test.each([
+    [false, []],
+    [true, [1, 2, 3]],
+  ])("when allOr is %s, should return %s", (allOr, expectedResult) => {
+    const query = buildQuery({
+      allOr,
+      oneOf: { staff_id: [10] },
+      contains: { description: ["box"] },
+    })
+
+    expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual(
+      expectedResult
+    )
+  })
+
+  it("should return matching results if allOr is true and only one filter matches with different operands", () => {
+    const query = buildQuery({
+      allOr: true,
+      equal: { order_status: 4 },
+      oneOf: { label: ["FRAGILE"] },
+    })
+
+    expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual([1, 2])
+  })
+
+  it("should handle when a value is null or undefined", () => {
+    const query = buildQuery({
+      allOr: true,
+      equal: { order_status: null },
+      oneOf: { label: ["FRAGILE"] },
+    })
+
+    expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual([2])
+  })
 })
 
 describe("buildLuceneQuery", () => {
diff --git a/packages/types/package.json b/packages/types/package.json
index ce4fce95fb..558e55a632 100644
--- a/packages/types/package.json
+++ b/packages/types/package.json
@@ -18,7 +18,7 @@
     "@budibase/nano": "10.1.5",
     "@types/koa": "2.13.4",
     "@types/pouchdb": "6.4.0",
-    "@types/redlock": "4.0.3",
+    "@types/redlock": "4.0.7",
     "rimraf": "3.0.2",
     "typescript": "5.2.2"
   },
diff --git a/packages/types/src/api/web/app/datasource.ts b/packages/types/src/api/web/app/datasource.ts
index 4a3d07a952..f931665917 100644
--- a/packages/types/src/api/web/app/datasource.ts
+++ b/packages/types/src/api/web/app/datasource.ts
@@ -32,9 +32,7 @@ export interface FetchDatasourceInfoResponse {
   tableNames: string[]
 }
 
-export interface UpdateDatasourceRequest extends Datasource {
-  datasource: Datasource
-}
+export interface UpdateDatasourceRequest extends Datasource {}
 
 export interface BuildSchemaFromSourceRequest {
   tablesFilter?: string[]
diff --git a/packages/types/src/api/web/app/permission.ts b/packages/types/src/api/web/app/permission.ts
index a8ab0e8084..88ff4e9d2f 100644
--- a/packages/types/src/api/web/app/permission.ts
+++ b/packages/types/src/api/web/app/permission.ts
@@ -1,4 +1,4 @@
-import { PlanType } from "../../../sdk"
+import { PermissionLevel, PlanType } from "../../../sdk"
 
 export interface ResourcePermissionInfo {
   role: string
@@ -14,3 +14,21 @@ export interface GetResourcePermsResponse {
 export interface GetDependantResourcesResponse {
   resourceByType?: Record<string, number>
 }
+
+export interface AddedPermission {
+  _id?: string
+  rev?: string
+  error?: string
+  reason?: string
+}
+
+export type AddPermissionResponse = AddedPermission[]
+
+export interface AddPermissionRequest {
+  roleId: string
+  resourceId: string
+  level: PermissionLevel
+}
+
+export interface RemovePermissionRequest extends AddPermissionRequest {}
+export interface RemovePermissionResponse extends AddPermissionResponse {}
diff --git a/packages/types/src/api/web/app/rows.ts b/packages/types/src/api/web/app/rows.ts
index dad3286754..0a43182dfd 100644
--- a/packages/types/src/api/web/app/rows.ts
+++ b/packages/types/src/api/web/app/rows.ts
@@ -1,6 +1,6 @@
 import { SearchFilters, SearchParams } from "../../../sdk"
 import { Row } from "../../../documents"
-import { SortOrder } from "../../../api"
+import { PaginationResponse, SortOrder } from "../../../api"
 import { ReadStream } from "fs"
 
 export interface SaveRowRequest extends Row {}
@@ -31,12 +31,18 @@ export interface SearchRowResponse {
   rows: any[]
 }
 
+export interface PaginatedSearchRowResponse
+  extends SearchRowResponse,
+    PaginationResponse {}
+
 export interface ExportRowsRequest {
   rows: string[]
   columns?: string[]
   query?: SearchFilters
   sort?: string
   sortOrder?: SortOrder
+  delimiter?: string
+  customHeaders?: { [key: string]: string }
 }
 
 export type ExportRowsResponse = ReadStream
diff --git a/packages/types/src/api/web/application.ts b/packages/types/src/api/web/application.ts
new file mode 100644
index 0000000000..b77e81880b
--- /dev/null
+++ b/packages/types/src/api/web/application.ts
@@ -0,0 +1,47 @@
+import type { PlanType } from "../../sdk"
+import type { Layout, App, Screen } from "../../documents"
+
+export interface CreateAppRequest {
+  name: string
+  url?: string
+  useTemplate?: string
+  templateName?: string
+  templateKey?: string
+  templateFile?: string
+  includeSampleData?: boolean
+  encryptionPassword?: string
+  templateString?: string
+  file?: { path: string }
+}
+
+export interface DuplicateAppRequest {
+  name: string
+  url?: string
+}
+
+export interface DuplicateAppResponse {
+  duplicateAppId: string
+  sourceAppId: string
+  message: string
+}
+
+export interface FetchAppDefinitionResponse {
+  layouts: Layout[]
+  screens: Screen[]
+  libraries: string[]
+}
+
+export interface FetchAppPackageResponse {
+  application: App
+  licenseType: PlanType
+  screens: Screen[]
+  layouts: Layout[]
+  clientLibPath: string
+  hasLock: boolean
+}
+
+export interface PublishResponse {
+  _id: string
+  status: string
+  appUrl: string
+}
diff --git a/packages/types/src/api/web/auth.ts b/packages/types/src/api/web/auth.ts
index 4db36fa27e..30d129f650 100644
--- a/packages/types/src/api/web/auth.ts
+++ b/packages/types/src/api/web/auth.ts
@@ -19,6 +19,7 @@ export interface UpdateSelfRequest {
   forceResetPassword?: boolean
   onboardedAt?: string
   appFavourites?: string[]
+  tours?: Record<string, Date>
 }
 
 export interface UpdateSelfResponse {
diff --git a/packages/types/src/api/web/automation.ts b/packages/types/src/api/web/automation.ts
new file mode 100644
index 0000000000..c1f3d01b2f
--- /dev/null
+++ b/packages/types/src/api/web/automation.ts
@@ -0,0 +1,3 @@
+import { DocumentDestroyResponse } from "@budibase/nano"
+
+export interface DeleteAutomationResponse extends DocumentDestroyResponse {}
diff --git a/packages/types/src/api/web/index.ts b/packages/types/src/api/web/index.ts
index 75c246ab9b..8a091afdba 100644
--- a/packages/types/src/api/web/index.ts
+++ b/packages/types/src/api/web/index.ts
@@ -1,3 +1,4 @@
+export * from "./application"
 export * from "./analytics"
 export * from "./auth"
 export * from "./user"
@@ -10,3 +11,7 @@ export * from "./global"
 export * from "./pagination"
 export * from "./searchFilter"
 export * from "./cookies"
+export * from "./automation"
+export * from "./layout"
+export * from "./query"
+export * from "./role"
diff --git a/packages/types/src/api/web/layout.ts b/packages/types/src/api/web/layout.ts
new file mode 100644
index 0000000000..50512777ef
--- /dev/null
+++ b/packages/types/src/api/web/layout.ts
@@ -0,0 +1,5 @@
+import { Layout } from "../../documents"
+
+export interface SaveLayoutRequest extends Layout {}
+
+export interface SaveLayoutResponse extends Layout {}
diff --git a/packages/types/src/api/web/query.ts b/packages/types/src/api/web/query.ts
new file mode 100644
index 0000000000..3959cdea19
--- /dev/null
+++ b/packages/types/src/api/web/query.ts
@@ -0,0 +1,20 @@
+import { QueryPreview, QuerySchema } from "../../documents"
+
+export interface PreviewQueryRequest extends QueryPreview {}
+
+export interface PreviewQueryResponse {
+  rows: any[]
+  nestedSchemaFields: { [key: string]: { [key: string]: string | QuerySchema } }
+  schema: { [key: string]: string | QuerySchema }
+  info: any
+  extra: any
+}
+
+export interface ExecuteQueryRequest {
+  parameters?: { [key: string]: string }
+  pagination?: any
+}
+
+export interface ExecuteQueryResponse {
+  data: Record<string, any>[]
+}
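With `PreviewQueryRequest`/`PreviewQueryResponse` now living beside the other web contracts, a preview round-trip in a server test looks roughly like the sketch below. This is only a sketch: `config` and the `query` fixture are assumed to exist in the test, and the cast papers over `Query` fields that `QueryPreview` omits.

    import { PreviewQueryRequest, PreviewQueryResponse } from "@budibase/types"

    const body = { ...query, parameters: {} } as PreviewQueryRequest
    const preview: PreviewQueryResponse = await config.api.query.previewQuery(body)
    // schema/nestedSchemaFields describe the fields the preview inferred
    expect(Object.keys(preview.schema).length).toBeGreaterThan(0)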
diff --git a/packages/types/src/api/web/role.ts b/packages/types/src/api/web/role.ts
new file mode 100644
index 0000000000..c37dee60e0
--- /dev/null
+++ b/packages/types/src/api/web/role.ts
@@ -0,0 +1,22 @@
+import { Role } from "../../documents"
+
+export interface SaveRoleRequest {
+  _id?: string
+  _rev?: string
+  name: string
+  inherits: string
+  permissionId: string
+  version: string
+}
+
+export interface SaveRoleResponse extends Role {}
+
+export interface FindRoleResponse extends Role {}
+
+export type FetchRolesResponse = Role[]
+
+export interface DestroyRoleResponse {
+  message: string
+}
+
+export type AccessibleRolesResponse = string[]
diff --git a/packages/types/src/documents/app/app.ts b/packages/types/src/documents/app/app.ts
index 08aafc6527..ae4f3fa6da 100644
--- a/packages/types/src/documents/app/app.ts
+++ b/packages/types/src/documents/app/app.ts
@@ -1,4 +1,4 @@
-import { User, Document } from "../"
+import { User, Document, Plugin } from "../"
 import { SocketSession } from "../../sdk"
 
 export type AppMetadataErrors = { [key: string]: string[] }
@@ -24,6 +24,8 @@ export interface App extends Document {
   icon?: AppIcon
   features?: AppFeatures
   automations?: AutomationSettings
+  usedPlugins?: Plugin[]
+  upgradableVersion?: string
 }
 
 export interface AppInstance {
diff --git a/packages/types/src/documents/app/datasource.ts b/packages/types/src/documents/app/datasource.ts
index 67035a2e72..8976e1cae3 100644
--- a/packages/types/src/documents/app/datasource.ts
+++ b/packages/types/src/documents/app/datasource.ts
@@ -6,6 +6,9 @@ export interface Datasource extends Document {
   type: string
   name?: string
   source: SourceName
+  // this is a googlesheets specific property which
+  // can be found in the GSheets schema - pertains to SSO creds
+  auth?: { type: string }
   // the config is defined by the schema
   config?: Record<string, any>
   plus?: boolean
@@ -36,6 +39,12 @@ export interface RestAuthConfig {
   config: RestBasicAuthConfig | RestBearerAuthConfig
 }
 
+export interface DynamicVariable {
+  name: string
+  queryId: string
+  value: string
+}
+
 export interface RestConfig {
   url: string
   rejectUnauthorized: boolean
@@ -47,11 +56,5 @@ export interface RestConfig {
   staticVariables: {
     [key: string]: string
   }
-  dynamicVariables: [
-    {
-      name: string
-      queryId: string
-      value: string
-    }
-  ]
+  dynamicVariables: DynamicVariable[]
 }
diff --git a/packages/types/src/documents/app/layout.ts b/packages/types/src/documents/app/layout.ts
index 06542f680d..51ce511712 100644
--- a/packages/types/src/documents/app/layout.ts
+++ b/packages/types/src/documents/app/layout.ts
@@ -1,6 +1,11 @@
 import { Document } from "../document"
 
 export interface Layout extends Document {
+  componentLibraries: string[]
+  title: string
+  favicon: string
+  stylesheets: string[]
   props: any
   layoutId?: string
+  name?: string
 }
diff --git a/packages/types/src/documents/app/query.ts b/packages/types/src/documents/app/query.ts
index b1b0a1d780..3227666bf3 100644
--- a/packages/types/src/documents/app/query.ts
+++ b/packages/types/src/documents/app/query.ts
@@ -62,22 +62,6 @@ export interface PaginationValues {
   limit: number | null
 }
 
-export interface PreviewQueryRequest extends Omit<Query, "_id" | "_rev"> {
-  parameters: {}
-  flags?: {
-    urlName?: boolean
-  }
-}
-
-export interface ExecuteQueryRequest {
-  parameters?: { [key: string]: string }
-  pagination?: any
-}
-
-export interface ExecuteQueryResponse {
-  data: Row[]
-}
-
 export enum HttpMethod {
   GET = "GET",
   POST = "POST",
diff --git a/packages/types/src/documents/app/role.ts b/packages/types/src/documents/app/role.ts
index d126a67b16..f32ba810b0 100644
--- a/packages/types/src/documents/app/role.ts
+++ b/packages/types/src/documents/app/role.ts
@@ -5,4 +5,5 @@ export interface Role extends Document {
   inherits?: string
   permissions: { [key: string]: string[] }
   version?: string
+  name: string
 }
diff --git a/packages/types/src/documents/app/screen.ts b/packages/types/src/documents/app/screen.ts
index 58c00ef3d6..4977c79b0b 100644
--- a/packages/types/src/documents/app/screen.ts
+++ b/packages/types/src/documents/app/screen.ts
@@ -22,4 +22,5 @@ export interface Screen extends Document {
   routing: ScreenRouting
   props: ScreenProps
   name?: string
+  pluginAdded?: boolean
 }
diff --git a/packages/types/src/documents/app/table/table.ts b/packages/types/src/documents/app/table/table.ts
index f3b8e6df8d..3b419dd811 100644
--- a/packages/types/src/documents/app/table/table.ts
+++ b/packages/types/src/documents/app/table/table.ts
@@ -1,6 +1,6 @@
 import { Document } from "../../document"
 import { View, ViewV2 } from "../view"
-import { RenameColumn } from "../../../sdk"
+import { AddColumn, RenameColumn } from "../../../sdk"
 import { TableSchema } from "./schema"
 
 export const INTERNAL_TABLE_SOURCE_ID = "bb_internal"
@@ -29,5 +29,6 @@ export interface Table extends Document {
 
 export interface TableRequest extends Table {
   _rename?: RenameColumn
+  _add?: AddColumn
   created?: boolean
 }
diff --git a/packages/types/src/documents/document.ts b/packages/types/src/documents/document.ts
index 18feb9b518..0de4337f4b 100644
--- a/packages/types/src/documents/document.ts
+++ b/packages/types/src/documents/document.ts
@@ -38,6 +38,7 @@ export enum DocumentType {
   AUTOMATION_METADATA = "meta_au",
   AUDIT_LOG = "al",
   APP_MIGRATION_METADATA = "_design/migrations",
+  SCIM_LOG = "scimlog",
 }
 
 // these are the core documents that make up the data, design
diff --git a/packages/types/src/documents/global/user.ts b/packages/types/src/documents/global/user.ts
index e4cc9d7e2c..a4e6b613c6 100644
--- a/packages/types/src/documents/global/user.ts
+++ b/packages/types/src/documents/global/user.ts
@@ -55,6 +55,7 @@ export interface User extends Document {
   dayPassRecordedAt?: string
   userGroups?: string[]
   onboardedAt?: string
+  tours?: Record<string, Date>
   scimInfo?: { isSync: true } & Record<string, any>
   appFavourites?: string[]
   ssoId?: string
diff --git a/packages/types/src/documents/global/userGroup.ts b/packages/types/src/documents/global/userGroup.ts
index e6b70a160c..f914b2876d 100644
--- a/packages/types/src/documents/global/userGroup.ts
+++ b/packages/types/src/documents/global/userGroup.ts
@@ -24,7 +24,7 @@ export interface GroupUser {
 }
 
 export interface UserGroupRoles {
-  [key: string]: string
+  [key: string]: string | undefined
 }
 
 export interface SearchGroupRequest {}
diff --git a/packages/types/src/documents/pouch.ts b/packages/types/src/documents/pouch.ts
index 11efc502be..6ff851a515 100644
--- a/packages/types/src/documents/pouch.ts
+++ b/packages/types/src/documents/pouch.ts
@@ -5,15 +5,15 @@ export interface RowValue {
   deleted: boolean
 }
 
-export interface RowResponse<T = RowValue> {
+export interface RowResponse<T> {
   id: string
   key: string
   error: string
-  value: T | RowValue
+  value: T
   doc?: T
 }
 
-export interface AllDocsResponse<T = RowValue> {
+export interface AllDocsResponse<T> {
   offset: number
   total_rows: number
   rows: RowResponse<T>[]
diff --git a/packages/types/src/sdk/datasources.ts b/packages/types/src/sdk/datasources.ts
index 7a335eb3b9..6b09959b6c 100644
--- a/packages/types/src/sdk/datasources.ts
+++ b/packages/types/src/sdk/datasources.ts
@@ -1,4 +1,5 @@
-import { Table } from "../documents"
+import { Table, Row } from "../documents"
+import { QueryJson } from "./search"
 
 export const PASSWORD_REPLACEMENT = "--secret-value--"
 
@@ -56,6 +57,7 @@ export enum SourceName {
   FIRESTORE = "FIRESTORE",
   REDIS = "REDIS",
   SNOWFLAKE = "SNOWFLAKE",
+  BUDIBASE = "BUDIBASE",
 }
 
 export enum IncludeRelationship {
@@ -180,11 +182,24 @@ export interface Schema {
   errors: Record<string, string>
 }
 
+// return these when an operation occurred but we got no response
+enum DSPlusOperation {
+  CREATE = "create",
+  READ = "read",
+  UPDATE = "update",
+  DELETE = "delete",
+}
+
+export type DatasourcePlusQueryResponse = Promise<
+  Row[] | Record<string, any>[] | void
+>
+
 export interface DatasourcePlus extends IntegrationBase {
   // if the datasource supports the use of bindings directly (to protect against SQL injection)
   // this returns the format of the identifier
   getBindingIdentifier(): string
   getStringConcat(parts: string[]): string
+  query(json: QueryJson): DatasourcePlusQueryResponse
   buildSchema(
     datasourceId: string,
     entities: Record<string, Table>
diff --git a/packages/types/src/sdk/db.ts b/packages/types/src/sdk/db.ts
index 9e44a4827f..4d103d5be6 100644
--- a/packages/types/src/sdk/db.ts
+++ b/packages/types/src/sdk/db.ts
@@ -1,5 +1,11 @@
 import type Nano from "@budibase/nano"
-import { AllDocsResponse, AnyDocument, Document, ViewTemplateOpts } from "../"
+import {
+  AllDocsResponse,
+  AnyDocument,
+  Document,
+  RowValue,
+  ViewTemplateOpts,
+} from "../"
 import { Writable } from "stream"
 
 export enum SearchIndex {
@@ -122,6 +128,7 @@ export interface Database {
   exists(): Promise<boolean>
   get<T extends Document>(id?: string): Promise<T>
+  exists(docId: string): Promise<boolean>
   getMultiple<T extends Document>(
     ids: string[],
     opts?: { allowMissing?: boolean }
   ): Promise<T[]>
@@ -135,7 +142,7 @@
     opts?: DatabasePutOpts
   ): Promise<Nano.DocumentInsertResponse>
   bulkDocs(documents: AnyDocument[]): Promise<Nano.DocumentBulkResponse[]>
-  allDocs<T extends Document>(
+  allDocs<T extends Document | RowValue>(
     params: DatabaseQueryOpts
   ): Promise<AllDocsResponse<T>>
   query<T extends Document>(
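`allDocs` now takes an explicit row generic (and accepts `RowValue` rows). A typed call then looks like the sketch below, assuming a `Database` handle obtained elsewhere and that `DatabaseQueryOpts` accepts `include_docs`:

    import { AllDocsResponse, Database, RowValue } from "@budibase/types"

    async function countDeleted(db: Database): Promise<number> {
      // the row generic must now be supplied by the caller
      const resp: AllDocsResponse<RowValue> = await db.allDocs<RowValue>({
        include_docs: false,
      })
      return resp.rows.filter(row => row.value.deleted).length
    }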
diff --git a/packages/types/src/sdk/events/userGroup.ts b/packages/types/src/sdk/events/userGroup.ts
index ea1f554cff..804db5196d 100644
--- a/packages/types/src/sdk/events/userGroup.ts
+++ b/packages/types/src/sdk/events/userGroup.ts
@@ -48,7 +48,7 @@ export interface GroupAddedOnboardingEvent extends BaseEvent {
 }
 
 export interface GroupPermissionsEditedEvent extends BaseEvent {
-  permissions: Record<string, string>
+  permissions: Record<string, string | undefined>
   groupId: string
   audited: {
     name: string
diff --git a/packages/types/src/sdk/search.ts b/packages/types/src/sdk/search.ts
index 35fd148c05..7a0ddaed66 100644
--- a/packages/types/src/sdk/search.ts
+++ b/packages/types/src/sdk/search.ts
@@ -60,6 +60,10 @@ export interface RenameColumn {
   updated: string
 }
 
+export interface AddColumn {
+  name: string
+}
+
 export interface RelationshipsJson {
   through?: string
   from?: string
@@ -94,6 +98,7 @@ export interface QueryJson {
     idFilter?: SearchFilters
   }
   relationships?: RelationshipsJson[]
+  tableAliases?: Record<string, string>
 }
 
 export interface SqlQuery {
diff --git a/packages/worker/scripts/test.sh b/packages/worker/scripts/test.sh
index eba95c4916..17b3ee17f4 100644
--- a/packages/worker/scripts/test.sh
+++ b/packages/worker/scripts/test.sh
@@ -4,10 +4,10 @@ set -e
 
 if [[ -n $CI ]]
 then
   # Running in ci, where resources are limited
-  echo "jest --coverage --maxWorkers=2 --forceExit --bail"
-  jest --coverage --maxWorkers=2 --forceExit --bail
+  echo "jest --coverage --maxWorkers=2 --forceExit --bail $@"
+  jest --coverage --maxWorkers=2 --forceExit --bail $@
 else
   # --maxWorkers performs better in development
-  echo "jest --coverage --maxWorkers=2 --forceExit"
-  jest --coverage --maxWorkers=2 --forceExit
+  echo "jest --coverage --maxWorkers=2 --forceExit $@"
+  jest --coverage --maxWorkers=2 --forceExit $@
 fi
\ No newline at end of file
diff --git a/packages/worker/src/api/routes/global/tests/groups.spec.ts b/packages/worker/src/api/routes/global/tests/groups.spec.ts
index 8f0739a812..b69c4781c4 100644
--- a/packages/worker/src/api/routes/global/tests/groups.spec.ts
+++ b/packages/worker/src/api/routes/global/tests/groups.spec.ts
@@ -147,7 +147,7 @@ describe("/api/global/groups", () => {
 
       await Promise.all(
         Array.from({ length: 30 }).map(async (_, i) => {
-          const email = `user${i}@${generator.domain()}`
+          const email = `user${i}@example.com`
           const user = await config.api.users.saveUser({
             ...structures.users.user(),
             email,
diff --git a/packages/worker/src/api/routes/validation/users.ts b/packages/worker/src/api/routes/validation/users.ts
index 2ae2d4984c..fbc85af2d3 100644
--- a/packages/worker/src/api/routes/validation/users.ts
+++ b/packages/worker/src/api/routes/validation/users.ts
@@ -27,6 +27,7 @@ export const buildSelfSaveValidation = () => {
     lastName: OPTIONAL_STRING,
     onboardedAt: Joi.string().optional(),
     appFavourites: Joi.array().optional(),
+    tours: Joi.object().optional(),
   }
   return auth.joiValidator.body(Joi.object(schema).required().unknown(false))
 }
diff --git a/packages/worker/src/initPro.ts b/packages/worker/src/initPro.ts
index ddc8d2562a..b34d514992 100644
--- a/packages/worker/src/initPro.ts
+++ b/packages/worker/src/initPro.ts
@@ -1,5 +1,4 @@
 import { sdk as proSdk } from "@budibase/pro"
-import * as userSdk from "./sdk/users"
 
 export const initPro = async () => {
   await proSdk.init({})
diff --git a/packages/worker/src/tests/TestConfiguration.ts b/packages/worker/src/tests/TestConfiguration.ts
index df6726eed1..3ebfb5f020 100644
--- a/packages/worker/src/tests/TestConfiguration.ts
+++ b/packages/worker/src/tests/TestConfiguration.ts
@@ -280,7 +280,7 @@ class TestConfiguration {
 
     const db = context.getGlobalDB()
 
-    const id = dbCore.generateDevInfoID(this.user!._id)
+    const id = dbCore.generateDevInfoID(this.user!._id!)
     // TODO: dry
     this.apiKey = encryption.encrypt(
       `${this.tenantId}${dbCore.SEPARATOR}${utils.newid()}`
diff --git a/qa-core/src/account-api/tests/accounts/accounts.cloud.spec.ts b/qa-core/src/account-api/tests/accounts/accounts.cloud.spec.ts
index 0969b72cf9..01338b609c 100644
--- a/qa-core/src/account-api/tests/accounts/accounts.cloud.spec.ts
+++ b/qa-core/src/account-api/tests/accounts/accounts.cloud.spec.ts
@@ -84,7 +84,7 @@ describe("Accounts", () => {
     })
 
     it("searches by email", async () => {
-      const email = generator.email()
+      const email = generator.email({ domain: "example.com" })
 
       // Empty result
       const [_, emptyBody] = await config.api.accounts.search(email, "email")
diff --git a/qa-core/src/internal-api/api/apis/AppAPI.ts b/qa-core/src/internal-api/api/apis/AppAPI.ts
index a9f9a6a841..8b291a628e 100644
--- a/qa-core/src/internal-api/api/apis/AppAPI.ts
+++ b/qa-core/src/internal-api/api/apis/AppAPI.ts
@@ -1,11 +1,10 @@
-import { App } from "@budibase/types"
+import { App, CreateAppRequest } from "@budibase/types"
 import { Response } from "node-fetch"
 import {
   RouteConfig,
   AppPackageResponse,
   DeployConfig,
   MessageResponse,
-  CreateAppRequest,
 } from "../../../types"
 import BudibaseInternalAPIClient from "../BudibaseInternalAPIClient"
 import BaseAPI from "./BaseAPI"
diff --git a/qa-core/src/internal-api/fixtures/applications.ts b/qa-core/src/internal-api/fixtures/applications.ts
index 01dd18fc6a..59f73ba863 100644
--- a/qa-core/src/internal-api/fixtures/applications.ts
+++ b/qa-core/src/internal-api/fixtures/applications.ts
@@ -1,5 +1,5 @@
 import { generator } from "../../shared"
-import { CreateAppRequest } from "../../types"
+import { CreateAppRequest } from "@budibase/types"
 
 function uniqueWord() {
   return generator.word() + generator.hash()
diff --git a/qa-core/src/internal-api/tests/tables/tables.spec.ts b/qa-core/src/internal-api/tests/tables/tables.spec.ts
index 09d8f68e86..a38b8e6059 100644
--- a/qa-core/src/internal-api/tests/tables/tables.spec.ts
+++ b/qa-core/src/internal-api/tests/tables/tables.spec.ts
@@ -13,17 +13,6 @@ describe("Internal API - Table Operations", () => {
     await config.afterAll()
   })
 
-  async function createAppFromTemplate() {
-    return config.api.apps.create({
-      name: generator.word(),
-      url: `/${generator.word()}`,
-      useTemplate: "true",
-      templateName: "Near Miss Register",
-      templateKey: "app/near-miss-register",
-      templateFile: undefined,
-    })
-  }
-
   it("Create and delete table, columns and rows", async () => {
     // create the app
     await config.createApp(fixtures.apps.appFromTemplate())
diff --git a/qa-core/src/public-api/fixtures/users.ts b/qa-core/src/public-api/fixtures/users.ts
index e20c464b34..418b565d2a 100644
--- a/qa-core/src/public-api/fixtures/users.ts
+++ b/qa-core/src/public-api/fixtures/users.ts
@@ -4,7 +4,7 @@ import { generator } from "../../shared"
 
 export const generateUser = (
   overrides: Partial<CreateUserParams> = {}
 ): CreateUserParams => ({
-  email: generator.email(),
+  email: generator.email({ domain: "example.com" }),
   roles: {
     [generator.string({ length: 32, alpha: true, numeric: true })]:
       generator.word(),
diff --git a/qa-core/src/shared/BudibaseTestConfiguration.ts b/qa-core/src/shared/BudibaseTestConfiguration.ts
index 18b7c89ec8..9a12f3e65d 100644
--- a/qa-core/src/shared/BudibaseTestConfiguration.ts
+++ b/qa-core/src/shared/BudibaseTestConfiguration.ts
@@ -1,8 +1,8 @@
 import { BudibaseInternalAPI } from "../internal-api"
 import { AccountInternalAPI } from "../account-api"
-import { APIRequestOpts, CreateAppRequest, State } from "../types"
+import { APIRequestOpts, State } from "../types"
 import * as fixtures from "../internal-api/fixtures"
-import { CreateAccountRequest } from "@budibase/types"
+import { CreateAccountRequest, CreateAppRequest } from "@budibase/types"
 
 export default class BudibaseTestConfiguration {
   // apis
diff --git a/qa-core/src/types/app.ts b/qa-core/src/types/app.ts
deleted file mode 100644
index 7159112024..0000000000
--- a/qa-core/src/types/app.ts
+++ /dev/null
@@ -1,10 +0,0 @@
-// TODO: Integrate with budibase
-export interface CreateAppRequest {
-  name: string
-  url: string
-  useTemplate?: string
-  templateName?: string
-  templateKey?: string
-  templateFile?: string
-  includeSampleData?: boolean
-}
diff --git a/qa-core/src/types/index.ts b/qa-core/src/types/index.ts
index 9bde46c66e..a44df4ef3c 100644
--- a/qa-core/src/types/index.ts
+++ b/qa-core/src/types/index.ts
@@ -1,6 +1,5 @@
 export * from "./api"
 export * from "./apiKeyResponse"
-export * from "./app"
 export * from "./appPackage"
 export * from "./deploy"
 export * from "./newAccount"
diff --git a/yarn.lock b/yarn.lock
index 2c367e469b..4025a537a3 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -1097,7 +1097,7 @@
 "@babel/highlight@^7.23.4":
   version "7.23.4"
   resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.23.4.tgz#edaadf4d8232e1a961432db785091207ead0621b"
-  integrity "sha1-7arfTYIy4alhQy23hQkSB+rQYhs= sha512-acGdbYSfp2WheJoJm/EBBBLh/ID8KDc64ISZ9DYtBmC8/Q204PZJLHyzeB5qMzJ5trcOkybd78M4x2KWsUq++A=="
+  integrity sha512-acGdbYSfp2WheJoJm/EBBBLh/ID8KDc64ISZ9DYtBmC8/Q204PZJLHyzeB5qMzJ5trcOkybd78M4x2KWsUq++A==
   dependencies:
     "@babel/helper-validator-identifier" "^7.22.20"
     chalk "^2.4.2"
@@ -1988,14 +1988,14 @@
   resolved "https://registry.yarnpkg.com/@babel/regjsgen/-/regjsgen-0.8.0.tgz#f0ba69b075e1f05fb2825b7fad991e7adbb18310"
   integrity sha512-x/rqGMdzj+fWZvCOYForTghzbtqPDZ5gPwaoNGHdgDfF2QA/XZbCBp4Moo5scrkAMPhB7z26XM/AaHuIJdgauA==
 
-"@babel/runtime@^7.10.5":
+"@babel/runtime@^7.10.5", "@babel/runtime@^7.13.10", "@babel/runtime@^7.21.0":
   version "7.23.9"
   resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.23.9.tgz#47791a15e4603bb5f905bc0753801cf21d6345f7"
   integrity sha512-0CX6F+BI2s9dkUqr08KFrAIZgNFj75rdBU/DjCyYLIaV/quFjkk6T+EJ2LkZHyZTbEV4L5p97mNkUsHl2wLFAw==
   dependencies:
     regenerator-runtime "^0.14.0"
 
-"@babel/runtime@^7.12.5", "@babel/runtime@^7.13.10", "@babel/runtime@^7.15.4", "@babel/runtime@^7.21.0", "@babel/runtime@^7.8.4", "@babel/runtime@^7.9.2":
+"@babel/runtime@^7.12.5", "@babel/runtime@^7.15.4", "@babel/runtime@^7.8.4", "@babel/runtime@^7.9.2":
   version "7.23.8"
   resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.23.8.tgz#8ee6fe1ac47add7122902f257b8ddf55c898f650"
   integrity sha512-Y7KbAP984rn1VGMbGqKmBLio9V7y5Je9GvU4rQPCPinCyNfUcToxIXl06d59URp/F3LwinvODxab5N/G6qggkw==
@@ -3419,9 +3419,9 @@
     tar "^6.1.11"
 
 "@mongodb-js/saslprep@^1.1.0":
-  version "1.1.1"
-  resolved "https://registry.yarnpkg.com/@mongodb-js/saslprep/-/saslprep-1.1.1.tgz#9a6c2516bc9188672c4d953ec99760ba49970da7"
-  integrity sha512-t7c5K033joZZMspnHg/gWPE4kandgc2OxE74aYOtGKfgB9VPuVJPix0H6fhmm2erj5PBJ21mqcx34lpIGtUCsQ==
+  version "1.1.4"
+  resolved "https://registry.yarnpkg.com/@mongodb-js/saslprep/-/saslprep-1.1.4.tgz#24ec1c4915a65f5c506bb88c081731450d91bb1c"
+  integrity sha512-8zJ8N1x51xo9hwPh6AWnKdLGEC5N3lDa6kms1YHmFBoRhTpJR6HG8wWk0td1MVCu9cD4YBrvjZEtd5Obw0Fbnw==
   dependencies:
     sparse-bitfield "^3.0.3"
 
@@ -4012,70 +4012,70 @@
     estree-walker "^2.0.2"
     picomatch "^2.3.1"
 
-"@rollup/rollup-android-arm-eabi@4.10.0":
-  version "4.10.0"
-  resolved "https://registry.yarnpkg.com/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.10.0.tgz#786eaf6372be2fc209cc957c14aa9d3ff8fefe6a"
-  integrity sha512-/MeDQmcD96nVoRumKUljsYOLqfv1YFJps+0pTrb2Z9Nl/w5qNUysMaWQsrd1mvAlNT4yza1iVyIu4Q4AgF6V3A==
+"@rollup/rollup-android-arm-eabi@4.12.0":
+  version "4.12.0"
+  resolved "https://registry.yarnpkg.com/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.12.0.tgz#38c3abd1955a3c21d492af6b1a1dca4bb1d894d6"
+  integrity sha512-+ac02NL/2TCKRrJu2wffk1kZ+RyqxVUlbjSagNgPm94frxtr+XDL12E5Ll1enWskLrtrZ2r8L3wED1orIibV/w==
 
-"@rollup/rollup-android-arm64@4.10.0":
-  version "4.10.0"
-  resolved "https://registry.yarnpkg.com/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.10.0.tgz#0114a042fd6396f4f3233e6171fd5b61a36ed539"
-  integrity sha512-lvu0jK97mZDJdpZKDnZI93I0Om8lSDaiPx3OiCk0RXn3E8CMPJNS/wxjAvSJJzhhZpfjXsjLWL8LnS6qET4VNQ==
+"@rollup/rollup-android-arm64@4.12.0":
+  version "4.12.0"
+  resolved "https://registry.yarnpkg.com/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.12.0.tgz#3822e929f415627609e53b11cec9a4be806de0e2"
+  integrity sha512-OBqcX2BMe6nvjQ0Nyp7cC90cnumt8PXmO7Dp3gfAju/6YwG0Tj74z1vKrfRz7qAv23nBcYM8BCbhrsWqO7PzQQ==
 
-"@rollup/rollup-darwin-arm64@4.10.0":
-  version "4.10.0"
-  resolved "https://registry.yarnpkg.com/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.10.0.tgz#944d007c1dc71a8c9174d11671c0c34bd74a2c81"
-  integrity sha512-uFpayx8I8tyOvDkD7X6n0PriDRWxcqEjqgtlxnUA/G9oS93ur9aZ8c8BEpzFmsed1TH5WZNG5IONB8IiW90TQg==
+"@rollup/rollup-darwin-arm64@4.12.0":
+  version "4.12.0"
+  resolved "https://registry.yarnpkg.com/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.12.0.tgz#6c082de71f481f57df6cfa3701ab2a7afde96f69"
+  integrity sha512-X64tZd8dRE/QTrBIEs63kaOBG0b5GVEd3ccoLtyf6IdXtHdh8h+I56C2yC3PtC9Ucnv0CpNFJLqKFVgCYe0lOQ==
 
-"@rollup/rollup-darwin-x64@4.10.0":
-  version "4.10.0"
-  resolved "https://registry.yarnpkg.com/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.10.0.tgz#1d08cb4521a058d7736ab1c7fe988daf034a2598"
-  integrity sha512-nIdCX03qFKoR/MwQegQBK+qZoSpO3LESurVAC6s6jazLA1Mpmgzo3Nj3H1vydXp/JM29bkCiuF7tDuToj4+U9Q==
+"@rollup/rollup-darwin-x64@4.12.0":
+  version "4.12.0"
+  resolved "https://registry.yarnpkg.com/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.12.0.tgz#c34ca0d31f3c46a22c9afa0e944403eea0edcfd8"
+  integrity sha512-cc71KUZoVbUJmGP2cOuiZ9HSOP14AzBAThn3OU+9LcA1+IUqswJyR1cAJj3Mg55HbjZP6OLAIscbQsQLrpgTOg==
 
-"@rollup/rollup-linux-arm-gnueabihf@4.10.0":
-  version "4.10.0"
-  resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.10.0.tgz#4763eec1591bf0e99a54ad3d1ef39cb268ed7b19"
-  integrity sha512-Fz7a+y5sYhYZMQFRkOyCs4PLhICAnxRX/GnWYReaAoruUzuRtcf+Qnw+T0CoAWbHCuz2gBUwmWnUgQ67fb3FYw==
+"@rollup/rollup-linux-arm-gnueabihf@4.12.0":
+  version "4.12.0"
+  resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.12.0.tgz#48e899c1e438629c072889b824a98787a7c2362d"
+  integrity sha512-a6w/Y3hyyO6GlpKL2xJ4IOh/7d+APaqLYdMf86xnczU3nurFTaVN9s9jOXQg97BE4nYm/7Ga51rjec5nfRdrvA==
 
-"@rollup/rollup-linux-arm64-gnu@4.10.0":
-  version "4.10.0"
-  resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.10.0.tgz#e6dae70c53ace836973526c41803b877cffc6f7b"
-  integrity sha512-yPtF9jIix88orwfTi0lJiqINnlWo6p93MtZEoaehZnmCzEmLL0eqjA3eGVeyQhMtxdV+Mlsgfwhh0+M/k1/V7Q==
+"@rollup/rollup-linux-arm64-gnu@4.12.0":
+  version "4.12.0"
+  resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.12.0.tgz#788c2698a119dc229062d40da6ada8a090a73a68"
+  integrity sha512-0fZBq27b+D7Ar5CQMofVN8sggOVhEtzFUwOwPppQt0k+VR+7UHMZZY4y+64WJ06XOhBTKXtQB/Sv0NwQMXyNAA==
 
-"@rollup/rollup-linux-arm64-musl@4.10.0":
-  version "4.10.0"
-  resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.10.0.tgz#5692e1a0feba0cc4a933864961afc3211177d242"
-  integrity sha512-9GW9yA30ib+vfFiwjX+N7PnjTnCMiUffhWj4vkG4ukYv1kJ4T9gHNg8zw+ChsOccM27G9yXrEtMScf1LaCuoWQ==
+"@rollup/rollup-linux-arm64-musl@4.12.0":
+  version "4.12.0"
+  resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.12.0.tgz#3882a4e3a564af9e55804beeb67076857b035ab7"
+  integrity sha512-eTvzUS3hhhlgeAv6bfigekzWZjaEX9xP9HhxB0Dvrdbkk5w/b+1Sxct2ZuDxNJKzsRStSq1EaEkVSEe7A7ipgQ==
 
-"@rollup/rollup-linux-riscv64-gnu@4.10.0":
-  version "4.10.0"
-  resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.10.0.tgz#fbe3d80f7a7ac54a8847f5bddd1bc6f7b9ccb65f"
-  integrity sha512-X1ES+V4bMq2ws5fF4zHornxebNxMXye0ZZjUrzOrf7UMx1d6wMQtfcchZ8SqUnQPPHdOyOLW6fTcUiFgHFadRA==
+"@rollup/rollup-linux-riscv64-gnu@4.12.0":
+  version "4.12.0"
+  resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.12.0.tgz#0c6ad792e1195c12bfae634425a3d2aa0fe93ab7"
+  integrity sha512-ix+qAB9qmrCRiaO71VFfY8rkiAZJL8zQRXveS27HS+pKdjwUfEhqo2+YF2oI+H/22Xsiski+qqwIBxVewLK7sw==
 
-"@rollup/rollup-linux-x64-gnu@4.10.0":
-  version "4.10.0"
-  resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.10.0.tgz#3f06b55ccf173446d390d0306643dff62ec99807"
-  integrity sha512-w/5OpT2EnI/Xvypw4FIhV34jmNqU5PZjZue2l2Y3ty1Ootm3SqhI+AmfhlUYGBTd9JnpneZCDnt3uNOiOBkMyw==
+"@rollup/rollup-linux-x64-gnu@4.12.0":
+  version "4.12.0"
+  resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.12.0.tgz#9d62485ea0f18d8674033b57aa14fb758f6ec6e3"
+  integrity sha512-TenQhZVOtw/3qKOPa7d+QgkeM6xY0LtwzR8OplmyL5LrgTWIXpTQg2Q2ycBf8jm+SFW2Wt/DTn1gf7nFp3ssVA==
 
-"@rollup/rollup-linux-x64-musl@4.10.0":
-  version "4.10.0"
-  resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.10.0.tgz#e4ac9b27041c83d7faab6205f62763103eb317ba"
-  integrity sha512-q/meftEe3QlwQiGYxD9rWwB21DoKQ9Q8wA40of/of6yGHhZuGfZO0c3WYkN9dNlopHlNT3mf5BPsUSxoPuVQaw==
+"@rollup/rollup-linux-x64-musl@4.12.0":
+  version "4.12.0"
+  resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.12.0.tgz#50e8167e28b33c977c1f813def2b2074d1435e05"
+  integrity sha512-LfFdRhNnW0zdMvdCb5FNuWlls2WbbSridJvxOvYWgSBOYZtgBfW9UGNJG//rwMqTX1xQE9BAodvMH9tAusKDUw==
 
-"@rollup/rollup-win32-arm64-msvc@4.10.0":
-  version "4.10.0"
-  resolved "https://registry.yarnpkg.com/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.10.0.tgz#6ad0d4fb0066f240778ee3f61eecf7aa0357f883"
-  integrity sha512-NrR6667wlUfP0BHaEIKgYM/2va+Oj+RjZSASbBMnszM9k+1AmliRjHc3lJIiOehtSSjqYiO7R6KLNrWOX+YNSQ==
+"@rollup/rollup-win32-arm64-msvc@4.12.0":
+  version "4.12.0"
+  resolved "https://registry.yarnpkg.com/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.12.0.tgz#68d233272a2004429124494121a42c4aebdc5b8e"
+  integrity sha512-JPDxovheWNp6d7AHCgsUlkuCKvtu3RB55iNEkaQcf0ttsDU/JZF+iQnYcQJSk/7PtT4mjjVG8N1kpwnI9SLYaw==
 
-"@rollup/rollup-win32-ia32-msvc@4.10.0":
-  version "4.10.0"
-  resolved "https://registry.yarnpkg.com/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.10.0.tgz#29d50292381311cc8d3623e73b427b7e2e40a653"
-  integrity sha512-FV0Tpt84LPYDduIDcXvEC7HKtyXxdvhdAOvOeWMWbQNulxViH2O07QXkT/FffX4FqEI02jEbCJbr+YcuKdyyMg==
+"@rollup/rollup-win32-ia32-msvc@4.12.0":
+  version "4.12.0"
+  resolved "https://registry.yarnpkg.com/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.12.0.tgz#366ca62221d1689e3b55a03f4ae12ae9ba595d40"
+  integrity sha512-fjtuvMWRGJn1oZacG8IPnzIV6GF2/XG+h71FKn76OYFqySXInJtseAqdprVTDTyqPxQOG9Exak5/E9Z3+EJ8ZA==
 
-"@rollup/rollup-win32-x64-msvc@4.10.0":
-  version "4.10.0"
-  resolved "https://registry.yarnpkg.com/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.10.0.tgz#4eedd01af3a82c1acb0fe6d837ebf339c4cbf839"
-  integrity sha512-OZoJd+o5TaTSQeFFQ6WjFCiltiYVjIdsXxwu/XZ8qRpsvMQr4UsVrE5UyT9RIvsnuF47DqkJKhhVZ2Q9YW9IpQ==
+"@rollup/rollup-win32-x64-msvc@4.12.0":
+  version "4.12.0"
+  resolved "https://registry.yarnpkg.com/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.12.0.tgz#9ffdf9ed133a7464f4ae187eb9e1294413fab235"
+  integrity sha512-ZYmr5mS2wd4Dew/JjT0Fqi2NPB/ZhZ2VvPp7SmvPZb4Y1CG/LRcS6tcRo2cYU7zLK5A7cdbhWnnWmUjoI4qapg==
 
 "@roxi/routify@2.18.0":
   version "2.18.0"
@@ -5219,16 +5219,16 @@
   integrity sha512-6ckxMjBBD8URvjB6J3NcnuAn5Pkl7t3TizAg+xdlzzQGSPSmBcXf8KoIH0ua/i+tio+ZRUHEXp0HEmvaR4kt0w==
 
 "@types/chai-subset@^1.3.3":
-  version "1.3.5"
-  resolved "https://registry.yarnpkg.com/@types/chai-subset/-/chai-subset-1.3.5.tgz#3fc044451f26985f45625230a7f22284808b0a9a"
-  integrity sha512-c2mPnw+xHtXDoHmdtcCXGwyLMiauiAyxWMzhGpqHC4nqI/Y5G2XhTampslK2rb59kpcuHon03UH8W6iYUzw88A==
+  version "1.3.3"
+  resolved "https://registry.yarnpkg.com/@types/chai-subset/-/chai-subset-1.3.3.tgz#97893814e92abd2c534de422cb377e0e0bdaac94"
+  integrity sha512-frBecisrNGz+F4T6bcc+NLeolfiojh5FxW2klu669+8BARtyQv2C/GkNW6FUodVe4BroGMP/wER/YDGc7rEllw==
   dependencies:
     "@types/chai" "*"
 
 "@types/chai@*", "@types/chai@^4.3.4":
-  version "4.3.11"
-  resolved "https://registry.yarnpkg.com/@types/chai/-/chai-4.3.11.tgz#e95050bf79a932cb7305dd130254ccdf9bde671c"
-  integrity sha512-qQR1dr2rGIHYlJulmr8Ioq3De0Le9E4MJ5AiaeAETJJpndT1uUNHsGFK3L/UIu+rbkQSdj8J/w2bCsBZc/Y5fQ==
+  version "4.3.9"
+  resolved "https://registry.yarnpkg.com/@types/chai/-/chai-4.3.9.tgz#144d762491967db8c6dea38e03d2206c2623feec"
+  integrity sha512-69TtiDzu0bcmKQv3yg1Zx409/Kd7r0b5F1PfpYJfSHzLGtB53547V4u+9iqKYsTu/O2ai6KTb0TInNpvuQ3qmg==
 
 "@types/chance@1.1.3":
   version "1.1.3"
@@ -5303,10 +5303,10 @@
   dependencies:
     "@types/node" "*"
 
-"@types/dockerode@^3.3.21":
-  version "3.3.23"
-  resolved "https://registry.yarnpkg.com/@types/dockerode/-/dockerode-3.3.23.tgz#07b2084013d01e14d5d97856446f4d9c9f27c223"
-  integrity sha512-Lz5J+NFgZS4cEVhquwjIGH4oQwlVn2h7LXD3boitujBnzOE5o7s9H8hchEjoDK2SlRsJTogdKnQeiJgPPKLIEw==
+"@types/dockerode@^3.3.24":
+  version "3.3.24"
+  resolved "https://registry.yarnpkg.com/@types/dockerode/-/dockerode-3.3.24.tgz#bea354a4fcd0824a80fd5ea5ede3e8cda71137a7"
+  integrity sha512-679y69OYusf7Fr2HtdjXPUF6hnHxSA9K4EsuagsMuPno/XpJHjXxCOy2I5YL8POnWbzjsQAi0pyKIYM9HSpQog==
   dependencies:
     "@types/docker-modem" "*"
     "@types/node" "*"
@@ -5408,7 +5408,7 @@
   resolved "https://registry.yarnpkg.com/@types/http-errors/-/http-errors-2.0.1.tgz#20172f9578b225f6c7da63446f56d4ce108d5a65"
   integrity sha512-/K3ds8TRAfBvi5vfjuz8y6+GiAYBZ0x4tXv1Av6CWBWn0IlADc+ZX9pMq7oU0fNQPnBwIZl3rmeLp6SBApbxSQ==
 
-"@types/ioredis@4.28.10":
+"@types/ioredis@4.28.10", "@types/ioredis@^4.28.10":
   version "4.28.10"
   resolved "https://registry.yarnpkg.com/@types/ioredis/-/ioredis-4.28.10.tgz#40ceb157a4141088d1394bb87c98ed09a75a06ff"
   integrity sha512-69LyhUgrXdgcNDv7ogs1qXZomnfOEnSmrmMFqKgt1XMJxmoOSG/u3wYy13yACIfKuMJ8IhKgHafDO3sx19zVQQ==
@@ -5623,10 +5623,10 @@
     "@types/node" "*"
     form-data "^3.0.0"
 
-"@types/node@*", "@types/node@>=10.0.0", "@types/node@>=12.12.47", "@types/node@>=13.13.4", "@types/node@>=13.7.0", "@types/node@>=8.1.0":
-  version "20.11.2"
-  resolved "https://registry.yarnpkg.com/@types/node/-/node-20.11.2.tgz#39cea3fe02fbbc2f80ed283e94e1d24f2d3856fb"
-  integrity sha512-cZShBaVa+UO1LjWWBPmWRR4+/eY/JR/UIEcDlVsw3okjWEu+rB7/mH6X3B/L+qJVHDLjk9QW/y2upp9wp1yDXA==
+"@types/node@*", "@types/node@>=10.0.0", "@types/node@>=12.12.47", "@types/node@>=13.13.4", "@types/node@>=13.7.0":
+  version "20.10.7"
+  resolved "https://registry.yarnpkg.com/@types/node/-/node-20.10.7.tgz#40fe8faf25418a75de9fe68a8775546732a3a901"
+  integrity sha512-fRbIKb8C/Y2lXxB5eVMj4IU7xpdox0Lh8bUPEdtLysaylsml1hOOx1+STloRs/B9nf7C6kPRmmg/V7aQW7usNg==
   dependencies:
     undici-types "~5.26.4"
 
@@ -5652,10 +5652,17 @@
   resolved "https://registry.yarnpkg.com/@types/node/-/node-14.18.37.tgz#0bfcd173e8e1e328337473a8317e37b3b14fd30d"
   integrity sha512-7GgtHCs/QZrBrDzgIJnQtuSvhFSwhyYSI2uafSwZoNt1iOGhEN5fwNrQMjtONyHm9+/LoA4453jH0CMYcr06Pg==
 
+"@types/node@>=8.1.0":
+  version "20.11.10"
+  resolved "https://registry.yarnpkg.com/@types/node/-/node-20.11.10.tgz#6c3de8974d65c362f82ee29db6b5adf4205462f9"
+  integrity sha512-rZEfe/hJSGYmdfX9tvcPMYeYPW2sNl50nsw4jZmRcaG0HIAb0WYEpsB05GOb53vjqpyE9GUhlDQ4jLSoB5q9kg==
+  dependencies:
+    undici-types "~5.26.4"
+
 "@types/node@^18.11.18":
-  version "18.19.13"
-  resolved "https://registry.yarnpkg.com/@types/node/-/node-18.19.13.tgz#c3e989ca967b862a1f6c8c4148fe31865eedaf1a"
-  integrity sha512-kgnbRDj8ioDyGxoiaXsiu1Ybm/K14ajCgMOkwiqpHrnF7d7QiYRoRqHIpglMMs3DwXinlK4qJ8TZGlj4hfleJg==
+  version "18.19.10"
+  resolved "https://registry.yarnpkg.com/@types/node/-/node-18.19.10.tgz#4de314ab66faf6bc8ba691021a091ddcdf13a158"
+  integrity sha512-IZD8kAM02AW1HRDTPOlz3npFava678pr8Ie9Vp8uRhBROXAv8MXT2pCnGZZAKYdromsNQLHQcfWQ6EOatVLtqA==
   dependencies:
     undici-types "~5.26.4"
 
@@ -5889,12 +5896,13 @@
   dependencies:
     "@types/node" "*"
 
-"@types/redlock@4.0.3":
-  version "4.0.3"
-  resolved "https://registry.yarnpkg.com/@types/redlock/-/redlock-4.0.3.tgz#aeab5fe5f0d433a125f6dcf9a884372ac0cddd4b"
-  integrity sha512-mcvvrquwREbAqyZALNBIlf49AL9Aa324BG+J/Dv4TAP8g+nxQMBI4/APNqqS99QEY7VTNT9XvsaczCVGK8uNnQ==
+"@types/redlock@4.0.7":
+  version "4.0.7"
+  resolved "https://registry.yarnpkg.com/@types/redlock/-/redlock-4.0.7.tgz#33ed56f22a38d6b2f2e6ae5ed1b3fc1875a08e6b"
+  integrity sha512-5D6egBv0fCfdbmnCETjEynVuiwFMEFFc3YFjh9EwhaaVTAi0YmB6UI1swq1S1rjIu+n27ppmlTFDK3D3cadJqg==
   dependencies:
     "@types/bluebird" "*"
+    "@types/ioredis" "^4.28.10"
     "@types/redis" "^2.8.0"
 
 "@types/request@^2.48.7":
@@ -6075,9 +6083,9 @@
   integrity sha512-xTE1E+YF4aWPJJeUzaZI5DRntlkY3+BCVJi0axFptnjGmAoWxkyREIh/XMrfxVLejwQxMCfDXdICo0VLxThrog==
 
 "@types/whatwg-url@^11.0.2":
-  version "11.0.3"
-  resolved "https://registry.yarnpkg.com/@types/whatwg-url/-/whatwg-url-11.0.3.tgz#9f584c9a9421f0971029ee504dd62a831cb8f3aa"
-  integrity sha512-z1ELvMijRL1QmU7QuzDkeYXSF2+dXI0ITKoQsIoVKcNBOiK5RMmWy+pYYxJTHFt8vkpZe7UsvRErQwcxZkjoUw==
+  version "11.0.4"
+  resolved "https://registry.yarnpkg.com/@types/whatwg-url/-/whatwg-url-11.0.4.tgz#ffed0dc8d89d91f62e3f368fcbda222a487c4f63"
+  integrity sha512-lXCmTWSHJvf0TRSO58nm978b8HJ/EdsSsEKLd3ODHFjo+3VGAyyTp4v50nWvwtzBxSMQrVOK7tcuN0zGPLICMw==
   dependencies:
     "@types/webidl-conversions" "*"
 
@@ -6527,16 +6535,11 @@ acorn-walk@^7.1.1:
   resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-7.2.0.tgz#0de889a601203909b0fbe07b8938dc21d2e967bc"
   integrity sha512-OPdCF6GsMIP+Az+aWfAAOEt2/+iVDKE7oy6lJ098aoe59oAmK76qV6Gw60SbZ8jHuG2wH058GF4pLFbYamYrVA==
 
-acorn-walk@^8.0.2, acorn-walk@^8.1.1:
+acorn-walk@^8.0.2, acorn-walk@^8.1.1, acorn-walk@^8.2.0:
   version "8.2.0"
   resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-8.2.0.tgz#741210f2e2426454508853a2f44d0ab83b7f69c1"
   integrity sha512-k+iyHEuPgSw6SbuDpGQM+06HQUa04DZ3o+F6CSzXMvvI5KMvnaEqXe+YVe555R9nn6GPt404fos4wcgpw12SDA==
 
-acorn-walk@^8.2.0:
-  version "8.3.2"
-  resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-8.3.2.tgz#7703af9415f1b6db9315d6895503862e231d34aa"
-  integrity sha512-cjkyv4OtNCIeqhHrfS81QWXoCBPExR/J62oyEqepVw8WaQeSqpW2uhuLPh1m9eWhDuOo/jUXVTlifvesOWp/4A==
-
 acorn@^5.2.1, acorn@^5.7.3:
   version "5.7.4"
   resolved "https://registry.yarnpkg.com/acorn/-/acorn-5.7.4.tgz#3e8d8a9947d0599a1796d10225d7432f4a4acf5e"
 
 acorn@^7.1.1:
   version "7.4.1"
   resolved "https://registry.yarnpkg.com/acorn/-/acorn-7.4.1.tgz#feaed255973d2e77555b83dbc08851a6c63520fa"
   integrity sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==
 
-acorn@^8.1.0, acorn@^8.10.0, acorn@^8.11.3, acorn@^8.2.4, acorn@^8.4.1, acorn@^8.5.0, acorn@^8.7.1, acorn@^8.8.1, acorn@^8.8.2, acorn@^8.9.0:
-  version "8.11.3"
-  resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.11.3.tgz#71e0b14e13a4ec160724b38fb7b0f233b1b81d7a"
-  integrity sha512-Y9rRfJG5jcKOE0CLisYbojUjIrIEE7AGMzA/Sm4BslANhbS+cDMpgBdcPT91oJ7OuJ9hYJBx59RjbhxVnrF8Xg==
+acorn@^8.1.0, acorn@^8.10.0, acorn@^8.2.4, acorn@^8.4.1, acorn@^8.5.0, acorn@^8.7.1, acorn@^8.8.1, acorn@^8.8.2, acorn@^8.9.0:
+  version "8.11.2"
+  resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.11.2.tgz#ca0d78b51895be5390a5903c5b3bdcdaf78ae40b"
+  integrity sha512-nc0Axzp/0FILLEVsm4fNwLCwMttvhEI263QtVPQcbpfZZ3ts0hLsZGOpE6czNlid7CJ9MlyH8reXkpsf3YUY4w==
 
 add-stream@^1.0.0:
   version "1.0.0"
@@ -6992,7 +6995,7 @@ asn1.js@^5.0.0, asn1.js@^5.2.0, asn1.js@^5.4.1:
     minimalistic-assert "^1.0.0"
     safer-buffer "^2.1.0"
 
-asn1@^0.2.6, asn1@~0.2.3:
+asn1@^0.2.4, asn1@^0.2.6, asn1@~0.2.3:
   version "0.2.6"
   resolved "https://registry.yarnpkg.com/asn1/-/asn1-0.2.6.tgz#0d3a7bb6e64e02a90c0303b31f292868ea09a08d"
   integrity sha512-ix/FxPn0MDjeyJ7i/yoHGFt/EX6LyNbxSEhPPXODPL+KB0VPk86UYfL0lMdy+KCnv+fmvIzySwaK5COwqVbWTQ==
@@ -7024,7 +7027,7 @@ ast-types@0.9.6:
   resolved "https://registry.yarnpkg.com/ast-types/-/ast-types-0.9.6.tgz#102c9e9e9005d3e7e3829bf0c4fa24ee862ee9b9"
   integrity sha512-qEdtR2UH78yyHX/AUNfXmJTlM48XoFZKBdwi1nzkI1mJL21cmbu0cvjxjpkXJ5NENMq42H+hNs8VLJcqXLerBQ==
 
-async-lock@^1.4.0:
+async-lock@^1.4.1:
   version "1.4.1"
   resolved "https://registry.yarnpkg.com/async-lock/-/async-lock-1.4.1.tgz#56b8718915a9b68b10fce2f2a9a3dddf765ef53f"
   integrity sha512-Az2ZTpuytrtqENulXwO3GGv1Bztugx6TT37NIo7imr/Qo0gsYiGtSdBa2B6fsXhTpVZDNfu1Qn3pk531e3q+nQ==
 
@@ -7043,7 +7046,12 @@ async@^2.6.3:
   dependencies:
     lodash "^4.17.14"
 
-async@^3.2.1, async@^3.2.3, async@^3.2.4:
+async@^3.2.1, async@^3.2.3:
+  version "3.2.4"
+  resolved "https://registry.yarnpkg.com/async/-/async-3.2.4.tgz#2d22e00f8cddeb5fde5dd33522b56d1cf569a81c"
+  integrity sha512-iAB+JbDEGXhyIUavoDl9WP/Jj106Kz9DEn1DPgYw5ruDn0e3Wgi3sKFm55sASdGBNOQB8F59d9qQ7deqrHA8wQ==
+
+async@^3.2.4:
   version "3.2.5"
   resolved "https://registry.yarnpkg.com/async/-/async-3.2.5.tgz#ebd52a8fdaf7a2289a24df399f8d8485c8a46b66"
   integrity
sha512-baNZyqaaLhyLVKm/DlvdW051MSgO6b8eVfIezl9E5PqWxFgzLm/wQntEW4zOytVburDEr0JlALEpdOFwvErLsg== @@ -7255,6 +7263,33 @@ balanced-match@^1.0.0: resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee" integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw== +bare-events@^2.0.0, bare-events@^2.2.0: + version "2.2.1" + resolved "https://registry.yarnpkg.com/bare-events/-/bare-events-2.2.1.tgz#7b6d421f26a7a755e20bf580b727c84b807964c1" + integrity sha512-9GYPpsPFvrWBkelIhOhTWtkeZxVxZOdb3VnFTCzlOo3OjvmTvzLoZFUT8kNFACx0vJej6QPney1Cf9BvzCNE/A== + +bare-fs@^2.1.1: + version "2.2.1" + resolved "https://registry.yarnpkg.com/bare-fs/-/bare-fs-2.2.1.tgz#c1985d8d3e07a178956b072d3af67cb8c1fa9391" + integrity sha512-+CjmZANQDFZWy4PGbVdmALIwmt33aJg8qTkVjClU6X4WmZkTPBDxRHiBn7fpqEWEfF3AC2io++erpViAIQbSjg== + dependencies: + bare-events "^2.0.0" + bare-os "^2.0.0" + bare-path "^2.0.0" + streamx "^2.13.0" + +bare-os@^2.0.0, bare-os@^2.1.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/bare-os/-/bare-os-2.2.0.tgz#24364692984d0bd507621754781b31d7872736b2" + integrity sha512-hD0rOPfYWOMpVirTACt4/nK8mC55La12K5fY1ij8HAdfQakD62M+H4o4tpfKzVGLgRDTuk3vjA4GqGXXCeFbag== + +bare-path@^2.0.0, bare-path@^2.1.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/bare-path/-/bare-path-2.1.0.tgz#830f17fd39842813ca77d211ebbabe238a88cb4c" + integrity sha512-DIIg7ts8bdRKwJRJrUMy/PICEaQZaPGZ26lsSx9MJSwIhSrcdHn7/C8W+XmnG/rKi6BaRcz+JO00CjZteybDtw== + dependencies: + bare-os "^2.1.0" + base62@^1.1.0: version "1.2.8" resolved "https://registry.yarnpkg.com/base62/-/base62-1.2.8.tgz#1264cb0fb848d875792877479dbe8bae6bae3428" @@ -7646,6 +7681,11 @@ bufferutil@^4.0.1: dependencies: node-gyp-build "^4.3.0" +buildcheck@0.0.3: + version "0.0.3" + resolved "https://registry.yarnpkg.com/buildcheck/-/buildcheck-0.0.3.tgz#70451897a95d80f7807e68fc412eb2e7e35ff4d5" + integrity sha512-pziaA+p/wdVImfcbsZLNF32EiWyujlQLwolMqUQE8xpKNOH7KmZQaY8sXN7DGOEzPAElo9QTaeNRfGnf3iOJbA== + buildcheck@~0.0.6: version "0.0.6" resolved "https://registry.yarnpkg.com/buildcheck/-/buildcheck-0.0.6.tgz#89aa6e417cfd1e2196e3f8fe915eb709d2fe4238" @@ -7910,9 +7950,9 @@ catharsis@^0.9.0: lodash "^4.17.15" chai@^4.3.7: - version "4.4.1" - resolved "https://registry.yarnpkg.com/chai/-/chai-4.4.1.tgz#3603fa6eba35425b0f2ac91a009fe924106e50d1" - integrity sha512-13sOfMv2+DWduEU+/xbun3LScLoqN17nBeTLUsmDfKdoiC1fr0n9PU4guu4AhRcOVFk/sW8LyZWHuhWtQZiF+g== + version "4.3.10" + resolved "https://registry.yarnpkg.com/chai/-/chai-4.3.10.tgz#d784cec635e3b7e2ffb66446a63b4e33bd390384" + integrity sha512-0UXG04VuVbruMUYbJ6JctvH0YnC/4q3/AkT18q4NaITo91CUm0liMS9VqzT9vZhVQ/1eqPanMWjBM+Juhfb/9g== dependencies: assertion-error "^1.1.0" check-error "^1.0.3" @@ -8654,6 +8694,14 @@ cosmiconfig@^8.2.0: parse-json "^5.0.0" path-type "^4.0.0" +cpu-features@~0.0.4: + version "0.0.4" + resolved "https://registry.yarnpkg.com/cpu-features/-/cpu-features-0.0.4.tgz#0023475bb4f4c525869c162e4108099e35bf19d8" + integrity sha512-fKiZ/zp1mUwQbnzb9IghXtHtDoTMtNeb8oYGx6kX2SYfhnG0HNdBEBIzB9b5KlXu5DQPhfy3mInbBxFcgwAr3A== + dependencies: + buildcheck "0.0.3" + nan "^2.15.0" + cpu-features@~0.0.9: version "0.0.9" resolved "https://registry.yarnpkg.com/cpu-features/-/cpu-features-0.0.9.tgz#5226b92f0f1c63122b0a3eb84cb8335a4de499fc" @@ -9553,9 +9601,9 @@ diff@^4.0.1: integrity sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A== diff@^5.1.0: - version 
"5.2.0" - resolved "https://registry.yarnpkg.com/diff/-/diff-5.2.0.tgz#26ded047cd1179b78b9537d5ef725503ce1ae531" - integrity sha512-uIFDxqpRZGZ6ThOk84hEfqWoHx2devRFvpTZcTHur85vImfaxUbTW9Ryh4CpCuDnToOP1CEtXKIgytHBPVff5A== + version "5.1.0" + resolved "https://registry.yarnpkg.com/diff/-/diff-5.1.0.tgz#bc52d298c5ea8df9194800224445ed43ffc87e40" + integrity sha512-D+mk+qE8VC/PAUrlAU34N+VfXev0ghe5ywmpqrawphmVZc1bEfn56uo9qpyGp1p4xpzOHkSW4ztBd6L7Xx4ACw== diffie-hellman@^5.0.0: version "5.0.3" @@ -9594,10 +9642,10 @@ docker-compose@^0.23.5, docker-compose@^0.23.6: dependencies: yaml "^1.10.2" -docker-compose@^0.24.2: - version "0.24.3" - resolved "https://registry.yarnpkg.com/docker-compose/-/docker-compose-0.24.3.tgz#298d7bb4aaf37b3b45d0e4ef55c7f58ccc39cca9" - integrity sha512-x3/QN3AIOMe7j2c8f/jcycizMft7dl8MluoB9OGPAYCyKHHiPUFqI9GjCcsU0kYy24vYKMCcfR6+5ZaEyQlrxg== +docker-compose@^0.24.6: + version "0.24.6" + resolved "https://registry.yarnpkg.com/docker-compose/-/docker-compose-0.24.6.tgz#d1f490a641bdb7ccc07c4d446b264f026f9a1f15" + integrity sha512-VidlUyNzXMaVsuM79sjSvwC4nfojkP2VneL+Zfs538M2XFnffZDhx6veqnz/evCNIYGyz5O+1fgL6+g0NLWTBA== dependencies: yaml "^2.2.2" @@ -9611,7 +9659,16 @@ docker-modem@^3.0.0: split-ca "^1.0.1" ssh2 "^1.11.0" -dockerode@^3.2.1, dockerode@^3.3.5: +dockerode@^3.2.1: + version "3.3.4" + resolved "https://registry.yarnpkg.com/dockerode/-/dockerode-3.3.4.tgz#875de614a1be797279caa9fe27e5637cf0e40548" + integrity sha512-3EUwuXnCU+RUlQEheDjmBE0B7q66PV9Rw5NiH1sXwINq0M9c5ERP9fxgkw36ZHOtzf4AGEEYySnkx/sACC9EgQ== + dependencies: + "@balena/dockerignore" "^1.0.2" + docker-modem "^3.0.0" + tar-fs "~2.0.1" + +dockerode@^3.3.5: version "3.3.5" resolved "https://registry.yarnpkg.com/dockerode/-/dockerode-3.3.5.tgz#7ae3f40f2bec53ae5e9a741ce655fff459745629" integrity sha512-/0YNa3ZDNeLr/tSckmD69+Gq+qVNhvKfAHNeZJBnp7EOP6RGKV8ORrJHkUn20So5wU+xxT7+1n5u8PjHbfjbSA== @@ -9741,9 +9798,9 @@ dotenv@8.6.0, dotenv@^8.2.0: integrity sha512-IrPdXQsk2BbzvCBGBOTmmSH5SodmqZNt4ERAZDmW4CT+tL8VtvinqywuANaFu4bOMWki16nqf0e4oC0QIaDr/g== dotenv@^16.3.1: - version "16.3.1" - resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-16.3.1.tgz#369034de7d7e5b120972693352a3bf112172cc3e" - integrity sha512-IPzF4w4/Rd94bA9imS68tZBaYyBWSCE47V1RGuMrB94iyTOIEwRmVL2x/4An+6mETpLrKJ5hQkB8W4kFAadeIQ== + version "16.4.1" + resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-16.4.1.tgz#1d9931f1d3e5d2959350d1250efab299561f7f11" + integrity sha512-CjA3y+Dr3FyFDOAMnxZEGtnW9KBR2M0JvvUtXNW+dYJL5ROWxP9DUHCwgFqpMk0OXCc0ljhaNTr2w/kutYIcHQ== dotenv@~10.0.0: version "10.0.0" @@ -10791,13 +10848,20 @@ fast-xml-parser@4.2.5: dependencies: strnum "^1.0.5" -fast-xml-parser@^4.1.3, fast-xml-parser@^4.2.2, fast-xml-parser@^4.2.5: +fast-xml-parser@^4.1.3: version "4.3.3" resolved "https://registry.yarnpkg.com/fast-xml-parser/-/fast-xml-parser-4.3.3.tgz#aeaf5778392329f17168c40c51bcbfec8ff965be" integrity sha512-coV/D1MhrShMvU6D0I+VAK3umz6hUaxxhL0yp/9RjfiYUfAv14rDhGQL+PLForhMdr0wq3PiV07WtkkNjJjNHg== dependencies: strnum "^1.0.5" +fast-xml-parser@^4.2.2, fast-xml-parser@^4.2.5: + version "4.3.2" + resolved "https://registry.yarnpkg.com/fast-xml-parser/-/fast-xml-parser-4.3.2.tgz#761e641260706d6e13251c4ef8e3f5694d4b0d79" + integrity "sha1-dh5kEmBwbW4TJRxO+OP1aU1LDXk= sha512-rmrXUXwbJedoXkStenj1kkljNF7ugn5ZjR9FJcwmCfcCbtOMDghPajbc+Tck6vE6F5XsDmx+Pr2le9fw8+pXBg==" + dependencies: + strnum "^1.0.5" + fastest-levenshtein@^1.0.12: version "1.0.16" resolved 
"https://registry.yarnpkg.com/fastest-levenshtein/-/fastest-levenshtein-1.0.16.tgz#210e61b6ff181de91ea9b3d1b84fdedd47e034e5" @@ -10857,7 +10921,7 @@ fetch-cookie@0.11.0: dependencies: tough-cookie "^2.3.3 || ^3.0.1 || ^4.0.0" -fflate@^0.4.1: +fflate@^0.4.1, fflate@^0.4.8: version "0.4.8" resolved "https://registry.yarnpkg.com/fflate/-/fflate-0.4.8.tgz#f90b82aefbd8ac174213abb338bd7ef848f0f5ae" integrity sha512-FJqqoDBR00Mdj9ppamLa/Y7vxm+PRmNWA67N846RvsoYVMKB4q3y/de5PA7gUmRMYK/8CMz2GDZQmCRN1wBcWA== @@ -15606,17 +15670,7 @@ mkdirp@^1.0.3, mkdirp@^1.0.4: resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-1.0.4.tgz#3eb5ed62622756d79a5f0e2a221dfebad75c2f7e" integrity sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw== -mlly@^1.1.0: - version "1.6.0" - resolved "https://registry.yarnpkg.com/mlly/-/mlly-1.6.0.tgz#0ecfbddc706857f5e170ccd28c6b0b9c81d3f548" - integrity sha512-YOvg9hfYQmnaB56Yb+KrJE2u0Yzz5zR+sLejEvF4fzwzV1Al6hkf2vyHTwqCRyv0hCi9rVCqVoXpyYevQIRwLQ== - dependencies: - acorn "^8.11.3" - pathe "^1.1.2" - pkg-types "^1.0.3" - ufo "^1.3.2" - -mlly@^1.2.0: +mlly@^1.1.0, mlly@^1.2.0: version "1.4.2" resolved "https://registry.yarnpkg.com/mlly/-/mlly-1.4.2.tgz#7cf406aa319ff6563d25da6b36610a93f2a8007e" integrity sha512-i/Ykufi2t1EZ6NaPLdfnZk2AX8cs0d+mTzVKuPfqPKPatxLApaBoxJQ9x1/uckXtrS/U5oisPMDkNs0yQTaBRg== @@ -15809,6 +15863,11 @@ named-placeholders@^1.1.3: dependencies: lru-cache "^7.14.1" +nan@^2.15.0, nan@^2.16.0: + version "2.17.0" + resolved "https://registry.yarnpkg.com/nan/-/nan-2.17.0.tgz#c0150a2368a182f033e9aa5195ec76ea41a199cb" + integrity sha512-2ZTgtl0nJsO0KQCjEpxcIr5D+Yv90plTitZt9JBfQvVJDS5seMl3FOvsh3+9CoYWXf/1l5OaZzzF6nDm4cagaQ== + nan@^2.17.0, nan@^2.18.0: version "2.18.0" resolved "https://registry.yarnpkg.com/nan/-/nan-2.18.0.tgz#26a6faae7ffbeb293a39660e88a76b82e30b7554" @@ -17171,11 +17230,6 @@ pathe@^1.1.0, pathe@^1.1.1: resolved "https://registry.yarnpkg.com/pathe/-/pathe-1.1.1.tgz#1dd31d382b974ba69809adc9a7a347e65d84829a" integrity sha512-d+RQGp0MAYTIaDBIMmOfMwz3E+LOZnxx1HZd5R18mmCZY0QBlK0LDZfPc8FW8Ed2DlvsuE6PRjroDY+wg4+j/Q== -pathe@^1.1.2: - version "1.1.2" - resolved "https://registry.yarnpkg.com/pathe/-/pathe-1.1.2.tgz#6c4cb47a945692e48a1ddd6e4094d170516437ec" - integrity sha512-whLdWMYL2TwI08hn8/ZqAbrVemu0LNaNNJZX73O6qaIdCTfXutsLhMkjdENX0qhsQ9uIimo4/aQOmXkoon2nDQ== - pathval@^1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/pathval/-/pathval-1.1.1.tgz#8534e77a77ce7ac5a2512ea21e0fdb8fcf6c3d8d" @@ -17800,10 +17854,18 @@ postgres-interval@^1.1.0: dependencies: xtend "^4.0.0" -posthog-js@^1.13.4, posthog-js@^1.36.0: - version "1.100.0" - resolved "https://registry.yarnpkg.com/posthog-js/-/posthog-js-1.100.0.tgz#687b9a6e4ed226aa6572f4040b418ea0c8b3d353" - integrity sha512-r2XZEiHQ9mBK7D1G9k57I8uYZ2kZTAJ0OCX6K/OOdCWN8jKPhw3h5F9No5weilP6eVAn+hrsy7NvPV7SCX7gMg== +posthog-js@^1.13.4: + version "1.103.1" + resolved "https://registry.yarnpkg.com/posthog-js/-/posthog-js-1.103.1.tgz#f846c413c28aca204dc1527f49d39f651348f3c4" + integrity sha512-cFXFU4Z4kl/+RUUV4ju1DlfM7dwCGi6H9xWsfhljIhGcBbT8UfS4JGgZGXl9ABQDdgDPb9xciqnysFSsUQshTA== + dependencies: + fflate "^0.4.8" + preact "^10.19.3" + +posthog-js@^1.36.0: + version "1.96.1" + resolved "https://registry.yarnpkg.com/posthog-js/-/posthog-js-1.96.1.tgz#4f9719a24e4e14037b0e72d430194d7cdb576447" + integrity sha512-kv1vQqYMt2BV3YHS+wxsbGuP+tz+M3y1AzNhz8TfkpY1HT8W/ONT0i0eQpeRr9Y+d4x/fZ6M4cXG5GMvi9lRCA== dependencies: fflate "^0.4.1" @@ -18044,6 +18106,11 @@ pprof-format@^2.0.7: 
resolved "https://registry.yarnpkg.com/pprof-format/-/pprof-format-2.0.7.tgz#526e4361f8b37d16b2ec4bb0696b5292de5046a4" integrity sha512-1qWaGAzwMpaXJP9opRa23nPnt2Egi7RMNoNBptEE/XwHbcn4fC2b/4U4bKc5arkGkIh2ZabpF2bEb+c5GNHEKA== +preact@^10.19.3: + version "10.19.3" + resolved "https://registry.yarnpkg.com/preact/-/preact-10.19.3.tgz#7a7107ed2598a60676c943709ea3efb8aaafa899" + integrity sha512-nHHTeFVBTHRGxJXKkKu5hT8C/YWBkPso4/Gad6xuj5dbptt9iF9NZr9pHbPhBrnT2klheu7mHTxTZ/LjwJiEiQ== + prebuild-install@^7.1.1: version "7.1.1" resolved "https://registry.yarnpkg.com/prebuild-install/-/prebuild-install-7.1.1.tgz#de97d5b34a70a0c81334fd24641f2a1702352e45" @@ -19259,25 +19326,25 @@ rollup@^3.27.1: fsevents "~2.3.2" rollup@^4.9.6: - version "4.10.0" - resolved "https://registry.yarnpkg.com/rollup/-/rollup-4.10.0.tgz#244c2cb54a8de004a949fe6036a0801be9060456" - integrity sha512-t2v9G2AKxcQ8yrG+WGxctBes1AomT0M4ND7jTFBCVPXQ/WFTvNSefIrNSmLKhIKBrvN8SG+CZslimJcT3W2u2g== + version "4.12.0" + resolved "https://registry.yarnpkg.com/rollup/-/rollup-4.12.0.tgz#0b6d1e5f3d46bbcf244deec41a7421dc54cc45b5" + integrity sha512-wz66wn4t1OHIJw3+XU7mJJQV/2NAfw5OAk6G6Hoo3zcvz/XOfQ52Vgi+AN4Uxoxi0KBBwk2g8zPrTDA4btSB/Q== dependencies: "@types/estree" "1.0.5" optionalDependencies: - "@rollup/rollup-android-arm-eabi" "4.10.0" - "@rollup/rollup-android-arm64" "4.10.0" - "@rollup/rollup-darwin-arm64" "4.10.0" - "@rollup/rollup-darwin-x64" "4.10.0" - "@rollup/rollup-linux-arm-gnueabihf" "4.10.0" - "@rollup/rollup-linux-arm64-gnu" "4.10.0" - "@rollup/rollup-linux-arm64-musl" "4.10.0" - "@rollup/rollup-linux-riscv64-gnu" "4.10.0" - "@rollup/rollup-linux-x64-gnu" "4.10.0" - "@rollup/rollup-linux-x64-musl" "4.10.0" - "@rollup/rollup-win32-arm64-msvc" "4.10.0" - "@rollup/rollup-win32-ia32-msvc" "4.10.0" - "@rollup/rollup-win32-x64-msvc" "4.10.0" + "@rollup/rollup-android-arm-eabi" "4.12.0" + "@rollup/rollup-android-arm64" "4.12.0" + "@rollup/rollup-darwin-arm64" "4.12.0" + "@rollup/rollup-darwin-x64" "4.12.0" + "@rollup/rollup-linux-arm-gnueabihf" "4.12.0" + "@rollup/rollup-linux-arm64-gnu" "4.12.0" + "@rollup/rollup-linux-arm64-musl" "4.12.0" + "@rollup/rollup-linux-riscv64-gnu" "4.12.0" + "@rollup/rollup-linux-x64-gnu" "4.12.0" + "@rollup/rollup-linux-x64-musl" "4.12.0" + "@rollup/rollup-win32-arm64-msvc" "4.12.0" + "@rollup/rollup-win32-ia32-msvc" "4.12.0" + "@rollup/rollup-win32-x64-msvc" "4.12.0" fsevents "~2.3.2" rotating-file-stream@3.1.0: @@ -19309,7 +19376,14 @@ rxjs@^6.6.6: dependencies: tslib "^1.9.0" -rxjs@^7.5.5, rxjs@^7.8.1: +rxjs@^7.5.5: + version "7.8.0" + resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-7.8.0.tgz#90a938862a82888ff4c7359811a595e14e1e09a4" + integrity sha512-F2+gxDshqmIub1KdvZkaEfGDwLNpPvk9Fs6LD/MyQxNgMds/WH9OdDDXOmxUZpME+iSK3rQCctkL0DYyytUqMg== + dependencies: + tslib "^2.1.0" + +rxjs@^7.8.1: version "7.8.1" resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-7.8.1.tgz#6f6f3d99ea8044291efd92e7c7fcf562c4057543" integrity sha512-AA3TVj+0A2iuIoQkWEK/tqFjBq2j+6PO6Y0zJcvzLAFhEFIO3HL0vls9hWLncZbAAbK0mar7oZ4V079I/qPMxg== @@ -20003,7 +20077,18 @@ ssh-remote-port-forward@^1.0.4: "@types/ssh2" "^0.5.48" ssh2 "^1.4.0" -ssh2@^1.11.0, ssh2@^1.4.0: +ssh2@^1.11.0: + version "1.11.0" + resolved "https://registry.yarnpkg.com/ssh2/-/ssh2-1.11.0.tgz#ce60186216971e12f6deb553dcf82322498fe2e4" + integrity sha512-nfg0wZWGSsfUe/IBJkXVll3PEZ//YH2guww+mP88gTpuSU4FtZN7zu9JoeTGOyCNx2dTDtT9fOpWwlzyj4uOOw== + dependencies: + asn1 "^0.2.4" + bcrypt-pbkdf "^1.0.2" + optionalDependencies: + cpu-features "~0.0.4" + nan "^2.16.0" + 
+ssh2@^1.4.0: version "1.15.0" resolved "https://registry.yarnpkg.com/ssh2/-/ssh2-1.15.0.tgz#2f998455036a7f89e0df5847efb5421748d9871b" integrity sha512-C0PHgX4h6lBxYx7hcXwu3QWdh4tg6tZZsTfXcdvc5caW/EMxaB4H9dWsl7qk+F7LAW762hp8VbXOX7x4xUYvEw== @@ -20081,9 +20166,9 @@ statuses@2.0.1, statuses@^2.0.0: integrity sha512-OpZ3zP+jT1PI7I8nemJX4AKmAX070ZkYPVWV/AaKTJl+tXCTGyVdC1a4SL8RUQYEwk/f34ZX8UTykN68FwrqAA== std-env@^3.3.1: - version "3.7.0" - resolved "https://registry.yarnpkg.com/std-env/-/std-env-3.7.0.tgz#c9f7386ced6ecf13360b6c6c55b8aaa4ef7481d2" - integrity sha512-JPbdCEQLj1w5GilpiHAx3qJvFndqybBysA3qUOnznweH4QbNYUsW/ea8QzSrnh0vNsezMMw5bcVool8lM0gwzg== + version "3.4.3" + resolved "https://registry.yarnpkg.com/std-env/-/std-env-3.4.3.tgz#326f11db518db751c83fd58574f449b7c3060910" + integrity sha512-f9aPhy8fYBuMN+sNfakZV18U39PbalgjXG3lLB9WkaYTxijru61wb57V9wxxNthXM5Sd88ETBWi29qLAsHO52Q== step@0.0.x: version "0.0.6" @@ -20121,6 +20206,16 @@ stream-to-array@^2.3.0: dependencies: any-promise "^1.1.0" +streamx@^2.13.0: + version "2.16.1" + resolved "https://registry.yarnpkg.com/streamx/-/streamx-2.16.1.tgz#2b311bd34832f08aa6bb4d6a80297c9caef89614" + integrity sha512-m9QYj6WygWyWa3H1YY69amr4nVgy61xfjys7xO7kviL5rfIEc2naf+ewFiOA+aEJD7y0JO3h2GoiUv4TDwEGzQ== + dependencies: + fast-fifo "^1.1.0" + queue-tick "^1.0.1" + optionalDependencies: + bare-events "^2.2.0" + streamx@^2.15.0: version "2.15.6" resolved "https://registry.yarnpkg.com/streamx/-/streamx-2.15.6.tgz#28bf36997ebc7bf6c08f9eba958735231b833887" @@ -20546,9 +20641,9 @@ svelte-spa-router@^4.0.1: regexparam "2.0.2" svelte@^4.2.10: - version "4.2.10" - resolved "https://registry.yarnpkg.com/svelte/-/svelte-4.2.10.tgz#3bef8d79ca75eb53cc4d03f9fac1546e60393f77" - integrity sha512-Ep06yCaCdgG1Mafb/Rx8sJ1QS3RW2I2BxGp2Ui9LBHSZ2/tO/aGLc5WqPjgiAP6KAnLJGaIr/zzwQlOo1b8MxA== + version "4.2.12" + resolved "https://registry.yarnpkg.com/svelte/-/svelte-4.2.12.tgz#13d98d2274d24d3ad216c8fdc801511171c70bb1" + integrity sha512-d8+wsh5TfPwqVzbm4/HCXC783/KPHV60NvwitJnyTA5lWn1elhXMNWhXGCJ7PwPa8qFUnyJNIyuIRt2mT0WMug== dependencies: "@ampproject/remapping" "^2.2.1" "@jridgewell/sourcemap-codec" "^1.4.15" @@ -20672,14 +20767,16 @@ tar-fs@2.1.1, tar-fs@^2.0.0, tar-fs@^2.1.0: pump "^3.0.0" tar-stream "^2.1.4" -tar-fs@^3.0.4: - version "3.0.4" - resolved "https://registry.yarnpkg.com/tar-fs/-/tar-fs-3.0.4.tgz#a21dc60a2d5d9f55e0089ccd78124f1d3771dbbf" - integrity sha512-5AFQU8b9qLfZCX9zp2duONhPmZv0hGYiBPJsyUdqMjzq/mqVpy/rEUSeHk1+YitmxugaptgBh5oDGU3VsAJq4w== +tar-fs@^3.0.5: + version "3.0.5" + resolved "https://registry.yarnpkg.com/tar-fs/-/tar-fs-3.0.5.tgz#f954d77767e4e6edf973384e1eb95f8f81d64ed9" + integrity sha512-JOgGAmZyMgbqpLwct7ZV8VzkEB6pxXFBVErLtb+XCOqzc6w1xiWKI9GVd6bwk68EX7eJ4DWmfXVmq8K2ziZTGg== dependencies: - mkdirp-classic "^0.5.2" pump "^3.0.0" tar-stream "^3.1.5" + optionalDependencies: + bare-fs "^2.1.1" + bare-path "^2.1.0" tar-fs@~2.0.1: version "2.0.1" @@ -20817,25 +20914,25 @@ test-exclude@^6.0.0: glob "^7.1.4" minimatch "^3.0.4" -testcontainers@10.6.0: - version "10.6.0" - resolved "https://registry.yarnpkg.com/testcontainers/-/testcontainers-10.6.0.tgz#205ad9148e68ff5c43a6209a30b12965acbe89d8" - integrity sha512-FDJ3o3J8IMu1V7Uc6lNZ2MAD8+BV4HdpR/Vf5mHtgYHKdn6k1EbGFwtnvVNOxanJ99FCjf/EU8eA5ZQ4yjlsGA== +testcontainers@10.7.2, testcontainers@^10.7.2: + version "10.7.2" + resolved "https://registry.yarnpkg.com/testcontainers/-/testcontainers-10.7.2.tgz#619e93200dd47f174b307b40fa830cf023b74c25" + integrity 
sha512-7d+LVd/4YKp/cutiVMLL5cnj/8p8oYELAVRRyNUM4FyUDz1OLQuwW868nDl7Vd1ZAQxzGeCR+F86FlR9Yw9fMA== dependencies: "@balena/dockerignore" "^1.0.2" - "@types/dockerode" "^3.3.21" + "@types/dockerode" "^3.3.24" archiver "^5.3.2" - async-lock "^1.4.0" + async-lock "^1.4.1" byline "^5.0.0" debug "^4.3.4" - docker-compose "^0.24.2" + docker-compose "^0.24.6" dockerode "^3.3.5" get-port "^5.1.1" node-fetch "^2.7.0" proper-lockfile "^4.1.2" properties-reader "^2.3.0" ssh-remote-port-forward "^1.0.4" - tar-fs "^3.0.4" + tar-fs "^3.0.5" tmp "^0.2.1" testcontainers@4.7.0: @@ -20958,9 +21055,9 @@ tiny-queue@^0.2.0: integrity sha512-EijGsv7kzd9I9g0ByCl6h42BWNGUZrlCSejfrb3AKeHC33SGbASu1VDf5O3rRiiUOhAC9CHdZxFPbZu0HmR70A== tinybench@^2.3.1: - version "2.6.0" - resolved "https://registry.yarnpkg.com/tinybench/-/tinybench-2.6.0.tgz#1423284ee22de07c91b3752c048d2764714b341b" - integrity sha512-N8hW3PG/3aOoZAN5V/NSAEDz0ZixDSSt5b/a05iqtpgfLWMSVuCo7w0k2vVvEjdrIoeGqZzweX2WlyioNIHchA== + version "2.5.1" + resolved "https://registry.yarnpkg.com/tinybench/-/tinybench-2.5.1.tgz#3408f6552125e53a5a48adee31261686fd71587e" + integrity sha512-65NKvSuAVDP/n4CqH+a9w2kTlLReS9vhsAP06MWx+/89nMinJyB2icyl58RIcqCmIggpojIGeuJGhjU1aGMBSg== tinycolor2@^1.6.0: version "1.6.0" @@ -21400,11 +21497,6 @@ ufo@^1.3.0: resolved "https://registry.yarnpkg.com/ufo/-/ufo-1.3.1.tgz#e085842f4627c41d4c1b60ebea1f75cdab4ce86b" integrity sha512-uY/99gMLIOlJPwATcMVYfqDSxUR9//AUcgZMzwfSTJPDKzA1S8mX4VLqa+fiAtveraQUBCz4FFcwVZBGbwBXIw== -ufo@^1.3.2: - version "1.4.0" - resolved "https://registry.yarnpkg.com/ufo/-/ufo-1.4.0.tgz#39845b31be81b4f319ab1d99fd20c56cac528d32" - integrity sha512-Hhy+BhRBleFjpJ2vchUNN40qgkh0366FWJGqVLYBHev0vpHTrXSA0ryT+74UiW6KWsldNurQMKGqCm1M2zBciQ== - uglify-js@^3.1.4, uglify-js@^3.7.7: version "3.17.4" resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-3.17.4.tgz#61678cf5fa3f5b7eb789bb345df29afb8257c22c" @@ -21449,9 +21541,9 @@ underscore@~1.13.2: integrity sha512-+A5Sja4HP1M08MaXya7p5LvjuM7K6q/2EaC0+iovj/wOcMsTzMvDFbasi/oSapiwOlt252IqsKqPjCl7huKS0A== undici-types@^6.0.1: - version "6.0.1" - resolved "https://registry.yarnpkg.com/undici-types/-/undici-types-6.0.1.tgz#62e2af9fcd3ce359634175658de39df8d0f37197" - integrity sha512-i9dNdkCziyqGpFxhatR9LITcInbFWh+ExlWkrZQpZHje8FfCcJKgps0IbmMd7D1o8c8syG4pIOV+aKIoC9JEyA== + version "6.6.2" + resolved "https://registry.yarnpkg.com/undici-types/-/undici-types-6.6.2.tgz#48c65d30bfcae492c3c89b1d147fed9d43a16b79" + integrity sha512-acoBcoBobgsg3YUEO/Oht8JJCuFYpzWLFKbqEbcEZcXdkQrTzkF/yWj9JoLaFDa6ArI31dFEmNZkCjQZ7mlf7w== undici-types@~5.26.4: version "5.26.5" @@ -21464,9 +21556,9 @@ undici@^4.14.1: integrity sha512-tkZSECUYi+/T1i4u+4+lwZmQgLXd4BLGlrc7KZPcLIW7Jpq99+Xpc30ONv7nS6F5UNOxp/HBZSSL9MafUrvJbw== undici@^6.0.1: - version "6.0.1" - resolved "https://registry.yarnpkg.com/undici/-/undici-6.0.1.tgz#385572addca36d1c2b280629cb694b726170027e" - integrity sha512-eZFYQLeS9BiXpsU0cuFhCwfeda2MnC48EVmmOz/eCjsTgmyTdaHdVsPSC/kwC2GtW2e0uH0HIPbadf3/bRWSxw== + version "6.6.2" + resolved "https://registry.yarnpkg.com/undici/-/undici-6.6.2.tgz#8dce5ae54e8a3bc7140c2b2a0972b5fde9a88efb" + integrity sha512-vSqvUE5skSxQJ5sztTZ/CdeJb1Wq0Hf44hlYMciqHghvz+K88U0l7D6u1VsndoFgskDcnU+nG3gYmMzJVzd9Qg== dependencies: "@fastify/busboy" "^2.0.0" @@ -21790,18 +21882,7 @@ vite-plugin-static-copy@^0.17.0: fs-extra "^11.1.0" picocolors "^1.0.0" -"vite@^3.0.0 || ^4.0.0": - version "4.5.2" - resolved "https://registry.yarnpkg.com/vite/-/vite-4.5.2.tgz#d6ea8610e099851dad8c7371599969e0f8b97e82" - integrity 
sha512-tBCZBNSBbHQkaGyhGCDUGqeo2ph8Fstyp6FMSvTtsXeZSPpSMGlviAOav2hxVTqFcx8Hj/twtWKsMJXNY0xI8w== - dependencies: - esbuild "^0.18.10" - postcss "^8.4.27" - rollup "^3.27.1" - optionalDependencies: - fsevents "~2.3.2" - -vite@^4.5.0: +"vite@^3.0.0 || ^4.0.0", vite@^4.5.0: version "4.5.0" resolved "https://registry.yarnpkg.com/vite/-/vite-4.5.0.tgz#ec406295b4167ac3bc23e26f9c8ff559287cff26" integrity sha512-ulr8rNLA6rkyFAlVWw2q5YJ91v098AFQ2R0PRFwPzREXOUJQPtFUG0t+/ZikhaOCDqFoDhN6/v8Sq0o4araFAw== @@ -22435,7 +22516,12 @@ yaml@^1.10.2: resolved "https://registry.yarnpkg.com/yaml/-/yaml-1.10.2.tgz#2301c5ffbf12b467de8da2333a459e29e7920e4b" integrity sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg== -yaml@^2.1.1, yaml@^2.2.2: +yaml@^2.1.1: + version "2.3.2" + resolved "https://registry.yarnpkg.com/yaml/-/yaml-2.3.2.tgz#f522db4313c671a0ca963a75670f1c12ea909144" + integrity sha512-N/lyzTPaJasoDmfV7YTrYCI0G/3ivm/9wdG0aHuheKowWQwGTsK0Eoiw6utmzAnI6pkJa0DUVygvp3spqqEKXg== + +yaml@^2.2.2: version "2.3.4" resolved "https://registry.yarnpkg.com/yaml/-/yaml-2.3.4.tgz#53fc1d514be80aabf386dc6001eb29bf3b7523b2" integrity sha512-8aAvwVUSHpfEqTQ4w/KMlf3HcRdt50E5ODIQJBw1fQ5RL34xabzxtUlzTXVqc4rkZsPbvrXKWnABCD7kWSmocA==